Apply the new proposal from Copilot and slightly refine the player design (with better responsive design); lint has been run this time

This commit is contained in:
TheNexter 2025-11-19 17:59:15 +01:00
parent 6e6ade0ab5
commit eb7f779bb8
4 changed files with 186 additions and 98 deletions

View File

@@ -18,6 +18,12 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
const audio = audioRef.current;
if (!audio) return;
// Reset state when src changes
setIsPlaying(false);
setCurrentTime(0);
setDuration(0);
setIsLoading(true);
const handleLoadedMetadata = () => {
if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
setDuration(audio.duration);
@@ -57,7 +63,22 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
audio.removeEventListener("timeupdate", handleTimeUpdate);
audio.removeEventListener("ended", handleEnded);
};
}, []);
}, [src]);
useEffect(() => {
const handlePlayAudio = (e: Event) => {
const customEvent = e as CustomEvent;
if (customEvent.detail !== audioRef.current && isPlaying) {
audioRef.current?.pause();
setIsPlaying(false);
}
};
document.addEventListener("play-audio", handlePlayAudio);
return () => {
document.removeEventListener("play-audio", handlePlayAudio);
};
}, [isPlaying]);
const togglePlayPause = async () => {
const audio = audioRef.current;
@@ -68,6 +89,10 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
setIsPlaying(false);
} else {
try {
// Stop other audio players
const event = new CustomEvent("play-audio", { detail: audio });
document.dispatchEvent(event);
await audio.play();
setIsPlaying(true);
} catch (error) {
@@ -97,19 +122,15 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
return (
<div className={`flex items-center gap-2 ${className}`}>
<audio ref={audioRef} src={src} preload="metadata" />
<div className="flex flex-row items-center px-2 py-1 rounded-md text-secondary-foreground gap-2">
<span className="font-mono text-sm">
{formatTime(currentTime)} / {formatTime(duration)}
</span>
<Button
variant="ghost"
size="sm"
onClick={togglePlayPause}
disabled={isLoading}
className="shrink-0 h-auto w-auto p-0.5 hover:bg-background/50"
className="shrink-0 p-0 h-5 w-5 hover:bg-transparent text-muted-foreground hover:text-foreground"
aria-label={isPlaying ? "Pause audio" : "Play audio"}
>
{isPlaying ? <PauseIcon className="w-4 h-4" /> : <PlayIcon className="w-4 h-4" />}
{isPlaying ? <PauseIcon className="w-5 h-5" /> : <PlayIcon className="w-5 h-5" />}
</Button>
<input
type="range"
@@ -118,9 +139,12 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
value={currentTime}
onChange={handleSeek}
disabled={isLoading || !duration}
className="flex-1 h-1 bg-muted hover:bg-background/50 rounded-lg appearance-none cursor-pointer disabled:opacity-50 [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:bg-primary [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-0"
className="w-full min-w-[128px] h-1 rounded-md bg-secondary cursor-pointer appearance-none [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:bg-primary [&::-webkit-slider-thumb]:rounded-full [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-none [&::-moz-range-thumb]:rounded-full"
aria-label="Seek audio position"
/>
</div>
<span className="text-sm text-muted-foreground whitespace-nowrap">
{formatTime(currentTime)} / {formatTime(duration)}
</span>
</div>
);
};

View File

@@ -1,6 +1,18 @@
import { LatLng } from "leaflet";
import { uniqBy } from "lodash-es";
import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MicIcon, MoreHorizontalIcon, PlusIcon, XIcon } from "lucide-react";
import {
FileIcon,
LinkIcon,
LoaderIcon,
MapPinIcon,
Maximize2Icon,
MicIcon,
MoreHorizontalIcon,
PauseIcon,
PlayIcon,
PlusIcon,
XIcon,
} from "lucide-react";
import { observer } from "mobx-react-lite";
import { useContext, useState } from "react";
import { toast } from "react-hot-toast";
@@ -136,7 +148,7 @@ const InsertMenu = observer((props: Props) => {
context.setAttachmentList([...context.attachmentList, attachment]);
} catch (error: any) {
console.error("Failed to upload audio recording:", error);
toast.error(error.details || "Failed to upload audio recording");
toast.error(error.message || "Failed to upload audio recording");
}
};
@@ -148,13 +160,31 @@ const InsertMenu = observer((props: Props) => {
<div className={`w-2 h-2 rounded-full bg-red-500 mr-2 ${!audioRecorder.isPaused ? "animate-pulse" : ""}`} />
<span className="font-mono text-sm">{new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}</span>
</div>
<Button variant="outline" size="icon" onClick={audioRecorder.togglePause} className="shrink-0">
{audioRecorder.isPaused ? <MicIcon className="w-4 h-4" /> : <span className="font-bold text-xs">||</span>}
<Button
variant="outline"
size="icon"
onClick={audioRecorder.togglePause}
className="shrink-0"
aria-label={audioRecorder.isPaused ? "Resume recording" : "Pause recording"}
>
{audioRecorder.isPaused ? <PlayIcon className="w-4 h-4" /> : <PauseIcon className="w-4 h-4" />}
</Button>
<Button variant="outline" size="icon" onClick={handleStopRecording} className="shrink-0 text-red-600 hover:text-red-700">
<Button
variant="outline"
size="icon"
onClick={handleStopRecording}
className="shrink-0 text-red-600 hover:text-red-700"
aria-label="Stop and save recording"
>
<div className="w-3 h-3 bg-current rounded-sm" />
</Button>
<Button variant="ghost" size="icon" onClick={audioRecorder.cancelRecording} className="shrink-0">
<Button
variant="ghost"
size="icon"
onClick={audioRecorder.cancelRecording}
className="shrink-0 text-red-600 hover:text-red-700"
aria-label="Cancel recording"
>
<XIcon className="w-4 h-4" />
</Button>
</div>

View File

@@ -1,46 +1,60 @@
import { useRef, useState } from "react";
interface AudioRecorderState {
isRecording: boolean;
isPaused: boolean;
recordingTime: number;
mediaRecorder: MediaRecorder | null;
}
import { useEffect, useRef, useState } from "react";
export const useAudioRecorder = () => {
const [state, setState] = useState<AudioRecorderState>({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
const [isRecording, setIsRecording] = useState(false);
const [isPaused, setIsPaused] = useState(false);
const [recordingTime, setRecordingTime] = useState(0);
const chunksRef = useRef<Blob[]>([]);
const timerRef = useRef<number | null>(null);
const durationRef = useRef<number>(0);
const mediaRecorderRef = useRef<MediaRecorder | null>(null);
useEffect(() => {
return () => {
if (mediaRecorderRef.current) {
mediaRecorderRef.current.stream.getTracks().forEach((track) => track.stop());
mediaRecorderRef.current = null;
}
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
}
};
}, []);
const startRecording = async () => {
let stream: MediaStream | null = null;
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const mediaRecorder = new MediaRecorder(stream);
stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const recorder = new MediaRecorder(stream);
chunksRef.current = [];
durationRef.current = 0;
setRecordingTime(0);
mediaRecorder.ondataavailable = (e: BlobEvent) => {
recorder.ondataavailable = (e: BlobEvent) => {
if (e.data.size > 0) {
chunksRef.current.push(e.data);
}
};
mediaRecorder.start();
setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));
recorder.start();
mediaRecorderRef.current = recorder;
setIsRecording(true);
setIsPaused(false);
timerRef.current = window.setInterval(() => {
setState((prev) => {
if (prev.isPaused) {
return prev;
if (!mediaRecorderRef.current || mediaRecorderRef.current.state === "paused") {
return;
}
return { ...prev, recordingTime: prev.recordingTime + 1 };
});
durationRef.current += 1;
setRecordingTime(durationRef.current);
}, 1000);
} catch (error) {
if (stream) {
stream.getTracks().forEach((track) => track.stop());
}
console.error("Error accessing microphone:", error);
throw error;
}
@@ -48,73 +62,92 @@ export const useAudioRecorder = () => {
const stopRecording = (): Promise<Blob> => {
return new Promise((resolve, reject) => {
const { mediaRecorder } = state;
if (!mediaRecorder) {
// Cleanup timer immediately to prevent further updates
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
}
const recorder = mediaRecorderRef.current;
if (!recorder) {
reject(new Error("No active recording"));
return;
}
mediaRecorder.onstop = () => {
let isResolved = false;
const finalize = () => {
if (isResolved) return;
isResolved = true;
const blob = new Blob(chunksRef.current, { type: "audio/webm" });
chunksRef.current = [];
durationRef.current = 0;
setIsRecording(false);
setIsPaused(false);
setRecordingTime(0);
mediaRecorderRef.current = null;
resolve(blob);
};
mediaRecorder.stop();
mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
recorder.onstop = finalize;
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
try {
recorder.stop();
recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
} catch (error) {
// Ignore errors during stop, as we'll finalize anyway
console.warn("Error stopping media recorder:", error);
}
setState({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
// Safety timeout in case onstop never fires
setTimeout(finalize, 1000);
});
};
const cancelRecording = () => {
const { mediaRecorder } = state;
if (mediaRecorder) {
mediaRecorder.stop();
mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
}
// Cleanup timer immediately
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
}
const recorder = mediaRecorderRef.current;
if (recorder) {
recorder.stop();
recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
}
chunksRef.current = [];
setState({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
durationRef.current = 0;
setIsRecording(false);
setIsPaused(false);
setRecordingTime(0);
mediaRecorderRef.current = null;
};
const togglePause = () => {
const { mediaRecorder, isPaused } = state;
if (!mediaRecorder) return;
const recorder = mediaRecorderRef.current;
if (!recorder) return;
if (isPaused) {
mediaRecorder.resume();
recorder.resume();
setIsPaused(false);
} else {
mediaRecorder.pause();
recorder.pause();
setIsPaused(true);
}
setState((prev) => ({ ...prev, isPaused: !prev.isPaused }));
};
return {
isRecording: state.isRecording,
isPaused: state.isPaused,
recordingTime: state.recordingTime,
isRecording,
isPaused,
recordingTime,
startRecording,
stopRecording,
cancelRecording,

View File

@@ -10,6 +10,7 @@
* {
@apply border-border outline-none ring-0;
}
body {
@apply bg-background text-foreground;
}