Apply new suggestion from Copilot + small refinements to the player design (with better responsive design) (lint done this time)

TheNexter 2025-11-19 17:59:15 +01:00
parent 6e6ade0ab5
commit eb7f779bb8
4 changed files with 186 additions and 98 deletions

View File

@@ -18,6 +18,12 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
     const audio = audioRef.current;
     if (!audio) return;
 
+    // Reset state when src changes
+    setIsPlaying(false);
+    setCurrentTime(0);
+    setDuration(0);
+    setIsLoading(true);
+
     const handleLoadedMetadata = () => {
       if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
         setDuration(audio.duration);
@@ -57,7 +63,22 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
       audio.removeEventListener("timeupdate", handleTimeUpdate);
       audio.removeEventListener("ended", handleEnded);
     };
-  }, []);
+  }, [src]);
+
+  useEffect(() => {
+    const handlePlayAudio = (e: Event) => {
+      const customEvent = e as CustomEvent;
+      if (customEvent.detail !== audioRef.current && isPlaying) {
+        audioRef.current?.pause();
+        setIsPlaying(false);
+      }
+    };
+
+    document.addEventListener("play-audio", handlePlayAudio);
+    return () => {
+      document.removeEventListener("play-audio", handlePlayAudio);
+    };
+  }, [isPlaying]);
 
   const togglePlayPause = async () => {
     const audio = audioRef.current;
@@ -68,6 +89,10 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
       setIsPlaying(false);
     } else {
       try {
+        // Stop other audio players
+        const event = new CustomEvent("play-audio", { detail: audio });
+        document.dispatchEvent(event);
+
         await audio.play();
         setIsPlaying(true);
       } catch (error) {
@@ -97,30 +122,29 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
   return (
     <div className={`flex items-center gap-2 ${className}`}>
       <audio ref={audioRef} src={src} preload="metadata" />
-
-      <div className="flex flex-row items-center px-2 py-1 rounded-md text-secondary-foreground gap-2">
-        <span className="font-mono text-sm">
-          {formatTime(currentTime)} / {formatTime(duration)}
-        </span>
-        <Button
-          variant="ghost"
-          size="sm"
-          onClick={togglePlayPause}
-          disabled={isLoading}
-          className="shrink-0 h-auto w-auto p-0.5 hover:bg-background/50"
-        >
-          {isPlaying ? <PauseIcon className="w-4 h-4" /> : <PlayIcon className="w-4 h-4" />}
-        </Button>
-        <input
-          type="range"
-          min="0"
-          max={duration || 0}
-          value={currentTime}
-          onChange={handleSeek}
-          disabled={isLoading || !duration}
-          className="flex-1 h-1 bg-muted hover:bg-background/50 rounded-lg appearance-none cursor-pointer disabled:opacity-50 [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:bg-primary [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-0"
-        />
-      </div>
+      <Button
+        variant="ghost"
+        size="sm"
+        onClick={togglePlayPause}
+        disabled={isLoading}
+        className="shrink-0 p-0 h-5 w-5 hover:bg-transparent text-muted-foreground hover:text-foreground"
+        aria-label={isPlaying ? "Pause audio" : "Play audio"}
+      >
+        {isPlaying ? <PauseIcon className="w-5 h-5" /> : <PlayIcon className="w-5 h-5" />}
+      </Button>
+      <input
+        type="range"
+        min="0"
+        max={duration || 0}
+        value={currentTime}
+        onChange={handleSeek}
+        disabled={isLoading || !duration}
+        className="w-full min-w-[128px] h-1 rounded-md bg-secondary cursor-pointer appearance-none [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:bg-primary [&::-webkit-slider-thumb]:rounded-full [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-none [&::-moz-range-thumb]:rounded-full"
+        aria-label="Seek audio position"
+      />
+      <span className="text-sm text-muted-foreground whitespace-nowrap">
+        {formatTime(currentTime)} / {formatTime(duration)}
+      </span>
     </div>
   );
 };
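
The "play-audio" event introduced above is the coordination contract between players: right before calling play(), a player dispatches a document-level CustomEvent carrying its own <audio> element as detail, and every other mounted AudioPlayer pauses itself when it sees an event whose detail is not its own element. Below is a minimal sketch of how another component could opt into the same "one player at a time" scheme; the hook name useExclusiveAudio and its file are hypothetical, and only the event name, the detail payload, and the document-level listener come from this diff.

import { useEffect } from "react";
import type { RefObject } from "react";

// Hypothetical helper (not part of this commit): lets any component that owns an
// <audio> element participate in the same mutual-exclusion scheme as AudioPlayer.
export const useExclusiveAudio = (audioRef: RefObject<HTMLAudioElement>, onInterrupted: () => void) => {
  useEffect(() => {
    const handlePlayAudio = (e: Event) => {
      // Another player announced playback; pause ourselves unless we sent the event.
      const detail = (e as CustomEvent).detail;
      if (detail !== audioRef.current) {
        audioRef.current?.pause();
        onInterrupted();
      }
    };
    document.addEventListener("play-audio", handlePlayAudio);
    return () => document.removeEventListener("play-audio", handlePlayAudio);
  }, [audioRef, onInterrupted]);

  // Call this right before audioRef.current.play() so other players pause first.
  const announcePlayback = () => {
    document.dispatchEvent(new CustomEvent("play-audio", { detail: audioRef.current }));
  };

  return { announcePlayback };
};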

View File

@@ -1,6 +1,18 @@
 import { LatLng } from "leaflet";
 import { uniqBy } from "lodash-es";
-import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MicIcon, MoreHorizontalIcon, PlusIcon, XIcon } from "lucide-react";
+import {
+  FileIcon,
+  LinkIcon,
+  LoaderIcon,
+  MapPinIcon,
+  Maximize2Icon,
+  MicIcon,
+  MoreHorizontalIcon,
+  PauseIcon,
+  PlayIcon,
+  PlusIcon,
+  XIcon,
+} from "lucide-react";
 import { observer } from "mobx-react-lite";
 import { useContext, useState } from "react";
 import { toast } from "react-hot-toast";
@@ -136,7 +148,7 @@ const InsertMenu = observer((props: Props) => {
       context.setAttachmentList([...context.attachmentList, attachment]);
     } catch (error: any) {
       console.error("Failed to upload audio recording:", error);
-      toast.error(error.details || "Failed to upload audio recording");
+      toast.error(error.message || "Failed to upload audio recording");
     }
   };
@@ -148,13 +160,31 @@ const InsertMenu = observer((props: Props) => {
           <div className={`w-2 h-2 rounded-full bg-red-500 mr-2 ${!audioRecorder.isPaused ? "animate-pulse" : ""}`} />
           <span className="font-mono text-sm">{new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}</span>
         </div>
-        <Button variant="outline" size="icon" onClick={audioRecorder.togglePause} className="shrink-0">
-          {audioRecorder.isPaused ? <MicIcon className="w-4 h-4" /> : <span className="font-bold text-xs">||</span>}
+        <Button
+          variant="outline"
+          size="icon"
+          onClick={audioRecorder.togglePause}
+          className="shrink-0"
+          aria-label={audioRecorder.isPaused ? "Resume recording" : "Pause recording"}
+        >
+          {audioRecorder.isPaused ? <PlayIcon className="w-4 h-4" /> : <PauseIcon className="w-4 h-4" />}
         </Button>
-        <Button variant="outline" size="icon" onClick={handleStopRecording} className="shrink-0 text-red-600 hover:text-red-700">
+        <Button
+          variant="outline"
+          size="icon"
+          onClick={handleStopRecording}
+          className="shrink-0 text-red-600 hover:text-red-700"
+          aria-label="Stop and save recording"
+        >
           <div className="w-3 h-3 bg-current rounded-sm" />
         </Button>
-        <Button variant="ghost" size="icon" onClick={audioRecorder.cancelRecording} className="shrink-0">
+        <Button
+          variant="ghost"
+          size="icon"
+          onClick={audioRecorder.cancelRecording}
+          className="shrink-0 text-red-600 hover:text-red-700"
+          aria-label="Cancel recording"
+        >
           <XIcon className="w-4 h-4" />
         </Button>
       </div>

View File

@@ -1,46 +1,60 @@
-import { useRef, useState } from "react";
-
-interface AudioRecorderState {
-  isRecording: boolean;
-  isPaused: boolean;
-  recordingTime: number;
-  mediaRecorder: MediaRecorder | null;
-}
+import { useEffect, useRef, useState } from "react";
 
 export const useAudioRecorder = () => {
-  const [state, setState] = useState<AudioRecorderState>({
-    isRecording: false,
-    isPaused: false,
-    recordingTime: 0,
-    mediaRecorder: null,
-  });
+  const [isRecording, setIsRecording] = useState(false);
+  const [isPaused, setIsPaused] = useState(false);
+  const [recordingTime, setRecordingTime] = useState(0);
 
   const chunksRef = useRef<Blob[]>([]);
   const timerRef = useRef<number | null>(null);
+  const durationRef = useRef<number>(0);
+  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
+
+  useEffect(() => {
+    return () => {
+      if (mediaRecorderRef.current) {
+        mediaRecorderRef.current.stream.getTracks().forEach((track) => track.stop());
+        mediaRecorderRef.current = null;
+      }
+      if (timerRef.current) {
+        clearInterval(timerRef.current);
+        timerRef.current = null;
+      }
+    };
+  }, []);
 
   const startRecording = async () => {
+    let stream: MediaStream | null = null;
     try {
-      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-      const mediaRecorder = new MediaRecorder(stream);
+      stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+      const recorder = new MediaRecorder(stream);
       chunksRef.current = [];
+      durationRef.current = 0;
+      setRecordingTime(0);
 
-      mediaRecorder.ondataavailable = (e: BlobEvent) => {
+      recorder.ondataavailable = (e: BlobEvent) => {
         if (e.data.size > 0) {
           chunksRef.current.push(e.data);
         }
       };
 
-      mediaRecorder.start();
-      setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));
+      recorder.start();
+      mediaRecorderRef.current = recorder;
+      setIsRecording(true);
+      setIsPaused(false);
 
       timerRef.current = window.setInterval(() => {
-        setState((prev) => {
-          if (prev.isPaused) {
-            return prev;
-          }
-          return { ...prev, recordingTime: prev.recordingTime + 1 };
-        });
+        if (!mediaRecorderRef.current || mediaRecorderRef.current.state === "paused") {
+          return;
+        }
+        durationRef.current += 1;
+        setRecordingTime(durationRef.current);
       }, 1000);
     } catch (error) {
+      if (stream) {
+        stream.getTracks().forEach((track) => track.stop());
+      }
       console.error("Error accessing microphone:", error);
       throw error;
     }
@@ -48,73 +62,92 @@ export const useAudioRecorder = () => {
   const stopRecording = (): Promise<Blob> => {
     return new Promise((resolve, reject) => {
-      const { mediaRecorder } = state;
-      if (!mediaRecorder) {
-        reject(new Error("No active recording"));
-        return;
-      }
-
-      mediaRecorder.onstop = () => {
-        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
-        chunksRef.current = [];
-        resolve(blob);
-      };
-
-      mediaRecorder.stop();
-      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
-
+      // Cleanup timer immediately to prevent further updates
       if (timerRef.current) {
         clearInterval(timerRef.current);
         timerRef.current = null;
       }
 
-      setState({
-        isRecording: false,
-        isPaused: false,
-        recordingTime: 0,
-        mediaRecorder: null,
-      });
+      const recorder = mediaRecorderRef.current;
+      if (!recorder) {
+        reject(new Error("No active recording"));
+        return;
+      }
+
+      let isResolved = false;
+      const finalize = () => {
+        if (isResolved) return;
+        isResolved = true;
+
+        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
+        chunksRef.current = [];
+        durationRef.current = 0;
+
+        setIsRecording(false);
+        setIsPaused(false);
+        setRecordingTime(0);
+
+        mediaRecorderRef.current = null;
+        resolve(blob);
+      };
+
+      recorder.onstop = finalize;
+
+      try {
+        recorder.stop();
+        recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
+      } catch (error) {
+        // Ignore errors during stop, as we'll finalize anyway
+        console.warn("Error stopping media recorder:", error);
+      }
+
+      // Safety timeout in case onstop never fires
+      setTimeout(finalize, 1000);
     });
   };
 
   const cancelRecording = () => {
-    const { mediaRecorder } = state;
-    if (mediaRecorder) {
-      mediaRecorder.stop();
-      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
-    }
-
+    // Cleanup timer immediately
     if (timerRef.current) {
       clearInterval(timerRef.current);
       timerRef.current = null;
     }
 
+    const recorder = mediaRecorderRef.current;
+    if (recorder) {
+      recorder.stop();
+      recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
+    }
+
     chunksRef.current = [];
-    setState({
-      isRecording: false,
-      isPaused: false,
-      recordingTime: 0,
-      mediaRecorder: null,
-    });
+    durationRef.current = 0;
+
+    setIsRecording(false);
+    setIsPaused(false);
+    setRecordingTime(0);
+
+    mediaRecorderRef.current = null;
   };
 
   const togglePause = () => {
-    const { mediaRecorder, isPaused } = state;
-    if (!mediaRecorder) return;
+    const recorder = mediaRecorderRef.current;
+    if (!recorder) return;
 
     if (isPaused) {
-      mediaRecorder.resume();
+      recorder.resume();
+      setIsPaused(false);
     } else {
-      mediaRecorder.pause();
+      recorder.pause();
+      setIsPaused(true);
    }
-
-    setState((prev) => ({ ...prev, isPaused: !prev.isPaused }));
   };
 
   return {
-    isRecording: state.isRecording,
-    isPaused: state.isPaused,
-    recordingTime: state.recordingTime,
+    isRecording,
+    isPaused,
+    recordingTime,
     startRecording,
     stopRecording,
     cancelRecording,
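
For reference, a minimal consumption sketch of the refactored hook, assuming it is exported from a useAudioRecorder module and that togglePause is also part of the returned object (the InsertMenu changes above call audioRecorder.togglePause). The RecorderDemo component, its onFinished prop, and the import path are hypothetical; only the hook's API and the audio/webm blob it resolves with come from this diff.

import { useAudioRecorder } from "./useAudioRecorder"; // path is an assumption

// Hypothetical consumer (not part of this commit): mirrors how InsertMenu drives the hook.
const RecorderDemo = ({ onFinished }: { onFinished: (blob: Blob) => void }) => {
  const audioRecorder = useAudioRecorder();

  const handleStop = async () => {
    // stopRecording resolves with an audio/webm Blob once the recorder flushes
    // (or after the safety timeout above).
    const blob = await audioRecorder.stopRecording();
    onFinished(blob);
  };

  if (!audioRecorder.isRecording) {
    // startRecording rejects if microphone access is denied, so handle the error.
    return <button onClick={() => void audioRecorder.startRecording().catch(console.error)}>Record</button>;
  }

  return (
    <div>
      <span>{new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}</span>
      <button onClick={audioRecorder.togglePause}>{audioRecorder.isPaused ? "Resume" : "Pause"}</button>
      <button onClick={handleStop}>Stop</button>
      <button onClick={audioRecorder.cancelRecording}>Cancel</button>
    </div>
  );
};

export default RecorderDemo;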

View File

@@ -10,6 +10,7 @@
 * {
   @apply border-border outline-none ring-0;
 }
+
 body {
   @apply bg-background text-foreground;
 }