Audio recording

TheNexter 2025-11-18 17:58:47 +01:00
parent 4de8712cb0
commit abcab8d6ab
2 changed files with 186 additions and 29 deletions
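
Adds in-editor audio recording: a new useAudioRecorder hook (second file) wraps MediaRecorder with a one-second timer, and InsertMenu gains Record Audio, Stop Recording, and Cancel Recording menu items plus a pulsing elapsed-time button while recording; the captured audio/webm blob is uploaded via attachmentStore.createAttachment and appended to the memo's attachment list.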

View File

@@ -1,8 +1,9 @@
import { LatLng } from "leaflet";
import { uniqBy } from "lodash-es";
import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MoreHorizontalIcon, PlusIcon } from "lucide-react";
import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MicIcon, MoreHorizontalIcon, PlusIcon, XIcon } from "lucide-react";
import { observer } from "mobx-react-lite";
import { useContext, useState } from "react";
import { toast } from "react-hot-toast";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
@@ -13,12 +14,14 @@ import {
DropdownMenuSubTrigger,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { attachmentStore } from "@/store";
import { Attachment } from "@/types/proto/api/v1/attachment_service";
import { Location, MemoRelation } from "@/types/proto/api/v1/memo_service";
import { useTranslate } from "@/utils/i18n";
import { MemoEditorContext } from "../types";
import { LinkMemoDialog } from "./InsertMenu/LinkMemoDialog";
import { LocationDialog } from "./InsertMenu/LocationDialog";
import { useAudioRecorder } from "./InsertMenu/useAudioRecorder";
import { useFileUpload } from "./InsertMenu/useFileUpload";
import { useLinkMemo } from "./InsertMenu/useLinkMemo";
import { useLocation } from "./InsertMenu/useLocation";
@@ -52,6 +55,7 @@ const InsertMenu = observer((props: Props) => {
});
const location = useLocation(props.location);
const audioRecorder = useAudioRecorder();
const isUploading = uploadingFlag || props.isUploading;
@@ -112,41 +116,91 @@ const InsertMenu = observer((props: Props) => {
});
};
const handleStopRecording = async () => {
try {
const blob = await audioRecorder.stopRecording();
const filename = `recording-${Date.now()}.webm`;
const file = new File([blob], filename, { type: "audio/webm" });
const { name, size, type } = file;
const buffer = new Uint8Array(await file.arrayBuffer());
const attachment = await attachmentStore.createAttachment({
attachment: Attachment.fromPartial({
filename: name,
size,
type,
content: buffer,
}),
attachmentId: "",
});
context.setAttachmentList([...context.attachmentList, attachment]);
} catch (error: any) {
console.error("Failed to upload audio recording:", error);
toast.error(error.details || "Failed to upload audio recording");
}
};
return (
<>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="outline" size="icon" className="shadow-none" disabled={isUploading}>
{isUploading ? <LoaderIcon className="size-4 animate-spin" /> : <PlusIcon className="size-4" />}
</Button>
{audioRecorder.isRecording ? (
<Button variant="outline" className="text-red-500 border-red-500 hover:text-red-600 hover:bg-red-50">
<div className="w-2 h-2 rounded-full bg-red-500 animate-pulse mr-2" />
{new Date(audioRecorder.recordingTime * 1000).toISOString().substr(14, 5)}
</Button>
) : (
<Button variant="outline" size="icon" className="shadow-none" disabled={isUploading}>
{isUploading ? <LoaderIcon className="size-4 animate-spin" /> : <PlusIcon className="size-4" />}
</Button>
)}
</DropdownMenuTrigger>
<DropdownMenuContent align="start">
<DropdownMenuItem onClick={handleUploadClick}>
<FileIcon className="w-4 h-4" />
{t("common.upload")}
</DropdownMenuItem>
<DropdownMenuItem onClick={() => setLinkDialogOpen(true)}>
<LinkIcon className="w-4 h-4" />
{t("tooltip.link-memo")}
</DropdownMenuItem>
<DropdownMenuItem onClick={handleLocationClick}>
<MapPinIcon className="w-4 h-4" />
{t("tooltip.select-location")}
</DropdownMenuItem>
{/* View submenu with Focus Mode */}
<DropdownMenuSub>
<DropdownMenuSubTrigger>
<MoreHorizontalIcon className="w-4 h-4" />
{t("common.more")}
</DropdownMenuSubTrigger>
<DropdownMenuSubContent>
<DropdownMenuItem onClick={props.onToggleFocusMode}>
<Maximize2Icon className="w-4 h-4" />
{t("editor.focus-mode")}
<span className="ml-auto text-xs text-muted-foreground opacity-60">F</span>
{audioRecorder.isRecording ? (
<>
<DropdownMenuItem onClick={handleStopRecording} className="text-red-500 focus:text-red-600 focus:bg-red-50">
<div className="w-2 h-2 rounded-full bg-red-500 mr-2" />
Stop Recording
</DropdownMenuItem>
</DropdownMenuSubContent>
</DropdownMenuSub>
<DropdownMenuItem onClick={audioRecorder.cancelRecording}>
<XIcon className="w-4 h-4 mr-2" />
Cancel Recording
</DropdownMenuItem>
</>
) : (
<>
<DropdownMenuItem onClick={handleUploadClick}>
<FileIcon className="w-4 h-4" />
{t("common.upload")}
</DropdownMenuItem>
<DropdownMenuItem onClick={() => setLinkDialogOpen(true)}>
<LinkIcon className="w-4 h-4" />
{t("tooltip.link-memo")}
</DropdownMenuItem>
<DropdownMenuItem onClick={handleLocationClick}>
<MapPinIcon className="w-4 h-4" />
{t("tooltip.select-location")}
</DropdownMenuItem>
<DropdownMenuItem onClick={audioRecorder.startRecording}>
<MicIcon className="w-4 h-4" />
Record Audio
</DropdownMenuItem>
{/* View submenu with Focus Mode */}
<DropdownMenuSub>
<DropdownMenuSubTrigger>
<MoreHorizontalIcon className="w-4 h-4" />
{t("common.more")}
</DropdownMenuSubTrigger>
<DropdownMenuSubContent>
<DropdownMenuItem onClick={props.onToggleFocusMode}>
<Maximize2Icon className="w-4 h-4" />
{t("editor.focus-mode")}
<span className="ml-auto text-xs text-muted-foreground opacity-60">F</span>
</DropdownMenuItem>
</DropdownMenuSubContent>
</DropdownMenuSub>
</>
)}
</DropdownMenuContent>
</DropdownMenu>

View File

@@ -0,0 +1,103 @@
import { useRef, useState } from "react";
interface AudioRecorderState {
isRecording: boolean;
isPaused: boolean;
recordingTime: number;
mediaRecorder: MediaRecorder | null;
}
export const useAudioRecorder = () => {
const [state, setState] = useState<AudioRecorderState>({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
const chunksRef = useRef<Blob[]>([]);
const timerRef = useRef<number | null>(null);
const startRecording = async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const mediaRecorder = new MediaRecorder(stream);
chunksRef.current = [];
mediaRecorder.ondataavailable = (e: BlobEvent) => {
if (e.data.size > 0) {
chunksRef.current.push(e.data);
}
};
mediaRecorder.start();
setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));
timerRef.current = window.setInterval(() => {
setState((prev: AudioRecorderState) => ({ ...prev, recordingTime: prev.recordingTime + 1 }));
}, 1000);
} catch (error) {
console.error("Error accessing microphone:", error);
throw error;
}
};
const stopRecording = (): Promise<Blob> => {
return new Promise((resolve, reject) => {
const { mediaRecorder } = state;
if (!mediaRecorder) {
reject(new Error("No active recording"));
return;
}
mediaRecorder.onstop = () => {
const blob = new Blob(chunksRef.current, { type: "audio/webm" });
chunksRef.current = [];
resolve(blob);
};
mediaRecorder.stop();
mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
}
setState({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
});
};
const cancelRecording = () => {
const { mediaRecorder } = state;
if (mediaRecorder) {
mediaRecorder.stop();
mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
}
if (timerRef.current) {
clearInterval(timerRef.current);
timerRef.current = null;
}
chunksRef.current = [];
setState({
isRecording: false,
isPaused: false,
recordingTime: 0,
mediaRecorder: null,
});
};
return {
isRecording: state.isRecording,
recordingTime: state.recordingTime,
startRecording,
stopRecording,
cancelRecording,
};
};
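
For reference, a minimal consumer sketch showing how the hook's API (isRecording, recordingTime, startRecording, stopRecording, cancelRecording) is meant to be wired up, mirroring the InsertMenu changes above. The component name, the onUpload callback, and the relative import path (assuming the file sits next to useAudioRecorder.ts) are illustrative only and not part of this commit:

import { useAudioRecorder } from "./useAudioRecorder";

// Hypothetical consumer component; only the hook API comes from this commit.
const RecorderExample = ({ onUpload }: { onUpload: (file: File) => Promise<void> }) => {
  const recorder = useAudioRecorder();

  const handleStop = async () => {
    // stopRecording resolves with the buffered chunks as a single audio/webm Blob
    // and resets isRecording/recordingTime.
    const blob = await recorder.stopRecording();
    await onUpload(new File([blob], `recording-${Date.now()}.webm`, { type: "audio/webm" }));
  };

  return recorder.isRecording ? (
    <>
      {/* recordingTime counts whole seconds; slice(14, 19) formats it as MM:SS. */}
      <span>{new Date(recorder.recordingTime * 1000).toISOString().slice(14, 19)}</span>
      <button onClick={handleStop}>Stop</button>
      {/* cancelRecording stops the stream tracks and discards the buffered chunks. */}
      <button onClick={recorder.cancelRecording}>Cancel</button>
    </>
  ) : (
    <button onClick={recorder.startRecording}>Record</button>
  );
};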