diff --git a/hooks/useAudio.ts b/hooks/useAudio.ts
index cca229c..b5ac54d 100644
--- a/hooks/useAudio.ts
+++ b/hooks/useAudio.ts
@@ -1,15 +1,10 @@
-import { useState, useEffect, useCallback } from 'react'
-
-async function getAudioObjectURL(src: string): Promise<string> {
-  const blob = await fetch(src).then((resp) => resp.blob())
-  return URL.createObjectURL(blob)
-}
+import { useState, useEffect, useCallback, useMemo } from 'react'
 
 function useAudio(url: string) {
-  const [audio] = useState(new Audio())
   const [playing, setPlaying] = useState(false)
   const [duration, setDuration] = useState(0)
   const [current, setCurrent] = useState(0)
+  const audio = useMemo(() => new Audio(url), [url])
 
   const toggle = useCallback(() => {
     if (!playing) {
@@ -19,41 +14,19 @@ function useAudio(url: string) {
   }, [audio, playing])
 
   const init = useCallback(async () => {
-    const audioObjectURL = await getAudioObjectURL(url)
-    audio.src = audioObjectURL
     audio.preload = 'auto'
-  }, [audio, url])
-
-  useEffect(() => {
-    playing ? audio.play() : audio.pause()
-  }, [audio, playing])
+  }, [audio])
 
   useEffect(() => {
     init()
-    let audioDuration = 0
     audio.addEventListener('ended', () => setPlaying(false))
     audio.addEventListener('loadeddata', () => {
-      if (audio.duration === Infinity) {
-        // HACK: Set a duration longer than the audio to get the actual duration of the audio
-        audio.currentTime = 1e1
-      } else {
-        setDuration(audio.duration)
-        audioDuration = audio.duration
-      }
+      setDuration(audio.duration)
     })
     audio.addEventListener('timeupdate', () => {
-      if (audioDuration === 0) {
-        audioDuration = audio.currentTime
-        setDuration(audioDuration)
-        setTimeout(() => {
-          audio.currentTime = 0
-          setCurrent(0)
-        }, 0)
-      }
       setCurrent(audio.currentTime)
     })
     return () => {
-      audioDuration = 0
       audio.removeEventListener('ended', () => setPlaying(false))
       audio.removeEventListener('loadeddata', () => setDuration(0))
       audio.removeEventListener('timeupdate', () => setCurrent(0))
diff --git a/package.json b/package.json
index 1b258be..a87247b 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "talk-with-gemini",
-  "version": "0.12.1",
+  "version": "0.12.2",
   "private": true,
   "author": "Amery2010 ",
   "license": "GPL-3.0-only",
@@ -40,6 +40,7 @@
     "clipboard": "^2.0.11",
     "clsx": "^2.1.1",
     "crypto-js": "^4.2.0",
+    "fix-webm-duration": "^1.0.5",
     "highlight.js": "^11.9.0",
     "i18next": "^23.11.5",
     "i18next-browser-languagedetector": "^7.2.1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 5477ec7..0219a63 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -80,6 +80,9 @@ importers:
       crypto-js:
         specifier: ^4.2.0
         version: 4.2.0
+      fix-webm-duration:
+        specifier: ^1.0.5
+        version: 1.0.5
       highlight.js:
         specifier: ^11.9.0
         version: 11.9.0
@@ -1635,6 +1638,9 @@ packages:
     resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
     engines: {node: '>=10'}
 
+  fix-webm-duration@1.0.5:
+    resolution: {integrity: sha512-b6oula3OfSknx0aWoLsxvp4DVIYbwsf+UAkr6EDAK3iuMYk/OSNKzmeSI61GXK0MmFTEuzle19BPvTxMIKjkZg==}
+
   flat-cache@3.2.0:
     resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==}
     engines: {node: ^10.12.0 || >=12.0.0}
@@ -4509,6 +4515,8 @@ snapshots:
       locate-path: 6.0.0
       path-exists: 4.0.0
 
+  fix-webm-duration@1.0.5: {}
+
   flat-cache@3.2.0:
     dependencies:
       flatted: 3.3.1
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 7c5a8e6..b572286 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "talk-with-gemini",
-    "version": "0.12.1"
+    "version": "0.12.2"
   },
   "tauri": {
     "allowlist": {
diff --git a/utils/Recorder.ts b/utils/Recorder.ts
index aa4d8be..58a7c4f 100644
--- a/utils/Recorder.ts
+++ b/utils/Recorder.ts
@@ -1,3 +1,4 @@
+import fixWebmDuration from 'fix-webm-duration'
 import { isFunction } from 'lodash-es'
 
 export interface AudioRecorderPayload {
@@ -16,10 +17,10 @@ export interface RecordMineType {
 }
 
 export class AudioRecorder {
-  public blob: Blob | null = null
   public time: number = 0
   public isRecording: boolean = false
   public autoStop: boolean = false
+  private startTime: number = 0
   protected audioContext: AudioContext
   protected mediaRecorder: MediaRecorder | null = null
   protected volumeThreshold: number = 30
@@ -82,7 +83,14 @@ export class AudioRecorder {
     } else {
       // Get the microphone audio stream
       navigator.mediaDevices
-        .getUserMedia({ audio: true })
+        .getUserMedia({
+          audio: {
+            sampleSize: 16,
+            channelCount: 1,
+            noiseSuppression: false,
+            echoCancellation: false,
+          },
+        })
         .then((stream) => {
           this.recording(stream)
         })
@@ -110,6 +118,15 @@
     // Connect the microphone to the analyser
     microphone.connect(analyser)
 
+    const finishRecord = async () => {
+      const duration = Date.now() - this.startTime
+      const blob = new Blob(chunks, { type: mediaRecorderType.mineType })
+      const fixedBlob = await fixWebmDuration(blob, duration, { logger: false })
+      this.onFinish(fixedBlob)
+      this.startTime = 0
+      chunks = []
+    }
+
     // Listen for the recording data-available event and send the data to the server
     mediaRecorder.addEventListener('dataavailable', (ev) => {
       if (ev.data.size > 0) {
@@ -118,21 +135,15 @@
     })
     mediaRecorder.addEventListener('start', () => {
       this.isRecording = true
+      this.startTime = Date.now()
       this.startTimer()
       this.onStart()
     })
     mediaRecorder.addEventListener('pause', () => {
-      const blob = new Blob(chunks)
-      this.onFinish(blob)
-      this.blob = blob
-      chunks = []
+      finishRecord()
     })
     mediaRecorder.addEventListener('stop', () => {
-      const blob = new Blob(chunks)
-      this.onFinish(blob)
-      this.mediaRecorder = null
-      this.blob = blob
-      chunks = []
+      finishRecord()
       stream.getTracks().forEach((track) => track.stop())
     })