fix audio input infinite rendering (#496)

This commit is contained in:
heheer
2023-11-20 18:43:10 +08:00
committed by GitHub
parent b05dd0fde1
commit 9c4eabfc9e

View File

@@ -1,4 +1,4 @@
-import { useEffect, useMemo, useRef, useState } from 'react';
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { POST } from '../api/request';
 import { useToast } from './useToast';
 import { useTranslation } from 'next-i18next';
@@ -8,7 +8,8 @@ export const useSpeech = (props?: { shareId?: string }) => {
   const { shareId } = props || {};
   const { t } = useTranslation();
   const mediaRecorder = useRef<MediaRecorder>();
-  const mediaStream = useRef<MediaStream>();
+  // const mediaStream = useRef<MediaStream>();
+  const [mediaStream, setMediaStream] = useState<MediaStream>();
   const { toast } = useToast();
   const [isSpeaking, setIsSpeaking] = useState(false);
   const [isTransCription, setIsTransCription] = useState(false);
@@ -24,7 +25,7 @@ export const useSpeech = (props?: { shareId?: string }) => {
     return `${formattedMinutes}:${formattedSeconds}`;
   }, [audioSecond]);

-  const renderAudioGraph = (analyser: AnalyserNode, canvas: HTMLCanvasElement) => {
+  const renderAudioGraph = useCallback((analyser: AnalyserNode, canvas: HTMLCanvasElement) => {
     const bufferLength = analyser.frequencyBinCount;
     const backgroundColor = 'white';
     const dataArray = new Uint8Array(bufferLength);
@@ -47,12 +48,12 @@ export const useSpeech = (props?: { shareId?: string }) => {
       canvasCtx.fillRect(x, height - adjustedBarHeight, barWidth, adjustedBarHeight);
       x += barWidth + 1;
     }
-  };
+  }, []);

   const startSpeak = async (onFinish: (text: string) => void) => {
     try {
       const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-      mediaStream.current = stream;
+      setMediaStream(stream);
       mediaRecorder.current = new MediaRecorder(stream);
       const chunks: Blob[] = [];
       setIsSpeaking(true);
@@ -121,8 +122,8 @@ export const useSpeech = (props?: { shareId?: string }) => {
       if (mediaRecorder.current && mediaRecorder.current.state !== 'inactive') {
         mediaRecorder.current.stop();
       }
-      if (mediaStream.current) {
-        mediaStream.current.getTracks().forEach((track) => track.stop());
+      if (mediaStream) {
+        mediaStream.getTracks().forEach((track) => track.stop());
       }
     };
  }, []);
@@ -133,7 +134,7 @@ export const useSpeech = (props?: { shareId?: string }) => {
    isSpeaking,
    isTransCription,
    renderAudioGraph,
-    stream: mediaStream.current,
+    stream: mediaStream,
    speakingTimeString
  };
};