fix(voice): cleanup audio context on processing init failure

This commit is contained in:
ServerBob 2026-03-05 06:41:13 +00:00
parent 7a85935eee
commit bdd5ff768e
1 changed file with 93 additions and 67 deletions

View File

@ -415,79 +415,105 @@ const VoiceProvider = memo(({ children }: TVoiceProviderProps) => {
let rnnoiseNode: AudioWorkletNode | undefined; let rnnoiseNode: AudioWorkletNode | undefined;
let gateNode: AudioWorkletNode | undefined; let gateNode: AudioWorkletNode | undefined;
if (useDeepFilterNet) { const cleanupLocalProcessing = async () => {
const deepFilterSuppression = sensitivityToDeepFilterLevel( try {
devices.voiceSensitivity ?? 70 gateNode?.disconnect();
); rnnoiseNode?.disconnect();
await withDeepFilterFetchProxy(async () => { deepFilterNode?.disconnect();
deepFilterCore = new DeepFilterNet3Core({ source.disconnect();
sampleRate: 48000, dest.disconnect();
noiseReductionLevel: deepFilterSuppression, deepFilterCore?.destroy?.();
assetConfig: { } catch {
cdnUrl: DEEPFILTER_LOCAL_CDN_URL // ignore cleanup failures
}
});
await deepFilterCore.initialize();
deepFilterNode = await deepFilterCore.createAudioWorkletNode(ctx);
});
if (!deepFilterNode) {
throw new Error('DeepFilterNet worklet node was not created');
} }
// Apply again after node creation; pre-node setSuppressionLevel is a no-op.
deepFilterCore?.setSuppressionLevel(deepFilterSuppression);
current.connect(deepFilterNode);
current = deepFilterNode;
}
if (useRnnoise) { try {
await ctx.audioWorklet.addModule(NoiseSuppressorWorklet); if (ctx.state !== 'closed') {
rnnoiseNode = new AudioWorkletNode(ctx, NoiseSuppressorWorklet_Name); await ctx.close();
current.connect(rnnoiseNode);
current = rnnoiseNode;
}
if (useKeyboardSuppression) {
await ctx.audioWorklet.addModule(KeyboardNoiseGateWorklet);
gateNode = new AudioWorkletNode(ctx, 'keyboard-noise-gate-processor', {
parameterData: {
threshold: sensitivityToThreshold(devices.voiceSensitivity ?? 70),
floor: 0.04,
attack: 0.65,
release: 0.985
} }
}); } catch {
current.connect(gateNode); // ignore context close failures
current = gateNode; }
}
current.connect(dest);
audioProcessingRef.current = {
ctx,
source,
deepFilterNode,
deepFilterCore,
rnnoiseNode,
gateNode,
dest,
input
}; };
const chain = [ try {
useDeepFilterNet ? 'DeepFilterNet' : '', if (useDeepFilterNet) {
useRnnoise ? 'RNNoise' : '', const deepFilterSuppression = sensitivityToDeepFilterLevel(
useKeyboardSuppression ? 'Keyboard Gate' : '' devices.voiceSensitivity ?? 70
] );
.filter(Boolean) await withDeepFilterFetchProxy(async () => {
.join(' + '); deepFilterCore = new DeepFilterNet3Core({
sampleRate: 48000,
noiseReductionLevel: deepFilterSuppression,
assetConfig: {
cdnUrl: DEEPFILTER_LOCAL_CDN_URL
}
});
await deepFilterCore.initialize();
deepFilterNode = await deepFilterCore.createAudioWorkletNode(ctx);
});
if (!deepFilterNode) {
throw new Error('DeepFilterNet worklet node was not created');
}
// Apply again after node creation; pre-node setSuppressionLevel is a no-op.
deepFilterCore?.setSuppressionLevel(deepFilterSuppression);
current.connect(deepFilterNode);
current = deepFilterNode;
}
setMicProcessingStatus({ if (useRnnoise) {
active: true, await ctx.audioWorklet.addModule(NoiseSuppressorWorklet);
chain: chain || 'none', rnnoiseNode = new AudioWorkletNode(ctx, NoiseSuppressorWorklet_Name);
note: 'Client-side processing active' current.connect(rnnoiseNode);
}); current = rnnoiseNode;
}
return dest.stream; if (useKeyboardSuppression) {
await ctx.audioWorklet.addModule(KeyboardNoiseGateWorklet);
gateNode = new AudioWorkletNode(ctx, 'keyboard-noise-gate-processor', {
parameterData: {
threshold: sensitivityToThreshold(devices.voiceSensitivity ?? 70),
floor: 0.04,
attack: 0.65,
release: 0.985
}
});
current.connect(gateNode);
current = gateNode;
}
current.connect(dest);
audioProcessingRef.current = {
ctx,
source,
deepFilterNode,
deepFilterCore,
rnnoiseNode,
gateNode,
dest,
input
};
const chain = [
useDeepFilterNet ? 'DeepFilterNet' : '',
useRnnoise ? 'RNNoise' : '',
useKeyboardSuppression ? 'Keyboard Gate' : ''
]
.filter(Boolean)
.join(' + ');
setMicProcessingStatus({
active: true,
chain: chain || 'none',
note: 'Client-side processing active'
});
return dest.stream;
} catch (err) {
await cleanupLocalProcessing();
throw err;
}
}, [devices.noiseSuppressionDeepFilterNet, devices.noiseSuppressionRnnoise, devices.keyboardSuppression, devices.voiceSensitivity, cleanupAudioProcessing]); }, [devices.noiseSuppressionDeepFilterNet, devices.noiseSuppressionRnnoise, devices.keyboardSuppression, devices.voiceSensitivity, cleanupAudioProcessing]);
const acquireMicStream = useCallback(async (): Promise<{ stream: MediaStream; track: MediaStreamTrack; raw: MediaStream }> => { const acquireMicStream = useCallback(async (): Promise<{ stream: MediaStream; track: MediaStreamTrack; raw: MediaStream }> => {
@ -537,7 +563,7 @@ const VoiceProvider = memo(({ children }: TVoiceProviderProps) => {
stopRawMic(); stopRawMic();
rawMicStreamRef.current = raw; rawMicStreamRef.current = raw;
if (track) { if (track) {
logVoice('Obtained audio track', { audioTrack: track }); logVoice('Obtained audio track', { audioTrack: track });
localAudioProducer.current = await producerTransport.current?.produce({ localAudioProducer.current = await producerTransport.current?.produce({