kvm/ui/src/hooks/useAudioLevel.ts

import { useEffect, useRef, useState } from 'react';

interface AudioLevelHookResult {
  audioLevel: number; // 0-100 percentage
  isAnalyzing: boolean;
}

/**
 * Tracks the loudness of a MediaStream's audio via the Web Audio API.
 * Returns a 0-100 level, updated once per animation frame while the
 * stream has at least one audio track.
 */
export const useAudioLevel = (stream: MediaStream | null): AudioLevelHookResult => {
  const [audioLevel, setAudioLevel] = useState(0);
  const [isAnalyzing, setIsAnalyzing] = useState(false);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
  const animationFrameRef = useRef<number | null>(null);
  useEffect(() => {
    if (!stream) {
      // Clean up when the stream is removed
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      if (sourceRef.current) {
        sourceRef.current.disconnect();
        sourceRef.current = null;
      }
      if (audioContextRef.current) {
        audioContextRef.current.close();
        audioContextRef.current = null;
      }
      analyserRef.current = null;
      setIsAnalyzing(false);
      setAudioLevel(0);
      return;
    }

    // Nothing to analyze if the stream carries no audio tracks
    const audioTracks = stream.getAudioTracks();
    if (audioTracks.length === 0) {
      setIsAnalyzing(false);
      setAudioLevel(0);
      return;
    }
    try {
      // Create audio context and analyser (webkit prefix for older Safari)
      const audioContext = new (window.AudioContext ||
        (window as Window & { webkitAudioContext?: typeof AudioContext }).webkitAudioContext)();
      const analyser = audioContext.createAnalyser();
      const source = audioContext.createMediaStreamSource(stream);

      // Configure analyser
      analyser.fftSize = 256;
      analyser.smoothingTimeConstant = 0.8;
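      // With fftSize = 256 the analyser exposes frequencyBinCount = 128 bins,
      // and smoothingTimeConstant = 0.8 time-averages successive FFT frames,
      // which keeps the meter from flickering on transient sounds.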
      // Connect nodes
      source.connect(analyser);

      // Store references
      audioContextRef.current = audioContext;
      analyserRef.current = analyser;
      sourceRef.current = source;

      const dataArray = new Uint8Array(analyser.frequencyBinCount);
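      // getByteFrequencyData fills dataArray with per-bin magnitudes scaled
      // from the analyser's [minDecibels, maxDecibels] range onto 0-255.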
      const updateLevel = () => {
        if (!analyserRef.current) return;
        analyserRef.current.getByteFrequencyData(dataArray);

        // RMS (root mean square) across the frequency bins gives a steadier
        // loudness estimate than a single peak value
        let sum = 0;
        for (const value of dataArray) {
          sum += value * value;
        }
        const rms = Math.sqrt(sum / dataArray.length);

        // Convert to a 0-100 percentage (byte values cap at 255)
        const level = Math.min(100, (rms / 255) * 100);

        setAudioLevel(level);
        animationFrameRef.current = requestAnimationFrame(updateLevel);
      };
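      // updateLevel re-schedules itself via requestAnimationFrame; the
      // cleanup below cancels the pending frame to stop the loop.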
      setIsAnalyzing(true);
      updateLevel();
    } catch (error) {
      console.error('Failed to create audio level analyzer:', error);
      setIsAnalyzing(false);
      setAudioLevel(0);
    }
    // Cleanup on unmount or when the stream changes
    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      if (sourceRef.current) {
        sourceRef.current.disconnect();
        sourceRef.current = null;
      }
      if (audioContextRef.current) {
        audioContextRef.current.close();
        audioContextRef.current = null;
      }
      analyserRef.current = null;
      setIsAnalyzing(false);
      setAudioLevel(0);
    };
  }, [stream]);

  return { audioLevel, isAnalyzing };
};
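
For reference, a minimal consumer might look like the sketch below. It assumes a React TSX environment; the AudioLevelMeter component name and the inline bar styling are hypothetical illustrations, not part of this module.

// Hypothetical usage sketch (TSX), not part of useAudioLevel.ts itself.
import { useAudioLevel } from './useAudioLevel';

export const AudioLevelMeter = ({ stream }: { stream: MediaStream | null }) => {
  const { audioLevel, isAnalyzing } = useAudioLevel(stream);

  if (!isAnalyzing) return <span>no audio</span>;

  // Render a simple horizontal bar whose fill width tracks the 0-100 level
  return (
    <div style={{ width: 100, height: 8, background: '#eee' }}>
      <div style={{ width: `${audioLevel}%`, height: '100%', background: '#4ade80' }} />
    </div>
  );
};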