mirror of https://github.com/jetkvm/kvm.git
feat: show audio level metrics in connection stats sidebar
parent c67b93578f
commit d68dc4eee8
@@ -208,6 +208,8 @@
   "connection_stats_connection_description": "The connection between the client and the JetKVM.",
   "connection_stats_frames_per_second": "Frames per second",
   "connection_stats_frames_per_second_description": "Number of inbound video frames displayed per second.",
+  "connection_stats_audio_level": "Audio Level",
+  "connection_stats_audio_level_description": "The level of the audio stream from the JetKVM to the client.",
   "connection_stats_network_stability": "Network Stability",
   "connection_stats_network_stability_description": "How steady the flow of inbound video packets is across the network.",
   "connection_stats_packets_lost": "Packets Lost",
@@ -220,8 +222,11 @@
   "connection_stats_unit_frames_per_second": " fps",
   "connection_stats_unit_milliseconds": " ms",
   "connection_stats_unit_packets": " packets",
+  "connection_stats_unit_decibels": " dB",
   "connection_stats_video": "Video",
   "connection_stats_video_description": "The video stream from the JetKVM to the client.",
+  "connection_stats_audio": "Audio",
+  "connection_stats_audio_description": "The audio stream from the JetKVM to the client.",
   "continue": "Continue",
   "creating_peer_connection": "Creating peer connection…",
   "dc_power_control_current": "Current",
@@ -13,62 +13,15 @@ import { Button } from "@components/Button";
 import { useCopyToClipboard } from "@components/useCopyToClipBoard";
 import notifications from "@/notifications";
 
-export default function ConnectionStatsSidebar() {
-  const { sidebarView, setSidebarView } = useUiStore();
-  const {
-    mediaStream,
-    peerConnection,
-    inboundRtpStats: inboundVideoRtpStats,
-    appendInboundRtpStats: appendInboundVideoRtpStats,
-    candidatePairStats: iceCandidatePairStats,
-    appendCandidatePairStats,
-    appendLocalCandidateStats,
-    appendRemoteCandidateStats,
-    appendDiskDataChannelStats,
-  } = useRTCStore();
-
-  const [remoteIPAddress, setRemoteIPAddress] = useState<string | null>(null);
-
-  useInterval(function collectWebRTCStats() {
-    (async () => {
-      if (!mediaStream) return;
-
-      const videoTrack = mediaStream.getVideoTracks()[0];
-      if (!videoTrack) return;
-
-      const stats = await peerConnection?.getStats();
-      let successfulLocalCandidateId: string | null = null;
-      let successfulRemoteCandidateId: string | null = null;
-
-      stats?.forEach(report => {
-        if (report.type === "inbound-rtp" && report.kind === "video") {
-          appendInboundVideoRtpStats(report);
-        } else if (report.type === "candidate-pair" && report.nominated) {
-          if (report.state === "succeeded") {
-            successfulLocalCandidateId = report.localCandidateId;
-            successfulRemoteCandidateId = report.remoteCandidateId;
-          }
-          appendCandidatePairStats(report);
-        } else if (report.type === "local-candidate") {
-          // We only want to append the local candidate stats that were used in nominated candidate pair
-          if (successfulLocalCandidateId === report.id) {
-            appendLocalCandidateStats(report);
-          }
-        } else if (report.type === "remote-candidate") {
-          if (successfulRemoteCandidateId === report.id) {
-            appendRemoteCandidateStats(report);
-            setRemoteIPAddress(report.address);
-          }
-        } else if (report.type === "data-channel" && report.label === "disk") {
-          appendDiskDataChannelStats(report);
-        }
-      });
-    })();
-  }, 500);
-
-  const jitterBufferDelay = createChartArray(inboundVideoRtpStats, "jitterBufferDelay");
+interface RtpStatChartsProps {
+  inboundRtpStats: Map<number, RTCInboundRtpStreamStats>;
+  showFramesPerSecond?: boolean;
+  showAudioLevel?: boolean;
+}
+function RtpStatCharts({ inboundRtpStats, showFramesPerSecond, showAudioLevel }: RtpStatChartsProps) {
+  const jitterBufferDelay = createChartArray(inboundRtpStats, "jitterBufferDelay");
   const jitterBufferEmittedCount = createChartArray(
-    inboundVideoRtpStats,
+    inboundRtpStats,
     "jitterBufferEmittedCount",
   );
 
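The extracted RtpStatCharts component leans on the existing createChartArray helper to turn the Map of sampled inbound-rtp stats into chart points for a single metric; the helper itself is not part of this diff. A minimal sketch of the shape it is assumed to have, given how it is called above and how the Metric charts consume { date, metric } points:

// Hypothetical sketch only — the real createChartArray lives elsewhere in the repo.
// Assumptions: the Map is keyed by a sample timestamp/index, and the helper pulls one
// numeric field out of each RTCInboundRtpStreamStats sample.
function createChartArray<K extends keyof RTCInboundRtpStreamStats>(
  stream: Map<number, RTCInboundRtpStreamStats>,
  metric: K,
): { date: number; metric: number | null }[] {
  return Array.from(stream, ([date, stat]) => ({
    date,
    metric: typeof stat[metric] === "number" ? (stat[metric] as number) : null,
  }));
}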
@@ -102,6 +55,137 @@ export default function ConnectionStatsSidebar() {
     return { date: d.date, metric: valueMs };
   });
 
+  return (<>
+    {/* RTP Jitter */}
+    <Metric
+      title={m.connection_stats_network_stability()}
+      badge={m.connection_stats_badge_jitter()}
+      badgeTheme="light"
+      description={m.connection_stats_network_stability_description()}
+      stream={inboundRtpStats}
+      metric="jitter"
+      map={x => ({
+        date: x.date,
+        metric: x.metric != null ? Math.round(x.metric * 1000) : null,
+      })}
+      domain={[0, 10]}
+      unit={m.connection_stats_unit_milliseconds()}
+    />
+
+    {/* Playback Delay */}
+    <Metric
+      title={m.connection_stats_playback_delay()}
+      description={m.connection_stats_playback_delay_description()}
+      badge={m.connection_stats_badge_jitter_buffer_avg_delay()}
+      badgeTheme="light"
+      data={jitterBufferAvgDelayData}
+      gate={inboundRtpStats}
+      supported={
+        someIterable(
+          inboundRtpStats,
+          ([, x]) => x.jitterBufferDelay != null,
+        ) &&
+        someIterable(
+          inboundRtpStats,
+          ([, x]) => x.jitterBufferEmittedCount != null,
+        )
+      }
+      domain={[0, 30]}
+      unit={m.connection_stats_unit_milliseconds()}
+    />
+
+    {/* Packets Lost */}
+    <Metric
+      title={m.connection_stats_packets_lost()}
+      description={m.connection_stats_packets_lost_description()}
+      stream={inboundRtpStats}
+      metric="packetsLost"
+      domain={[0, 100]}
+      unit={m.connection_stats_unit_packets()}
+    />
+
+    {/* Frames Per Second */}
+    {showFramesPerSecond && <Metric
+      title={m.connection_stats_frames_per_second()}
+      description={m.connection_stats_frames_per_second_description()}
+      stream={inboundRtpStats}
+      metric="framesPerSecond"
+      domain={[0, 80]}
+      unit={m.connection_stats_unit_frames_per_second()}
+    />}
+
+    {showAudioLevel && <Metric
+      title={m.connection_stats_audio_level()}
+      description={m.connection_stats_audio_level_description()}
+      stream={inboundRtpStats}
+      metric="audioLevel"
+      domain={[0, 1]}
+      unit={m.connection_stats_unit_decibels()}
+    />}
+  </>);
+}
+
+export default function ConnectionStatsSidebar() {
+  const { sidebarView, setSidebarView } = useUiStore();
+  const {
+    mediaStream,
+    peerConnection,
+    inboundVideoRtpStats,
+    appendInboundVideoRtpStats,
+    inboundAudioRtpStats,
+    appendInboundAudioRtpStats,
+    candidatePairStats: iceCandidatePairStats,
+    appendCandidatePairStats,
+    appendLocalCandidateStats,
+    appendRemoteCandidateStats,
+    appendDiskDataChannelStats,
+  } = useRTCStore();
+
+  const [remoteIPAddress, setRemoteIPAddress] = useState<string | null>(null);
+
+  useInterval(function collectWebRTCStats() {
+    (async () => {
+      if (!mediaStream) return;
+
+      const videoTrack = mediaStream.getVideoTracks()[0];
+      if (!videoTrack) return;
+
+      const stats = await peerConnection?.getStats();
+      let successfulLocalCandidateId: string | null = null;
+      let successfulRemoteCandidateId: string | null = null;
+
+      stats?.forEach(report => {
+        if (report.type === "inbound-rtp") {
+          if (report.kind === "video") {
+            appendInboundVideoRtpStats(report);
+          } else if (report.kind === "audio") {
+            appendInboundAudioRtpStats(report);
+          }
+        } else if (report.type === "candidate-pair" && report.nominated) {
+          if (report.state === "succeeded") {
+            successfulLocalCandidateId = report.localCandidateId;
+            successfulRemoteCandidateId = report.remoteCandidateId;
+          }
+          appendCandidatePairStats(report);
+        } else if (report.type === "local-candidate") {
+          // We only want to append the local candidate stats that were used in nominated candidate pair
+          if (successfulLocalCandidateId === report.id) {
+            appendLocalCandidateStats(report);
+          }
+        } else if (report.type === "remote-candidate") {
+          if (successfulRemoteCandidateId === report.id) {
+            appendRemoteCandidateStats(report);
+            setRemoteIPAddress(report.address);
+          }
+        } else if (report.type === "data-channel" && report.label === "disk") {
+          appendDiskDataChannelStats(report);
+        }
+      });
+    })();
+  }, 500);
+
+
+
   const { copy } = useCopyToClipboard();
 
   return (
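A note on the new audio-level chart: the inbound-rtp audioLevel statistic is reported by WebRTC as a linear value between 0 and 1, which matches the domain={[0, 1]} above, while the unit string added to the locale file is " dB". If a true decibel reading were ever wanted, a conversion could be fed through the Metric's map prop along these lines (a sketch only, not part of this commit):

// Sketch: convert a linear audioLevel sample (0..1) into decibels relative to full scale.
// The rounding and null handling mirror the jitter map above; everything else is assumed.
const audioLevelToDb = (x: { date: number; metric: number | null }) => ({
  date: x.date,
  metric:
    x.metric != null && x.metric > 0
      ? Math.round(20 * Math.log10(x.metric)) // 1.0 -> 0 dB, quieter samples go negative
      : null,
});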
@@ -159,63 +243,16 @@ export default function ConnectionStatsSidebar() {
             description={m.connection_stats_video_description()}
           />
 
-          {/* RTP Jitter */}
-          <Metric
-            title={m.connection_stats_network_stability()}
-            badge={m.connection_stats_badge_jitter()}
-            badgeTheme="light"
-            description={m.connection_stats_network_stability_description()}
-            stream={inboundVideoRtpStats}
-            metric="jitter"
-            map={x => ({
-              date: x.date,
-              metric: x.metric != null ? Math.round(x.metric * 1000) : null,
-            })}
-            domain={[0, 10]}
-            unit={m.connection_stats_unit_milliseconds()}
-          />
+          <RtpStatCharts inboundRtpStats={inboundVideoRtpStats} showFramesPerSecond />
+        </div>
 
-          {/* Playback Delay */}
-          <Metric
-            title={m.connection_stats_playback_delay()}
-            description={m.connection_stats_playback_delay_description()}
-            badge={m.connection_stats_badge_jitter_buffer_avg_delay()}
-            badgeTheme="light"
-            data={jitterBufferAvgDelayData}
-            gate={inboundVideoRtpStats}
-            supported={
-              someIterable(
-                inboundVideoRtpStats,
-                ([, x]) => x.jitterBufferDelay != null,
-              ) &&
-              someIterable(
-                inboundVideoRtpStats,
-                ([, x]) => x.jitterBufferEmittedCount != null,
-              )
-            }
-            domain={[0, 30]}
-            unit={m.connection_stats_unit_milliseconds()}
-          />
-
-          {/* Packets Lost */}
-          <Metric
-            title={m.connection_stats_packets_lost()}
-            description={m.connection_stats_packets_lost_description()}
-            stream={inboundVideoRtpStats}
-            metric="packetsLost"
-            domain={[0, 100]}
-            unit={m.connection_stats_unit_packets()}
-          />
-
-          {/* Frames Per Second */}
-          <Metric
-            title={m.connection_stats_frames_per_second()}
-            description={m.connection_stats_frames_per_second_description()}
-            stream={inboundVideoRtpStats}
-            metric="framesPerSecond"
-            domain={[0, 80]}
-            unit={m.connection_stats_unit_frames_per_second()}
+        {/* Audio Group */}
+        <div className="space-y-3">
+          <SettingsSectionHeader
+            title={m.connection_stats_audio()}
+            description={m.connection_stats_audio_description()}
           />
+          <RtpStatCharts inboundRtpStats={inboundAudioRtpStats} showAudioLevel />
         </div>
       </div>
     )}
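The supported check inside the extracted RtpStatCharts calls a someIterable helper over the stats Map; its implementation is not shown in this diff. Judging from the call sites, it presumably behaves like Array.prototype.some over any iterable, roughly:

// Hypothetical sketch of someIterable, assuming Array.prototype.some semantics over an iterable.
// A Map<number, RTCInboundRtpStreamStats> iterates as [key, value] entries, which is why the
// call sites destructure ([, x]).
function someIterable<T>(iterable: Iterable<T>, predicate: (item: T) => boolean): boolean {
  for (const item of iterable) {
    if (predicate(item)) return true;
  }
  return false;
}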
@@ -153,8 +153,10 @@ export interface RTCState {
   isTurnServerInUse: boolean;
   setTurnServerInUse: (inUse: boolean) => void;
 
-  inboundRtpStats: Map<number, RTCInboundRtpStreamStats>;
-  appendInboundRtpStats: (stats: RTCInboundRtpStreamStats) => void;
+  inboundVideoRtpStats: Map<number, RTCInboundRtpStreamStats>;
+  appendInboundVideoRtpStats: (stats: RTCInboundRtpStreamStats) => void;
+  inboundAudioRtpStats: Map<number, RTCInboundRtpStreamStats>;
+  appendInboundAudioRtpStats: (stats: RTCInboundRtpStreamStats) => void;
   clearInboundRtpStats: () => void;
 
   candidatePairStats: Map<number, RTCIceCandidatePairStats>;
@@ -218,13 +220,19 @@ export const useRTCStore = create<RTCState>(set => ({
   isTurnServerInUse: false,
   setTurnServerInUse: inUse => set({ isTurnServerInUse: inUse }),
 
-  inboundRtpStats: new Map(),
-  appendInboundRtpStats: stats => {
+  inboundVideoRtpStats: new Map(),
+  appendInboundVideoRtpStats: stats => {
     set(prevState => ({
-      inboundRtpStats: appendStatToMap(stats, prevState.inboundRtpStats),
+      inboundVideoRtpStats: appendStatToMap(stats, prevState.inboundVideoRtpStats),
     }));
   },
-  clearInboundRtpStats: () => set({ inboundRtpStats: new Map() }),
+  inboundAudioRtpStats: new Map(),
+  appendInboundAudioRtpStats: stats => {
+    set(prevState => ({
+      inboundAudioRtpStats: appendStatToMap(stats, prevState.inboundAudioRtpStats),
+    }));
+  },
+  clearInboundRtpStats: () => set({ inboundVideoRtpStats: new Map(), inboundAudioRtpStats: new Map() }),
 
   candidatePairStats: new Map(),
   appendCandidatePairStats: stats => {
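Both append actions delegate to appendStatToMap, which is not shown in this diff. Given that the maps are typed Map<number, RTCInboundRtpStreamStats> and drive rolling charts, it presumably keys each sample (for example by timestamp) and returns a fresh, bounded Map so Zustand registers a state change. A rough sketch under those assumptions:

// Hypothetical sketch of appendStatToMap; the real helper lives elsewhere in the repo.
// Assumptions: samples are keyed by their RTCStats timestamp and trimmed to a bounded
// window so the charts do not grow without limit.
const MAX_SAMPLES = 120; // assumed cap, not taken from this commit

function appendStatToMap<T extends { timestamp: number }>(
  stat: T,
  map: Map<number, T>,
): Map<number, T> {
  const next = new Map(map);
  next.set(Math.floor(stat.timestamp), stat);
  if (next.size > MAX_SAMPLES) {
    const oldestKey = next.keys().next().value; // Maps iterate in insertion order
    if (oldestKey !== undefined) next.delete(oldestKey);
  }
  return next;
}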