mirror of https://github.com/jetkvm/kvm.git
[WIP] Updates: audio output & input subprocess memory & CPU usage
This commit is contained in:
parent ddc2f90016
commit 27a999c58a
@@ -92,32 +92,26 @@ var (
 	audioEventOnce sync.Once
 )
 
+// initializeBroadcaster creates and initializes the audio event broadcaster
+func initializeBroadcaster() {
+	l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
+	audioEventBroadcaster = &AudioEventBroadcaster{
+		subscribers: make(map[string]*AudioEventSubscriber),
+		logger:      &l,
+	}
+
+	// Start metrics broadcasting goroutine
+	go audioEventBroadcaster.startMetricsBroadcasting()
+}
+
 // InitializeAudioEventBroadcaster initializes the global audio event broadcaster
 func InitializeAudioEventBroadcaster() {
-	audioEventOnce.Do(func() {
-		l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
-		audioEventBroadcaster = &AudioEventBroadcaster{
-			subscribers: make(map[string]*AudioEventSubscriber),
-			logger:      &l,
-		}
-
-		// Start metrics broadcasting goroutine
-		go audioEventBroadcaster.startMetricsBroadcasting()
-	})
+	audioEventOnce.Do(initializeBroadcaster)
 }
 
 // GetAudioEventBroadcaster returns the singleton audio event broadcaster
 func GetAudioEventBroadcaster() *AudioEventBroadcaster {
-	audioEventOnce.Do(func() {
-		l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
-		audioEventBroadcaster = &AudioEventBroadcaster{
-			subscribers: make(map[string]*AudioEventSubscriber),
-			logger:      &l,
-		}
-
-		// Start metrics broadcasting goroutine
-		go audioEventBroadcaster.startMetricsBroadcasting()
-	})
+	audioEventOnce.Do(initializeBroadcaster)
 	return audioEventBroadcaster
 }
 
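This hunk removes the duplicated initialization closure: both InitializeAudioEventBroadcaster and GetAudioEventBroadcaster now funnel through the same sync.Once with a named initializeBroadcaster function, so the broadcaster and its metrics goroutine are created exactly once no matter which entry point runs first. A minimal, self-contained sketch of that pattern follows; the type and function names here are illustrative stand-ins, not the repository's own.

package main

import (
	"fmt"
	"sync"
)

// broadcaster stands in for AudioEventBroadcaster; the real type lives in the repository.
type broadcaster struct {
	subscribers map[string]chan string
}

var (
	instance *broadcaster
	once     sync.Once
)

// initBroadcaster mirrors the extracted initializeBroadcaster: the single place
// where the singleton is constructed (and where background work would be started).
func initBroadcaster() {
	instance = &broadcaster{subscribers: make(map[string]chan string)}
	fmt.Println("broadcaster initialized")
}

// Initialize and Get both funnel through the same sync.Once, so whichever runs
// first performs the construction, and it happens exactly once.
func Initialize() { once.Do(initBroadcaster) }

func Get() *broadcaster {
	once.Do(initBroadcaster)
	return instance
}

func main() {
	Initialize()
	b := Get() // does not re-run initBroadcaster
	fmt.Println(b.subscribers != nil)
}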
@@ -157,22 +151,16 @@ func (aeb *AudioEventBroadcaster) Unsubscribe(connectionID string) {
 
 // BroadcastAudioMuteChanged broadcasts audio mute state changes
 func (aeb *AudioEventBroadcaster) BroadcastAudioMuteChanged(muted bool) {
-	event := AudioEvent{
-		Type: AudioEventMuteChanged,
-		Data: AudioMuteData{Muted: muted},
-	}
+	event := createAudioEvent(AudioEventMuteChanged, AudioMuteData{Muted: muted})
 	aeb.broadcast(event)
 }
 
 // BroadcastMicrophoneStateChanged broadcasts microphone state changes
 func (aeb *AudioEventBroadcaster) BroadcastMicrophoneStateChanged(running, sessionActive bool) {
-	event := AudioEvent{
-		Type: AudioEventMicrophoneState,
-		Data: MicrophoneStateData{
-			Running:       running,
-			SessionActive: sessionActive,
-		},
-	}
+	event := createAudioEvent(AudioEventMicrophoneState, MicrophoneStateData{
+		Running:       running,
+		SessionActive: sessionActive,
+	})
 	aeb.broadcast(event)
 }
 
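The two broadcast methods above now build their events through a createAudioEvent(type, data) helper (defined later in this diff) instead of repeating AudioEvent struct literals. A rough stand-alone sketch of that factory-style helper, using assumed stand-in type definitions rather than the repository's:

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for the repository's event types.
type AudioEventType string

type AudioEvent struct {
	Type AudioEventType `json:"type"`
	Data interface{}    `json:"data,omitempty"`
}

type AudioMuteData struct {
	Muted bool `json:"muted"`
}

// createAudioEvent builds an event envelope from a type tag and payload,
// replacing repeated struct literals at every call site.
func createAudioEvent(eventType AudioEventType, data interface{}) AudioEvent {
	return AudioEvent{Type: eventType, Data: data}
}

func main() {
	event := createAudioEvent("audio-mute-changed", AudioMuteData{Muted: true})
	b, _ := json.Marshal(event)
	fmt.Println(string(b)) // {"type":"audio-mute-changed","data":{"muted":true}}
}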
@@ -217,31 +205,121 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
 }
 
 // getMicrophoneProcessMetrics returns microphone process metrics data, always providing a valid response
-// getInactiveProcessMetrics returns ProcessMetricsData for an inactive audio input process
-func getInactiveProcessMetrics() ProcessMetricsData {
-	return ProcessMetricsData{
-		PID:           0,
-		CPUPercent:    0.0,
-		MemoryRSS:     0,
-		MemoryVMS:     0,
-		MemoryPercent: 0.0,
-		Running:       false,
-		ProcessName:   "audio-input-server",
+// convertAudioMetricsToEventData converts internal audio metrics to AudioMetricsData for events
+func convertAudioMetricsToEventData(metrics AudioMetrics) AudioMetricsData {
+	return AudioMetricsData{
+		FramesReceived:  metrics.FramesReceived,
+		FramesDropped:   metrics.FramesDropped,
+		BytesProcessed:  metrics.BytesProcessed,
+		LastFrameTime:   metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+		ConnectionDrops: metrics.ConnectionDrops,
+		AverageLatency:  metrics.AverageLatency.String(),
 	}
 }
 
-func (aeb *AudioEventBroadcaster) getMicrophoneProcessMetrics() ProcessMetricsData {
+// convertAudioMetricsToEventDataWithLatencyMs converts internal audio metrics to AudioMetricsData with millisecond latency formatting
+func convertAudioMetricsToEventDataWithLatencyMs(metrics AudioMetrics) AudioMetricsData {
+	return AudioMetricsData{
+		FramesReceived:  metrics.FramesReceived,
+		FramesDropped:   metrics.FramesDropped,
+		BytesProcessed:  metrics.BytesProcessed,
+		LastFrameTime:   metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+		ConnectionDrops: metrics.ConnectionDrops,
+		AverageLatency:  fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6),
+	}
+}
+
+// convertAudioInputMetricsToEventData converts internal audio input metrics to MicrophoneMetricsData for events
+func convertAudioInputMetricsToEventData(metrics AudioInputMetrics) MicrophoneMetricsData {
+	return MicrophoneMetricsData{
+		FramesSent:      metrics.FramesSent,
+		FramesDropped:   metrics.FramesDropped,
+		BytesProcessed:  metrics.BytesProcessed,
+		LastFrameTime:   metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+		ConnectionDrops: metrics.ConnectionDrops,
+		AverageLatency:  metrics.AverageLatency.String(),
+	}
+}
+
+// convertAudioInputMetricsToEventDataWithLatencyMs converts internal audio input metrics to MicrophoneMetricsData with millisecond latency formatting
+func convertAudioInputMetricsToEventDataWithLatencyMs(metrics AudioInputMetrics) MicrophoneMetricsData {
+	return MicrophoneMetricsData{
+		FramesSent:      metrics.FramesSent,
+		FramesDropped:   metrics.FramesDropped,
+		BytesProcessed:  metrics.BytesProcessed,
+		LastFrameTime:   metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+		ConnectionDrops: metrics.ConnectionDrops,
+		AverageLatency:  fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6),
+	}
+}
+
+// convertProcessMetricsToEventData converts internal process metrics to ProcessMetricsData for events
+func convertProcessMetricsToEventData(metrics ProcessMetrics, running bool) ProcessMetricsData {
+	return ProcessMetricsData{
+		PID:           metrics.PID,
+		CPUPercent:    metrics.CPUPercent,
+		MemoryRSS:     metrics.MemoryRSS,
+		MemoryVMS:     metrics.MemoryVMS,
+		MemoryPercent: metrics.MemoryPercent,
+		Running:       running,
+		ProcessName:   metrics.ProcessName,
+	}
+}
+
+// createProcessMetricsData creates ProcessMetricsData from ProcessMetrics with running status
+func createProcessMetricsData(metrics *ProcessMetrics, running bool, processName string) ProcessMetricsData {
+	if metrics == nil {
+		return ProcessMetricsData{
+			PID:           0,
+			CPUPercent:    0.0,
+			MemoryRSS:     0,
+			MemoryVMS:     0,
+			MemoryPercent: 0.0,
+			Running:       false,
+			ProcessName:   processName,
+		}
+	}
+	return ProcessMetricsData{
+		PID:           metrics.PID,
+		CPUPercent:    metrics.CPUPercent,
+		MemoryRSS:     metrics.MemoryRSS,
+		MemoryVMS:     metrics.MemoryVMS,
+		MemoryPercent: metrics.MemoryPercent,
+		Running:       running,
+		ProcessName:   metrics.ProcessName,
+	}
+}
+
+// getInactiveProcessMetrics returns ProcessMetricsData for an inactive audio input process
+func getInactiveProcessMetrics() ProcessMetricsData {
+	return createProcessMetricsData(nil, false, "audio-input-server")
+}
+
+// getActiveAudioInputSupervisor safely retrieves the audio input supervisor if session is active
+func getActiveAudioInputSupervisor() *AudioInputSupervisor {
 	sessionProvider := GetSessionProvider()
 	if !sessionProvider.IsSessionActive() {
-		return getInactiveProcessMetrics()
+		return nil
 	}
 
 	inputManager := sessionProvider.GetAudioInputManager()
 	if inputManager == nil {
-		return getInactiveProcessMetrics()
+		return nil
 	}
 
-	inputSupervisor := inputManager.GetSupervisor()
+	return inputManager.GetSupervisor()
+}
+
+// createAudioEvent creates an AudioEvent
+func createAudioEvent(eventType AudioEventType, data interface{}) AudioEvent {
+	return AudioEvent{
+		Type: eventType,
+		Data: data,
+	}
+}
+
+func (aeb *AudioEventBroadcaster) getMicrophoneProcessMetrics() ProcessMetricsData {
+	inputSupervisor := getActiveAudioInputSupervisor()
 	if inputSupervisor == nil {
 		return getInactiveProcessMetrics()
 	}
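Most of the new code in this hunk is small constructors that replace hand-written struct literals: the convert*ToEventData functions map internal metrics to event payloads, createProcessMetricsData handles the "no metrics available" case behind a nil check, and getActiveAudioInputSupervisor collapses the session/manager nil checks into one place. A hedged sketch of the nil-to-default pattern with deliberately simplified stand-in types (the repository's ProcessMetrics/ProcessMetricsData carry more fields):

package main

import "fmt"

// Simplified stand-ins for the repository's metric types.
type ProcessMetrics struct {
	PID         int
	CPUPercent  float64
	MemoryRSS   uint64
	ProcessName string
}

type ProcessMetricsData struct {
	PID         int
	CPUPercent  float64
	MemoryRSS   uint64
	Running     bool
	ProcessName string
}

// createProcessMetricsData returns zeroed metrics (with a caller-supplied name)
// when no live metrics exist, and copies the real values otherwise.
func createProcessMetricsData(m *ProcessMetrics, running bool, processName string) ProcessMetricsData {
	if m == nil {
		// Zero values: PID 0, CPU 0.0, not running.
		return ProcessMetricsData{ProcessName: processName}
	}
	return ProcessMetricsData{
		PID:         m.PID,
		CPUPercent:  m.CPUPercent,
		MemoryRSS:   m.MemoryRSS,
		Running:     running,
		ProcessName: m.ProcessName,
	}
}

func main() {
	inactive := createProcessMetricsData(nil, false, "audio-input-server")
	live := createProcessMetricsData(&ProcessMetrics{
		PID:         1234,
		CPUPercent:  2.5,
		MemoryRSS:   8 << 20,
		ProcessName: "audio-input-server",
	}, true, "")
	fmt.Printf("%+v\n%+v\n", inactive, live)
}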
@@ -252,63 +330,26 @@ func (aeb *AudioEventBroadcaster) getMicrophoneProcessMetrics() ProcessMetricsData {
 	}
 
 	// If process is running but CPU is 0%, it means we're waiting for the second sample
-	// to calculate CPU percentage. Return metrics with correct running status but skip CPU data.
+	// to calculate CPU percentage. Return metrics with correct running status.
 	if inputSupervisor.IsRunning() && processMetrics.CPUPercent == 0.0 {
-		return ProcessMetricsData{
-			PID:           processMetrics.PID,
-			CPUPercent:    0.0, // Keep 0% but with correct running status
-			MemoryRSS:     processMetrics.MemoryRSS,
-			MemoryVMS:     processMetrics.MemoryVMS,
-			MemoryPercent: processMetrics.MemoryPercent,
-			Running:       true, // Correctly show as running
-			ProcessName:   processMetrics.ProcessName,
-		}
+		return createProcessMetricsData(processMetrics, true, processMetrics.ProcessName)
 	}
 
 	// Subprocess is running, return actual metrics
-	return ProcessMetricsData{
-		PID:           processMetrics.PID,
-		CPUPercent:    processMetrics.CPUPercent,
-		MemoryRSS:     processMetrics.MemoryRSS,
-		MemoryVMS:     processMetrics.MemoryVMS,
-		MemoryPercent: processMetrics.MemoryPercent,
-		Running:       inputSupervisor.IsRunning(),
-		ProcessName:   processMetrics.ProcessName,
-	}
+	return createProcessMetricsData(processMetrics, inputSupervisor.IsRunning(), processMetrics.ProcessName)
 }
 
 // sendCurrentMetrics sends current audio and microphone metrics to a subscriber
 func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
 	// Send audio metrics
 	audioMetrics := GetAudioMetrics()
-	audioMetricsEvent := AudioEvent{
-		Type: AudioEventMetricsUpdate,
-		Data: AudioMetricsData{
-			FramesReceived:  audioMetrics.FramesReceived,
-			FramesDropped:   audioMetrics.FramesDropped,
-			BytesProcessed:  audioMetrics.BytesProcessed,
-			LastFrameTime:   audioMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-			ConnectionDrops: audioMetrics.ConnectionDrops,
-			AverageLatency:  audioMetrics.AverageLatency.String(),
-		},
-	}
+	audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventData(audioMetrics))
 	aeb.sendToSubscriber(subscriber, audioMetricsEvent)
 
 	// Send audio process metrics
 	if outputSupervisor := GetAudioOutputSupervisor(); outputSupervisor != nil {
 		if processMetrics := outputSupervisor.GetProcessMetrics(); processMetrics != nil {
-			audioProcessEvent := AudioEvent{
-				Type: AudioEventProcessMetrics,
-				Data: ProcessMetricsData{
-					PID:           processMetrics.PID,
-					CPUPercent:    processMetrics.CPUPercent,
-					MemoryRSS:     processMetrics.MemoryRSS,
-					MemoryVMS:     processMetrics.MemoryVMS,
-					MemoryPercent: processMetrics.MemoryPercent,
-					Running:       outputSupervisor.IsRunning(),
-					ProcessName:   processMetrics.ProcessName,
-				},
-			}
+			audioProcessEvent := createAudioEvent(AudioEventProcessMetrics, convertProcessMetricsToEventData(*processMetrics, outputSupervisor.IsRunning()))
 			aeb.sendToSubscriber(subscriber, audioProcessEvent)
 		}
 	}
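getMicrophoneProcessMetrics and sendCurrentMetrics shrink to one-line calls into the helpers above. One detail shared by all the converters is the timestamp handling: LastFrameTime is rendered with Go's reference layout "2006-01-02T15:04:05.000Z", an ISO-8601-style string with millisecond precision and a literal trailing Z. A small stand-alone sketch of one such converter, with simplified stand-in types:

package main

import (
	"fmt"
	"time"
)

// Stand-ins for the repository's metric types.
type AudioMetrics struct {
	FramesReceived int64
	FramesDropped  int64
	LastFrameTime  time.Time
	AverageLatency time.Duration
}

type AudioMetricsData struct {
	FramesReceived int64
	FramesDropped  int64
	LastFrameTime  string
	AverageLatency string
}

// convert mirrors convertAudioMetricsToEventData: counters are copied, the
// timestamp is rendered with millisecond precision, and the latency uses
// time.Duration's default String() form.
func convert(m AudioMetrics) AudioMetricsData {
	return AudioMetricsData{
		FramesReceived: m.FramesReceived,
		FramesDropped:  m.FramesDropped,
		LastFrameTime:  m.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
		AverageLatency: m.AverageLatency.String(),
	}
}

func main() {
	m := AudioMetrics{
		FramesReceived: 4800,
		LastFrameTime:  time.Date(2025, 1, 2, 3, 4, 5, 678_000_000, time.UTC),
		AverageLatency: 3500 * time.Microsecond,
	}
	// LastFrameTime renders as 2025-01-02T03:04:05.678Z, AverageLatency as 3.5ms
	fmt.Printf("%+v\n", convert(m))
}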
@@ -318,26 +359,13 @@ func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
 	if sessionProvider.IsSessionActive() {
 		if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
 			micMetrics := inputManager.GetMetrics()
-			micMetricsEvent := AudioEvent{
-				Type: AudioEventMicrophoneMetrics,
-				Data: MicrophoneMetricsData{
-					FramesSent:      micMetrics.FramesSent,
-					FramesDropped:   micMetrics.FramesDropped,
-					BytesProcessed:  micMetrics.BytesProcessed,
-					LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-					ConnectionDrops: micMetrics.ConnectionDrops,
-					AverageLatency:  micMetrics.AverageLatency.String(),
-				},
-			}
+			micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventData(micMetrics))
 			aeb.sendToSubscriber(subscriber, micMetricsEvent)
 		}
 	}
 
 	// Send microphone process metrics (always send, even when subprocess is not running)
-	micProcessEvent := AudioEvent{
-		Type: AudioEventMicProcessMetrics,
-		Data: aeb.getMicrophoneProcessMetrics(),
-	}
+	micProcessEvent := createAudioEvent(AudioEventMicProcessMetrics, aeb.getMicrophoneProcessMetrics())
 	aeb.sendToSubscriber(subscriber, micProcessEvent)
 }
 
@@ -379,17 +407,7 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
 
 		// Broadcast audio metrics
 		audioMetrics := GetAudioMetrics()
-		audioMetricsEvent := AudioEvent{
-			Type: AudioEventMetricsUpdate,
-			Data: AudioMetricsData{
-				FramesReceived:  audioMetrics.FramesReceived,
-				FramesDropped:   audioMetrics.FramesDropped,
-				BytesProcessed:  audioMetrics.BytesProcessed,
-				LastFrameTime:   audioMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-				ConnectionDrops: audioMetrics.ConnectionDrops,
-				AverageLatency:  fmt.Sprintf("%.1fms", float64(audioMetrics.AverageLatency.Nanoseconds())/1e6),
-			},
-		}
+		audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventDataWithLatencyMs(audioMetrics))
 		aeb.broadcast(audioMetricsEvent)
 
 		// Broadcast microphone metrics if available using session provider
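The periodic broadcast path uses the *WithLatencyMs converters, so latency is always reported as a fixed-unit millisecond string via fmt.Sprintf("%.1fms", ...), whereas the initial-state path keeps time.Duration's String() form, which switches units with magnitude ("850µs", "1.5s"). A short stand-alone illustration of the difference (not repository code):

package main

import (
	"fmt"
	"time"
)

// latencyMs renders a duration in milliseconds with one decimal place,
// matching the fmt.Sprintf("%.1fms", ...) used by the *WithLatencyMs converters.
func latencyMs(d time.Duration) string {
	return fmt.Sprintf("%.1fms", float64(d.Nanoseconds())/1e6)
}

func main() {
	for _, d := range []time.Duration{850 * time.Microsecond, 3500 * time.Microsecond, 1500 * time.Millisecond} {
		// Duration.String() picks its own unit; latencyMs keeps a fixed one.
		fmt.Printf("String()=%s  fixed=%s\n", d.String(), latencyMs(d))
	}
}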
@@ -397,17 +415,7 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
 		if sessionProvider.IsSessionActive() {
 			if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
 				micMetrics := inputManager.GetMetrics()
-				micMetricsEvent := AudioEvent{
-					Type: AudioEventMicrophoneMetrics,
-					Data: MicrophoneMetricsData{
-						FramesSent:      micMetrics.FramesSent,
-						FramesDropped:   micMetrics.FramesDropped,
-						BytesProcessed:  micMetrics.BytesProcessed,
-						LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-						ConnectionDrops: micMetrics.ConnectionDrops,
-						AverageLatency:  fmt.Sprintf("%.1fms", float64(micMetrics.AverageLatency.Nanoseconds())/1e6),
-					},
-				}
+				micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventDataWithLatencyMs(micMetrics))
 				aeb.broadcast(micMetricsEvent)
 			}
 		}
@@ -415,27 +423,13 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
 		// Broadcast audio process metrics
 		if outputSupervisor := GetAudioOutputSupervisor(); outputSupervisor != nil {
 			if processMetrics := outputSupervisor.GetProcessMetrics(); processMetrics != nil {
-				audioProcessEvent := AudioEvent{
-					Type: AudioEventProcessMetrics,
-					Data: ProcessMetricsData{
-						PID:           processMetrics.PID,
-						CPUPercent:    processMetrics.CPUPercent,
-						MemoryRSS:     processMetrics.MemoryRSS,
-						MemoryVMS:     processMetrics.MemoryVMS,
-						MemoryPercent: processMetrics.MemoryPercent,
-						Running:       outputSupervisor.IsRunning(),
-						ProcessName:   processMetrics.ProcessName,
-					},
-				}
+				audioProcessEvent := createAudioEvent(AudioEventProcessMetrics, convertProcessMetricsToEventData(*processMetrics, outputSupervisor.IsRunning()))
 				aeb.broadcast(audioProcessEvent)
 			}
 		}
 
 		// Broadcast microphone process metrics (always broadcast, even when subprocess is not running)
-		micProcessEvent := AudioEvent{
-			Type: AudioEventMicProcessMetrics,
-			Data: aeb.getMicrophoneProcessMetrics(),
-		}
+		micProcessEvent := createAudioEvent(AudioEventMicProcessMetrics, aeb.getMicrophoneProcessMetrics())
 		aeb.broadcast(micProcessEvent)
 	}
 }
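All of the hunks touching startMetricsBroadcasting sit inside its periodic loop, which wakes on a timer, gathers the current audio, microphone, and subprocess metrics, and broadcasts them as events. A generic sketch of such a loop is below; the one-second interval, the stop channel, and the event type string are assumptions for illustration, not values taken from this commit.

package main

import (
	"fmt"
	"time"
)

type AudioEvent struct {
	Type string
	Data interface{}
}

// broadcastLoop is a generic stand-in for startMetricsBroadcasting: it wakes on a
// ticker, gathers current metrics, wraps them in an event, and hands the event to
// a broadcast function.
func broadcastLoop(stop <-chan struct{}, gather func() interface{}, broadcast func(AudioEvent)) {
	ticker := time.NewTicker(1 * time.Second) // interval is an assumption
	defer ticker.Stop()
	for {
		select {
		case <-stop:
			return
		case <-ticker.C:
			broadcast(AudioEvent{Type: "audio-metrics-update", Data: gather()})
		}
	}
}

func main() {
	stop := make(chan struct{})
	go broadcastLoop(stop,
		func() interface{} { return map[string]int64{"frames_received": 4800} },
		func(ev AudioEvent) { fmt.Printf("broadcast %s: %v\n", ev.Type, ev.Data) },
	)
	time.Sleep(2500 * time.Millisecond) // let a couple of ticks fire
	close(stop)
}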