mirror of https://github.com/jetkvm/kvm.git
[WIP] Cleanup: decrease PR complexity
This commit is contained in:
parent a3702dadd9
commit 5dc04321a1
@@ -1,560 +0,0 @@
package kvm

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

// Test validateFloat64Param function
func TestValidateFloat64Param(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        paramName   string
        methodName  string
        min         float64
        max         float64
        expected    float64
        expectError bool
    }{
        {
            name:        "valid parameter",
            params:      map[string]interface{}{"test": 50.0},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    50.0,
            expectError: false,
        },
        {
            name:        "parameter at minimum boundary",
            params:      map[string]interface{}{"test": 0.0},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    0.0,
            expectError: false,
        },
        {
            name:        "parameter at maximum boundary",
            params:      map[string]interface{}{"test": 100.0},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    100.0,
            expectError: false,
        },
        {
            name:        "parameter below minimum",
            params:      map[string]interface{}{"test": -1.0},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    0,
            expectError: true,
        },
        {
            name:        "parameter above maximum",
            params:      map[string]interface{}{"test": 101.0},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    0,
            expectError: true,
        },
        {
            name:        "wrong parameter type",
            params:      map[string]interface{}{"test": "not a number"},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    0,
            expectError: true,
        },
        {
            name:        "missing parameter",
            params:      map[string]interface{}{},
            paramName:   "test",
            methodName:  "testMethod",
            min:         0,
            max:         100,
            expected:    0,
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result, err := validateFloat64Param(tt.params, tt.paramName, tt.methodName, tt.min, tt.max)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expected, result)
            }
        })
    }
}

// Test validateKeysArray function
func TestValidateKeysArray(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        methodName  string
        expected    []uint8
        expectError bool
    }{
        {
            name:        "valid keys array",
            params:      map[string]interface{}{"keys": []interface{}{65.0, 66.0, 67.0}},
            methodName:  "testMethod",
            expected:    []uint8{65, 66, 67},
            expectError: false,
        },
        {
            name:        "empty keys array",
            params:      map[string]interface{}{"keys": []interface{}{}},
            methodName:  "testMethod",
            expected:    []uint8{},
            expectError: false,
        },
        {
            name:        "maximum keys array",
            params:      map[string]interface{}{"keys": []interface{}{1.0, 2.0, 3.0, 4.0, 5.0, 6.0}},
            methodName:  "testMethod",
            expected:    []uint8{1, 2, 3, 4, 5, 6},
            expectError: false,
        },
        {
            name:        "too many keys",
            params:      map[string]interface{}{"keys": []interface{}{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0}},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
        {
            name:        "invalid key type",
            params:      map[string]interface{}{"keys": []interface{}{"not a number"}},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
        {
            name:        "key value out of range (negative)",
            params:      map[string]interface{}{"keys": []interface{}{-1.0}},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
        {
            name:        "key value out of range (too high)",
            params:      map[string]interface{}{"keys": []interface{}{256.0}},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
        {
            name:        "wrong parameter type",
            params:      map[string]interface{}{"keys": "not an array"},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
        {
            name:        "missing keys parameter",
            params:      map[string]interface{}{},
            methodName:  "testMethod",
            expected:    nil,
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result, err := validateKeysArray(tt.params, tt.methodName)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expected, result)
            }
        })
    }
}

// Test handleKeyboardReportDirect function
func TestHandleKeyboardReportDirect(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        expectError bool
    }{
        {
            name: "valid keyboard report",
            params: map[string]interface{}{
                "modifier": 2.0,                       // Shift key
                "keys":     []interface{}{65.0, 66.0}, // A, B keys
            },
            expectError: false,
        },
        {
            name: "empty keys array",
            params: map[string]interface{}{
                "modifier": 0.0,
                "keys":     []interface{}{},
            },
            expectError: false,
        },
        {
            name: "invalid modifier",
            params: map[string]interface{}{
                "modifier": 256.0, // Out of range
                "keys":     []interface{}{65.0},
            },
            expectError: true,
        },
        {
            name: "invalid keys",
            params: map[string]interface{}{
                "modifier": 0.0,
                "keys":     []interface{}{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0}, // Too many keys
            },
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := handleKeyboardReportDirect(tt.params)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

// Test handleAbsMouseReportDirect function
func TestHandleAbsMouseReportDirect(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        expectError bool
    }{
        {
            name: "valid absolute mouse report",
            params: map[string]interface{}{
                "x":       1000.0,
                "y":       500.0,
                "buttons": 1.0, // Left button
            },
            expectError: false,
        },
        {
            name: "boundary values",
            params: map[string]interface{}{
                "x":       0.0,
                "y":       32767.0,
                "buttons": 255.0,
            },
            expectError: false,
        },
        {
            name: "invalid x coordinate",
            params: map[string]interface{}{
                "x":       -1.0, // Out of range
                "y":       500.0,
                "buttons": 0.0,
            },
            expectError: true,
        },
        {
            name: "invalid y coordinate",
            params: map[string]interface{}{
                "x":       1000.0,
                "y":       32768.0, // Out of range
                "buttons": 0.0,
            },
            expectError: true,
        },
        {
            name: "invalid buttons",
            params: map[string]interface{}{
                "x":       1000.0,
                "y":       500.0,
                "buttons": 256.0, // Out of range
            },
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := handleAbsMouseReportDirect(tt.params)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

// Test handleRelMouseReportDirect function
func TestHandleRelMouseReportDirect(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        expectError bool
    }{
        {
            name: "valid relative mouse report",
            params: map[string]interface{}{
                "dx":      10.0,
                "dy":      -5.0,
                "buttons": 2.0, // Right button
            },
            expectError: false,
        },
        {
            name: "boundary values",
            params: map[string]interface{}{
                "dx":      -127.0,
                "dy":      127.0,
                "buttons": 0.0,
            },
            expectError: false,
        },
        {
            name: "invalid dx",
            params: map[string]interface{}{
                "dx":      -128.0, // Out of range
                "dy":      0.0,
                "buttons": 0.0,
            },
            expectError: true,
        },
        {
            name: "invalid dy",
            params: map[string]interface{}{
                "dx":      0.0,
                "dy":      128.0, // Out of range
                "buttons": 0.0,
            },
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := handleRelMouseReportDirect(tt.params)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

// Test handleWheelReportDirect function
func TestHandleWheelReportDirect(t *testing.T) {
    tests := []struct {
        name        string
        params      map[string]interface{}
        expectError bool
    }{
        {
            name: "valid wheel report",
            params: map[string]interface{}{
                "wheelY": 3.0,
            },
            expectError: false,
        },
        {
            name: "boundary values",
            params: map[string]interface{}{
                "wheelY": -127.0,
            },
            expectError: false,
        },
        {
            name: "invalid wheelY",
            params: map[string]interface{}{
                "wheelY": 128.0, // Out of range
            },
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := handleWheelReportDirect(tt.params)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

// Test handleInputRPCDirect function
func TestHandleInputRPCDirect(t *testing.T) {
    tests := []struct {
        name        string
        method      string
        params      map[string]interface{}
        expectError bool
    }{
        {
            name:   "keyboard report",
            method: "keyboardReport",
            params: map[string]interface{}{
                "modifier": 0.0,
                "keys":     []interface{}{65.0},
            },
            expectError: false,
        },
        {
            name:   "absolute mouse report",
            method: "absMouseReport",
            params: map[string]interface{}{
                "x":       1000.0,
                "y":       500.0,
                "buttons": 1.0,
            },
            expectError: false,
        },
        {
            name:   "relative mouse report",
            method: "relMouseReport",
            params: map[string]interface{}{
                "dx":      10.0,
                "dy":      -5.0,
                "buttons": 2.0,
            },
            expectError: false,
        },
        {
            name:   "wheel report",
            method: "wheelReport",
            params: map[string]interface{}{
                "wheelY": 3.0,
            },
            expectError: false,
        },
        {
            name:        "unknown method",
            method:      "unknownMethod",
            params:      map[string]interface{}{},
            expectError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := handleInputRPCDirect(tt.method, tt.params)
            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

// Test isInputMethod function
func TestIsInputMethod(t *testing.T) {
    tests := []struct {
        name     string
        method   string
        expected bool
    }{
        {
            name:     "keyboard report method",
            method:   "keyboardReport",
            expected: true,
        },
        {
            name:     "absolute mouse report method",
            method:   "absMouseReport",
            expected: true,
        },
        {
            name:     "relative mouse report method",
            method:   "relMouseReport",
            expected: true,
        },
        {
            name:     "wheel report method",
            method:   "wheelReport",
            expected: true,
        },
        {
            name:     "non-input method",
            method:   "someOtherMethod",
            expected: false,
        },
        {
            name:     "empty method",
            method:   "",
            expected: false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result := isInputMethod(tt.method)
            assert.Equal(t, tt.expected, result)
        })
    }
}

// Benchmark tests to verify performance improvements
func BenchmarkValidateFloat64Param(b *testing.B) {
    params := map[string]interface{}{"test": 50.0}
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _, _ = validateFloat64Param(params, "test", "benchmarkMethod", 0, 100)
    }
}

func BenchmarkValidateKeysArray(b *testing.B) {
    params := map[string]interface{}{"keys": []interface{}{65.0, 66.0, 67.0}}
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _, _ = validateKeysArray(params, "benchmarkMethod")
    }
}

func BenchmarkHandleKeyboardReportDirect(b *testing.B) {
    params := map[string]interface{}{
        "modifier": 2.0,
        "keys":     []interface{}{65.0, 66.0},
    }
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _, _ = handleKeyboardReportDirect(params)
    }
}

func BenchmarkHandleInputRPCDirect(b *testing.B) {
    params := map[string]interface{}{
        "modifier": 2.0,
        "keys":     []interface{}{65.0, 66.0},
    }
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _, _ = handleInputRPCDirect("keyboardReport", params)
    }
}
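The deleted tests above pin down the contract of the helpers they exercise: the parameter must be present, must be a float64, and must fall inside [min, max]. A minimal sketch consistent with those cases, purely illustrative and not the project's actual validateFloat64Param:

package main

import "fmt"

// validateFloat64ParamSketch mirrors the behavior the deleted tests assert;
// the real implementation lives elsewhere in package kvm.
func validateFloat64ParamSketch(params map[string]interface{}, paramName, methodName string, min, max float64) (float64, error) {
    raw, ok := params[paramName]
    if !ok {
        return 0, fmt.Errorf("%s: missing parameter %q", methodName, paramName)
    }
    value, ok := raw.(float64)
    if !ok {
        return 0, fmt.Errorf("%s: parameter %q must be a number", methodName, paramName)
    }
    if value < min || value > max {
        return 0, fmt.Errorf("%s: parameter %q must be between %v and %v", methodName, paramName, min, max)
    }
    return value, nil
}

func main() {
    fmt.Println(validateFloat64ParamSketch(map[string]interface{}{"test": 50.0}, "test", "demo", 0, 100))
}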
@@ -23,8 +23,6 @@
//	SetAudioQuality(AudioQualityHigh)
//
//	// Audio output will automatically start when frames are received
//	metrics := GetAudioMetrics()
//	fmt.Printf("Latency: %v, Frames: %d\n", metrics.AverageLatency, metrics.FramesReceived)
package audio

import (
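The package comment in this hunk sketches the intended call pattern. Expanded into a compilable shape it might look like the following; the identifiers come from the comment itself, while the import path is an assumption made for illustration:

package main

import (
    "fmt"

    "github.com/jetkvm/kvm/internal/audio" // import path assumed for illustration
)

func main() {
    audio.SetAudioQuality(audio.AudioQualityHigh)

    // Audio output starts automatically once frames are received.
    metrics := audio.GetAudioMetrics()
    fmt.Printf("Latency: %v, Frames: %d\n", metrics.AverageLatency, metrics.FramesReceived)
}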
@@ -332,29 +330,6 @@ func GetMicrophoneConfig() AudioConfig {
    return currentMicrophoneConfig
}

// GetAudioMetrics returns current audio metrics
func GetAudioMetrics() AudioMetrics {
    // Get base metrics
    framesReceived := atomic.LoadInt64(&metrics.FramesReceived)
    framesDropped := atomic.LoadInt64(&metrics.FramesDropped)

    // If audio relay is running, use relay stats instead
    if IsAudioRelayRunning() {
        relayReceived, relayDropped := GetAudioRelayStats()
        framesReceived = relayReceived
        framesDropped = relayDropped
    }

    return AudioMetrics{
        FramesReceived:  framesReceived,
        FramesDropped:   framesDropped,
        BytesProcessed:  atomic.LoadInt64(&metrics.BytesProcessed),
        LastFrameTime:   metrics.LastFrameTime,
        ConnectionDrops: atomic.LoadInt64(&metrics.ConnectionDrops),
        AverageLatency:  metrics.AverageLatency,
    }
}

// Batched metrics to reduce atomic operations frequency
var (
    batchedFramesReceived int64
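The "batched metrics" comment that closes this hunk points at accumulating counts locally and publishing them with one atomic add, instead of one atomic operation per frame. A minimal sketch of that pattern, not taken from the project's code:

package metricsbatch // illustrative package name

import "sync/atomic"

// frameCounter batches increments in a goroutine-local field and flushes them
// to the shared counter with a single atomic add.
type frameCounter struct {
    local  int64 // owned by the producing goroutine
    shared int64 // read concurrently via Load
}

func (c *frameCounter) Inc() {
    c.local++
    if c.local >= 64 { // flush threshold chosen arbitrarily
        atomic.AddInt64(&c.shared, c.local)
        c.local = 0
    }
}

func (c *frameCounter) Load() int64 {
    return atomic.LoadInt64(&c.shared)
}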
@@ -71,45 +71,6 @@ func (bs *BaseSupervisor) GetLastExitInfo() (exitCode int, exitTime time.Time) {
    return bs.lastExitCode, bs.lastExitTime
}

// GetProcessMetrics returns process metrics if available
func (bs *BaseSupervisor) GetProcessMetrics() *ProcessMetrics {
    bs.mutex.RLock()
    defer bs.mutex.RUnlock()

    if bs.cmd == nil || bs.cmd.Process == nil {
        return &ProcessMetrics{
            PID:           0,
            CPUPercent:    0.0,
            MemoryRSS:     0,
            MemoryVMS:     0,
            MemoryPercent: 0.0,
            Timestamp:     time.Now(),
            ProcessName:   "audio-server",
        }
    }

    pid := bs.cmd.Process.Pid
    if bs.processMonitor != nil {
        metrics := bs.processMonitor.GetCurrentMetrics()
        for _, metric := range metrics {
            if metric.PID == pid {
                return &metric
            }
        }
    }

    // Return default metrics if process not found in monitor
    return &ProcessMetrics{
        PID:           pid,
        CPUPercent:    0.0,
        MemoryRSS:     0,
        MemoryVMS:     0,
        MemoryPercent: 0.0,
        Timestamp:     time.Now(),
        ProcessName:   "audio-server",
    }
}

// logSupervisorStart logs supervisor start event
func (bs *BaseSupervisor) logSupervisorStart() {
    bs.logger.Info().Msg("Supervisor starting")
@@ -195,26 +195,6 @@ func (aim *AudioInputManager) GetComprehensiveMetrics() map[string]interface{} {
    return comprehensiveMetrics
}

// LogPerformanceStats logs current performance statistics
func (aim *AudioInputManager) LogPerformanceStats() {
    metrics := aim.GetComprehensiveMetrics()

    managerStats := metrics["manager"].(map[string]interface{})
    ipcStats := metrics["ipc"].(map[string]interface{})
    detailedStats := metrics["detailed"].(map[string]interface{})

    aim.logger.Info().
        Int64("manager_frames_sent", managerStats["frames_sent"].(int64)).
        Int64("manager_frames_dropped", managerStats["frames_dropped"].(int64)).
        Float64("manager_latency_ms", managerStats["average_latency_ms"].(float64)).
        Int64("ipc_frames_sent", ipcStats["frames_sent"].(int64)).
        Int64("ipc_frames_dropped", ipcStats["frames_dropped"].(int64)).
        Float64("ipc_latency_ms", ipcStats["average_latency_ms"].(float64)).
        Float64("client_drop_rate", detailedStats["client_drop_rate"].(float64)).
        Float64("frames_per_second", detailedStats["frames_per_second"].(float64)).
        Msg("Audio input performance metrics")
}

// IsRunning returns whether the audio input manager is running
// This checks both the internal state and existing system processes
func (aim *AudioInputManager) IsRunning() bool {
@@ -319,13 +319,6 @@ func (ais *AudioInputSupervisor) GetClient() *AudioInputClient {
    return ais.client
}

// GetProcessMetrics returns current process metrics with audio-input-server name
func (ais *AudioInputSupervisor) GetProcessMetrics() *ProcessMetrics {
    metrics := ais.BaseSupervisor.GetProcessMetrics()
    metrics.ProcessName = "audio-input-server"
    return metrics
}

// monitorSubprocess monitors the subprocess and handles unexpected exits
func (ais *AudioInputSupervisor) monitorSubprocess() {
    if ais.cmd == nil || ais.cmd.Process == nil {
@@ -97,7 +97,6 @@ type AudioSupervisorInterface interface {
    Stop() error
    IsRunning() bool
    GetProcessPID() int
    GetProcessMetrics() *ProcessMetrics
}

type AudioServerInterface interface {
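Dropping GetProcessMetrics from this interface only stays consistent if every supervisor implementation changes with it; compile-time assertions of the following form (assumed here, not shown in this diff) are the usual way to keep the two in lockstep:

// Hypothetical compile-time checks; they fail to build if a supervisor stops
// satisfying AudioSupervisorInterface after a method is added or removed.
var (
    _ AudioSupervisorInterface = (*AudioInputSupervisor)(nil)
    _ AudioSupervisorInterface = (*AudioOutputSupervisor)(nil)
)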
@@ -145,20 +145,6 @@ func (aom *AudioOutputManager) GetComprehensiveMetrics() map[string]interface{}
    return comprehensiveMetrics
}

// LogPerformanceStats logs current performance statistics
func (aom *AudioOutputManager) LogPerformanceStats() {
    metrics := aom.GetMetrics()
    aom.logger.Info().
        Int64("frames_received", metrics.FramesReceived).
        Int64("frames_dropped", metrics.FramesDropped).
        Int64("bytes_processed", metrics.BytesProcessed).
        Int64("connection_drops", metrics.ConnectionDrops).
        Float64("average_latency_ms", float64(metrics.AverageLatency.Nanoseconds())/1e6).
        Bool("running", aom.IsRunning()).
        Bool("ready", aom.IsReady()).
        Msg("Audio output manager performance stats")
}

// GetStreamer returns the streamer for advanced operations
func (aom *AudioOutputManager) GetStreamer() *AudioOutputStreamer {
    return aom.streamer
@@ -150,13 +150,6 @@ func (s *AudioOutputSupervisor) Stop() {
    s.logger.Info().Str("component", AudioOutputSupervisorComponent).Msg("component stopped")
}

// GetProcessMetrics returns current process metrics with audio-output-server name
func (s *AudioOutputSupervisor) GetProcessMetrics() *ProcessMetrics {
    metrics := s.BaseSupervisor.GetProcessMetrics()
    metrics.ProcessName = "audio-output-server"
    return metrics
}

// supervisionLoop is the main supervision loop
func (s *AudioOutputSupervisor) supervisionLoop() {
    defer func() {
@@ -1,6 +1,5 @@
import { useEffect, useState } from "react";
import { MdVolumeOff, MdVolumeUp, MdGraphicEq, MdMic, MdMicOff, MdRefresh } from "react-icons/md";
import { LuActivity } from "react-icons/lu";

import { Button } from "@components/Button";
import { cx } from "@/cva.config";
@@ -71,7 +70,6 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
  const {
    isMicrophoneActive,
    isMicrophoneMuted,
    microphoneStream,
    startMicrophone,
    stopMicrophone,
    toggleMicrophoneMute,
@@ -86,9 +84,7 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
  const isMuted = audioMuted ?? false;
  const isConnected = wsConnected;

  // Simple audio level placeholder
  const audioLevel = 0;
  const isAnalyzing = isMicrophoneActive && !isMicrophoneMuted;


  // Audio devices
  const {
@@ -362,44 +358,7 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
        </div>
      </div>

      {/* Audio Level Display */}
      {isMicrophoneActive && (
        <div className="rounded-lg bg-slate-50 p-3 dark:bg-slate-700">
          <div className="text-center">
            <div className="text-sm font-medium text-slate-700 dark:text-slate-300">
              Audio Level: {Math.round(audioLevel * 100)}%
            </div>
            <div className="text-xs text-slate-500 dark:text-slate-400 mt-1">
              {isMicrophoneMuted ? 'Muted' : isAnalyzing ? 'Active' : 'Inactive'}
            </div>
          </div>
          {/* Debug information */}
          <div className="mt-2 text-xs text-slate-500 dark:text-slate-400">
            <div className="grid grid-cols-2 gap-1">
              <span>Stream: {microphoneStream ? '✓' : '✗'}</span>
              <span>Analyzing: {isAnalyzing ? '✓' : '✗'}</span>
              <span>Active: {isMicrophoneActive ? '✓' : '✗'}</span>
              <span>Muted: {isMicrophoneMuted ? '✓' : '✗'}</span>
            </div>
            {microphoneStream && (
              <div className="mt-1">
                Tracks: {microphoneStream.getAudioTracks().length}
                {microphoneStream.getAudioTracks().length > 0 && (
                  <span className="ml-2">
                    (Enabled: {microphoneStream.getAudioTracks().filter((t: MediaStreamTrack) => t.enabled).length})
                  </span>
                )}
              </div>
            )}
            <button
              onClick={syncMicrophoneState}
              className="mt-1 text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300"
            >
              Sync State
            </button>
          </div>
        </div>
      )}

      </div>

      {/* Device Selection */}
@@ -549,23 +508,7 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
            )}
          </div>

          {/* Audio Level Display */}
          {isMicrophoneActive && (
            <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-600">
              <div className="flex items-center gap-2 mb-2">
                <LuActivity className="h-4 w-4 text-slate-600 dark:text-slate-400" />
                <span className="font-medium text-slate-900 dark:text-slate-100">
                  Microphone Level
                </span>
              </div>

              <div className="text-center py-2">
                <div className="text-sm text-slate-700 dark:text-slate-300">
                  Level: {Math.round(audioLevel * 100)}%
                </div>
              </div>
            </div>
          )}


        </div>
      </div>
@@ -3,7 +3,7 @@ import { useCallback, useEffect, useRef, useState } from "react";
import { useRTCStore } from "@/hooks/stores";
import api from "@/api";
import { devLog, devInfo, devWarn, devError, devOnly } from "@/utils/debug";
import { NETWORK_CONFIG, AUDIO_CONFIG } from "@/config/constants";
import { AUDIO_CONFIG } from "@/config/constants";

export interface MicrophoneError {
  type: 'permission' | 'device' | 'network' | 'unknown';
@@ -84,53 +84,7 @@ export function useMicrophone() {
    setMicrophoneMuted(false);
  }, [microphoneSender, peerConnection, setMicrophoneStream, setMicrophoneSender, setMicrophoneActive, setMicrophoneMuted]);

  // Debug function to check current state (can be called from browser console)
  const debugMicrophoneState = useCallback(() => {
    const refStream = microphoneStreamRef.current;
    const state = {
      isMicrophoneActive,
      isMicrophoneMuted,
      streamInRef: !!refStream,
      streamInStore: !!microphoneStream,
      senderInStore: !!microphoneSender,
      streamId: refStream?.id,
      storeStreamId: microphoneStream?.id,
      audioTracks: refStream?.getAudioTracks().length || 0,
      storeAudioTracks: microphoneStream?.getAudioTracks().length || 0,
      audioTrackDetails: refStream?.getAudioTracks().map(track => ({
        id: track.id,
        label: track.label,
        enabled: track.enabled,
        readyState: track.readyState,
        muted: track.muted
      })) || [],
      peerConnectionState: peerConnection ? {
        connectionState: peerConnection.connectionState,
        iceConnectionState: peerConnection.iceConnectionState,
        signalingState: peerConnection.signalingState
      } : "No peer connection",
      streamMatch: refStream === microphoneStream
    };
    devLog("Microphone Debug State:", state);

    // Also check if streams are active
    if (refStream) {
      devLog("Ref stream active tracks:", refStream.getAudioTracks().filter(t => t.readyState === 'live').length);
    }
    if (microphoneStream && microphoneStream !== refStream) {
      devLog("Store stream active tracks:", microphoneStream.getAudioTracks().filter(t => t.readyState === 'live').length);
    }

    return state;
  }, [isMicrophoneActive, isMicrophoneMuted, microphoneStream, microphoneSender, peerConnection]);

  // Make debug function available globally for console access
  useEffect(() => {
    (window as Window & { debugMicrophoneState?: () => unknown }).debugMicrophoneState = debugMicrophoneState;
    return () => {
      delete (window as Window & { debugMicrophoneState?: () => unknown }).debugMicrophoneState;
    };
  }, [debugMicrophoneState]);

  const lastSyncRef = useRef<number>(0);
  const isStartingRef = useRef<boolean>(false); // Track if we're in the middle of starting
@@ -495,51 +449,7 @@ export function useMicrophone() {
    }
  }, [peerConnection, setMicrophoneStream, setMicrophoneSender, setMicrophoneActive, setMicrophoneMuted, stopMicrophoneStream, isMicrophoneActive, isMicrophoneMuted, microphoneStream, isStarting, isStopping, isToggling]);

  // Reset backend microphone state
  const resetBackendMicrophoneState = useCallback(async (): Promise<boolean> => {
    try {
      devLog("Resetting backend microphone state...");
      const response = await api.POST("/microphone/reset", {});

      if (response.ok) {
        const data = await response.json();
        devLog("Backend microphone reset successful:", data);

        // Update frontend state to match backend
        setMicrophoneActive(false);
        setMicrophoneMuted(false);

        // Clean up any orphaned streams
        if (microphoneStreamRef.current) {
          devLog("Cleaning up orphaned stream after reset");
          await stopMicrophoneStream();
        }

        // Wait a bit for everything to settle
        await new Promise(resolve => setTimeout(resolve, 200));

        // Sync state to ensure consistency
        await syncMicrophoneState();

        return true;
      } else {
        devError("Backend microphone reset failed:", response.status);
        return false;
      }
    } catch (error) {
      devWarn("Failed to reset backend microphone state:", error);
      // Fallback to old method
      try {
        devLog("Trying fallback reset method...");
        await api.POST("/microphone/stop", {});
        await new Promise(resolve => setTimeout(resolve, 300));
        return true;
      } catch (fallbackError) {
        devError("Fallback reset also failed:", fallbackError);
        return false;
      }
    }
  }, [setMicrophoneActive, setMicrophoneMuted, stopMicrophoneStream, syncMicrophoneState]);


  // Stop microphone
  const stopMicrophone = useCallback(async (): Promise<{ success: boolean; error?: MicrophoneError }> => {
@@ -679,173 +589,9 @@ export function useMicrophone() {
    }
  }, [microphoneStream, isMicrophoneActive, isMicrophoneMuted, setMicrophoneMuted, isStarting, isStopping, isToggling]);

  // Function to check WebRTC audio transmission stats
  const checkAudioTransmissionStats = useCallback(async () => {
    if (!microphoneSender) {
      devLog("No microphone sender available");
      return null;
    }

    try {
      const stats = await microphoneSender.getStats();
      const audioStats: {
        id: string;
        type: string;
        kind: string;
        packetsSent?: number;
        bytesSent?: number;
        timestamp?: number;
        ssrc?: number;
      }[] = [];

      stats.forEach((report, id) => {
        if (report.type === 'outbound-rtp' && report.kind === 'audio') {
          audioStats.push({
            id,
            type: report.type,
            kind: report.kind,
            packetsSent: report.packetsSent,
            bytesSent: report.bytesSent,
            timestamp: report.timestamp,
            ssrc: report.ssrc
          });
        }
      });

      devLog("Audio transmission stats:", audioStats);
      return audioStats;
    } catch (error) {
      devError("Failed to get audio transmission stats:", error);
      return null;
    }
  }, [microphoneSender]);

  // Comprehensive test function to diagnose microphone issues
  const testMicrophoneAudio = useCallback(async () => {
    devLog("=== MICROPHONE AUDIO TEST ===");

    // 1. Check if we have a stream
    const stream = microphoneStreamRef.current;
    if (!stream) {
      devLog("❌ No microphone stream available");
      return;
    }

    devLog("✅ Microphone stream exists:", stream.id);

    // 2. Check audio tracks
    const audioTracks = stream.getAudioTracks();
    devLog("Audio tracks:", audioTracks.length);

    if (audioTracks.length === 0) {
      devLog("❌ No audio tracks in stream");
      return;
    }

    const track = audioTracks[0];
    devLog("✅ Audio track details:", {
      id: track.id,
      label: track.label,
      enabled: track.enabled,
      readyState: track.readyState,
      muted: track.muted
    });

    // 3. Test audio level detection manually
    try {
      const audioContext = new (window.AudioContext || (window as Window & { webkitAudioContext?: typeof AudioContext }).webkitAudioContext)();
      const analyser = audioContext.createAnalyser();
      const source = audioContext.createMediaStreamSource(stream);

      analyser.fftSize = AUDIO_CONFIG.ANALYSIS_FFT_SIZE;
      source.connect(analyser);

      const dataArray = new Uint8Array(analyser.frequencyBinCount);

      devLog("🎤 Testing audio level detection for 5 seconds...");
      devLog("Please speak into your microphone now!");

      let maxLevel = 0;
      let sampleCount = 0;

      const testInterval = setInterval(() => {
        analyser.getByteFrequencyData(dataArray);

        let sum = 0;
        for (const value of dataArray) {
          sum += value * value;
        }
        const rms = Math.sqrt(sum / dataArray.length);
        const level = Math.min(AUDIO_CONFIG.MAX_LEVEL_PERCENTAGE, (rms / AUDIO_CONFIG.LEVEL_SCALING_FACTOR) * AUDIO_CONFIG.MAX_LEVEL_PERCENTAGE);

        maxLevel = Math.max(maxLevel, level);
        sampleCount++;

        if (sampleCount % 10 === 0) { // Log every 10th sample
          devLog(`Audio level: ${level.toFixed(1)}% (max so far: ${maxLevel.toFixed(1)}%)`);
        }
      }, AUDIO_CONFIG.ANALYSIS_UPDATE_INTERVAL);

      setTimeout(() => {
        clearInterval(testInterval);
        source.disconnect();
        audioContext.close();

        devLog("🎤 Audio test completed!");
        devLog(`Maximum audio level detected: ${maxLevel.toFixed(1)}%`);

        if (maxLevel > 5) {
          devLog("✅ Microphone is detecting audio!");
        } else {
          devLog("❌ No significant audio detected. Check microphone permissions and hardware.");
        }
      }, NETWORK_CONFIG.AUDIO_TEST_DURATION);

    } catch (error) {
      devError("❌ Failed to test audio level:", error);
    }

    // 4. Check WebRTC sender
    if (microphoneSender) {
      devLog("✅ WebRTC sender exists");
      devLog("Sender track:", {
        id: microphoneSender.track?.id,
        kind: microphoneSender.track?.kind,
        enabled: microphoneSender.track?.enabled,
        readyState: microphoneSender.track?.readyState
      });

      // Check if sender track matches stream track
      if (microphoneSender.track === track) {
        devLog("✅ Sender track matches stream track");
      } else {
        devLog("❌ Sender track does NOT match stream track");
      }
    } else {
      devLog("❌ No WebRTC sender available");
    }

    // 5. Check peer connection
    if (peerConnection) {
      devLog("✅ Peer connection exists");
      devLog("Connection state:", peerConnection.connectionState);
      devLog("ICE connection state:", peerConnection.iceConnectionState);

      const transceivers = peerConnection.getTransceivers();
      const audioTransceivers = transceivers.filter(t =>
        t.sender.track?.kind === 'audio' || t.receiver.track?.kind === 'audio'
      );

      devLog("Audio transceivers:", audioTransceivers.map(t => ({
        direction: t.direction,
        senderTrack: t.sender.track?.id,
        receiverTrack: t.receiver.track?.id
      })));
    } else {
      devLog("❌ No peer connection available");
    }

  }, [microphoneSender, peerConnection]);


  const startMicrophoneDebounced = useCallback((deviceId?: string) => {
    debouncedOperation(async () => {
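The removed testMicrophoneAudio helper derives a level percentage as the root-mean-square of the analyser's byte frequency data, scaled and capped at 100%. The same arithmetic in standalone form; the scaling constant here stands in for AUDIO_CONFIG.LEVEL_SCALING_FACTOR and is an assumption:

package main

import (
    "fmt"
    "math"
)

// rmsLevelPercent computes sqrt(mean(v^2)) over the samples, then scales the
// result into a 0-100% range, mirroring the removed helper's math.
func rmsLevelPercent(samples []byte) float64 {
    if len(samples) == 0 {
        return 0
    }
    var sum float64
    for _, v := range samples {
        sum += float64(v) * float64(v)
    }
    rms := math.Sqrt(sum / float64(len(samples)))
    const scalingFactor = 255.0 // placeholder for AUDIO_CONFIG.LEVEL_SCALING_FACTOR
    return math.Min(100, rms/scalingFactor*100)
}

func main() {
    fmt.Printf("%.1f%%\n", rmsLevelPercent([]byte{0, 64, 128, 255}))
}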
@@ -859,59 +605,7 @@ export function useMicrophone() {
    }, "stop");
  }, [stopMicrophone, debouncedOperation]);

  // Make debug functions available globally for console access
  useEffect(() => {
    (window as Window & {
      debugMicrophone?: () => unknown;
      checkAudioStats?: () => unknown;
      testMicrophoneAudio?: () => unknown;
      resetBackendMicrophone?: () => unknown;
    }).debugMicrophone = debugMicrophoneState;
    (window as Window & {
      debugMicrophone?: () => unknown;
      checkAudioStats?: () => unknown;
      testMicrophoneAudio?: () => unknown;
      resetBackendMicrophone?: () => unknown;
    }).checkAudioStats = checkAudioTransmissionStats;
    (window as Window & {
      debugMicrophone?: () => unknown;
      checkAudioStats?: () => unknown;
      testMicrophoneAudio?: () => unknown;
      resetBackendMicrophone?: () => unknown;
    }).testMicrophoneAudio = testMicrophoneAudio;
    (window as Window & {
      debugMicrophone?: () => unknown;
      checkAudioStats?: () => unknown;
      testMicrophoneAudio?: () => unknown;
      resetBackendMicrophone?: () => unknown;
    }).resetBackendMicrophone = resetBackendMicrophoneState;
    return () => {
      delete (window as Window & {
        debugMicrophone?: () => unknown;
        checkAudioStats?: () => unknown;
        testMicrophoneAudio?: () => unknown;
        resetBackendMicrophone?: () => unknown;
      }).debugMicrophone;
      delete (window as Window & {
        debugMicrophone?: () => unknown;
        checkAudioStats?: () => unknown;
        testMicrophoneAudio?: () => unknown;
        resetBackendMicrophone?: () => unknown;
      }).checkAudioStats;
      delete (window as Window & {
        debugMicrophone?: () => unknown;
        checkAudioStats?: () => unknown;
        testMicrophoneAudio?: () => unknown;
        resetBackendMicrophone?: () => unknown;
      }).testMicrophoneAudio;
      delete (window as Window & {
        debugMicrophone?: () => unknown;
        checkAudioStats?: () => unknown;
        testMicrophoneAudio?: () => unknown;
        resetBackendMicrophone?: () => unknown;
      }).resetBackendMicrophone;
    };
  }, [debugMicrophoneState, checkAudioTransmissionStats, testMicrophoneAudio, resetBackendMicrophoneState]);


  // Sync state on mount
  useEffect(() => {
@@ -941,7 +635,7 @@ export function useMicrophone() {
    startMicrophone,
    stopMicrophone,
    toggleMicrophoneMute,
    debugMicrophoneState,

    // Expose debounced variants for UI handlers
    startMicrophoneDebounced,
    stopMicrophoneDebounced,