@newgameplusinc/odyssey-audio-video-sdk-dev 1.0.259 → 1.0.260
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -63,18 +63,11 @@ class MLNoiseSuppressor {
         // Set TensorFlow.js backend
         await tf.setBackend('webgl');
         await tf.ready();
-        // Load model
-
-
-
-
-            null,
-            257,
-            1,
-            1,
-        ];
-        }
-        this.model = await tf.loadLayersModel(tf.io.fromMemory(modelJson.modelTopology, modelJson.weightsManifest[0].weights));
+        // Load model - use standard loadLayersModel instead of fromMemory
+        // This handles weight loading automatically
+        console.log(`[MLNoiseSuppressor] Loading model from: ${modelUrl}`);
+        this.model = await tf.loadLayersModel(modelUrl);
+        console.log(`[MLNoiseSuppressor] Model loaded successfully`);
         // Load config
         const baseUrl = modelUrl.substring(0, modelUrl.lastIndexOf('/'));
         const configResponse = await fetch(`${baseUrl}/model_config.json`);
@@ -88,8 +81,10 @@ class MLNoiseSuppressor {
             this.normStats = { mean: 0, std: 1 };
         }
         this.isInitialized = true;
+        console.log(`[MLNoiseSuppressor] Initialization complete`);
     }
     catch (error) {
+        console.error(`[MLNoiseSuppressor] Initialization failed:`, error);
         throw error;
     }
 }
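The substantive change in these hunks is swapping the manual `tf.io.fromMemory(...)` path for `tf.loadLayersModel(modelUrl)`, which resolves the weight shards referenced by `model.json` on its own. A minimal sketch of that initialization flow, under the assumption that the caller supplies the model URL (the function name here is illustrative, not part of the package):

```js
// Sketch only: mirrors the new loadLayersModel path shown in the diff above.
import * as tf from '@tensorflow/tfjs';

async function initNoiseSuppressor(modelUrl) {
    // Select the WebGL backend and wait until it is ready, as in the diff.
    await tf.setBackend('webgl');
    await tf.ready();

    // loadLayersModel fetches model.json and the weight shards it references,
    // so no manual modelTopology/weightsManifest handling is needed.
    const model = await tf.loadLayersModel(modelUrl);

    // The companion config lives next to the model, per the hunk above.
    const baseUrl = modelUrl.substring(0, modelUrl.lastIndexOf('/'));
    const config = await (await fetch(`${baseUrl}/model_config.json`)).json();

    return { model, config };
}
```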
@@ -62,11 +62,25 @@ class SpatialAudioChannel {
      * Setup spatial audio for a participant
      */
     async setupParticipant(participantId, track, bypassSpatialization = false) {
+        console.log(`🎧 [SpatialAudioChannel] Setting up participant ${participantId.substring(0, 8)}`, {
+            trackKind: track.kind,
+            trackEnabled: track.enabled,
+            trackMuted: track.muted,
+            trackReadyState: track.readyState,
+            bypassSpatialization,
+            audioContextState: this.audioContext.state,
+        });
         if (this.audioContext.state === 'suspended') {
+            console.log(`🎧 [SpatialAudioChannel] Resuming suspended AudioContext...`);
             await this.audioContext.resume();
+            console.log(`🎧 [SpatialAudioChannel] AudioContext resumed, state: ${this.audioContext.state}`);
         }
         const stream = new MediaStream([track]);
         const source = this.audioContext.createMediaStreamSource(stream);
+        console.log(`🎧 [SpatialAudioChannel] Created MediaStreamSource for ${participantId.substring(0, 8)}`, {
+            streamActive: stream.active,
+            streamTracks: stream.getTracks().length,
+        });
         // Create all audio nodes
         const panner = this.createPanner();
         const stereoPanner = this.audioContext.createStereoPanner();
@@ -124,6 +138,13 @@ class SpatialAudioChannel {
             dynamicLowpass,
             stream,
         });
+        console.log(`🎧 [SpatialAudioChannel] ✅ Audio pipeline setup complete for ${participantId.substring(0, 8)}`, {
+            bypassSpatialization,
+            masterGainValue: this.masterGainNode.gain.value,
+            participantGainValue: gain.gain.value,
+            audioContextState: this.audioContext.state,
+            connectedToDestination: true,
+        });
     }
     /**
      * Update spatial audio for a participant
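These two hunks only add logging, but they trace the shape of `setupParticipant`: resume a suspended AudioContext, wrap the remote track in a MediaStream, create a MediaStreamSource, then build the node graph. A simplified sketch of such a pipeline follows; the real SDK also uses a StereoPannerNode, a dynamic lowpass filter, and a shared master gain, so the node set and names below are assumptions:

```js
// Simplified per-participant spatialization graph (sketch, not the SDK's exact chain).
async function setupParticipant(audioContext, track, { bypassSpatialization = false } = {}) {
    // Browsers often start AudioContexts suspended until a user gesture.
    if (audioContext.state === 'suspended') {
        await audioContext.resume();
    }

    const stream = new MediaStream([track]);
    const source = audioContext.createMediaStreamSource(stream);

    const gain = audioContext.createGain();
    const panner = audioContext.createPanner();
    panner.panningModel = 'HRTF';

    if (bypassSpatialization) {
        // Huddle members are mixed flat: skip 3D positioning entirely.
        source.connect(gain);
    } else {
        source.connect(panner);
        panner.connect(gain);
    }
    gain.connect(audioContext.destination);

    return { stream, source, panner, gain };
}
```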
@@ -108,16 +108,24 @@ class MediasoupManager {
     }
     connectRecvTransport() {
         this.recvTransport?.on('connect', async ({ dtlsParameters }, callback, errback) => {
+            console.log(`🎧 [MediasoupManager] Recv transport connecting, DTLS params:`, {
+                transportId: this.recvTransport.id.substring(0, 8),
+            });
             this.socket.emit('connect-transport', { transportId: this.recvTransport.id, dtlsParameters }, (response) => {
-                if (response.error)
+                if (response.error) {
+                    console.error(`🎧 [MediasoupManager] ❌ Recv transport connect failed:`, response.error);
                     errback(new Error(response.error));
-
+                }
+                else {
+                    console.log(`🎧 [MediasoupManager] ✅ Recv transport connected`);
                     callback();
+                }
             });
         });
         this.recvTransport?.on('connectionstatechange', (state) => {
+            console.log(`🎧 [MediasoupManager] Recv transport connection state changed: ${state}`);
             if (state === 'failed' || state === 'closed') {
-
+                console.error(`🎧 [MediasoupManager] ❌ Recv transport ${state}`);
             }
         });
     }
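This hunk wraps mediasoup-client's transport `connect` handshake with logging and proper if/else branches. For orientation, a sketch of that handshake pattern; the `connect-transport` event name comes from the diff, while the socket is assumed to be an already-connected socket.io client:

```js
// Sketch of the recv-transport DTLS handshake instrumented above.
function connectRecvTransport(recvTransport, socket) {
    recvTransport.on('connect', ({ dtlsParameters }, callback, errback) => {
        socket.emit('connect-transport',
            { transportId: recvTransport.id, dtlsParameters },
            (response) => {
                if (response.error) {
                    // Server rejected the DTLS parameters; fail the transport.
                    errback(new Error(response.error));
                } else {
                    // Handshake accepted; media can start flowing.
                    callback();
                }
            });
    });

    recvTransport.on('connectionstatechange', (state) => {
        if (state === 'failed' || state === 'closed') {
            // Surface transport failure to the caller/UI here.
        }
    });
}
```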
@@ -149,12 +157,27 @@ class MediasoupManager {
     async consume(data) {
         if (!this.recvTransport)
             throw new Error('Receive transport not set up');
+        console.log(`🎧 [MediasoupManager] Creating consumer for ${data.participantId.substring(0, 8)}`, {
+            consumerId: data.consumerId.substring(0, 8),
+            producerId: data.producerId.substring(0, 8),
+            kind: data.kind,
+            recvTransportId: this.recvTransport.id.substring(0, 8),
+            recvTransportConnectionState: this.recvTransport.connectionState,
+        });
         const consumer = await this.recvTransport.consume({
             id: data.consumerId,
             producerId: data.producerId,
             kind: data.kind,
             rtpParameters: data.rtpParameters,
         });
+        console.log(`🎧 [MediasoupManager] Consumer created for ${data.participantId.substring(0, 8)}`, {
+            consumerId: consumer.id.substring(0, 8),
+            trackKind: consumer.track.kind,
+            trackEnabled: consumer.track.enabled,
+            trackMuted: consumer.track.muted,
+            trackReadyState: consumer.track.readyState,
+            consumerPaused: consumer.paused,
+        });
         consumer.on('transportclose', () => {
             this.consumers.delete(consumer.id);
         });
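Again only logging is added, but it documents the inputs and outputs of `consume()`. A sketch of the underlying mediasoup-client call, with `data` carrying the server-provided fields logged above:

```js
// Sketch of consuming a remote producer on an existing mediasoup recv transport.
async function consume(recvTransport, data) {
    const consumer = await recvTransport.consume({
        id: data.consumerId,
        producerId: data.producerId,
        kind: data.kind,
        rtpParameters: data.rtpParameters,
    });

    // consumer.track is a standard MediaStreamTrack that can be handed to the
    // Web Audio pipeline sketched earlier or to an <audio> element.
    consumer.on('transportclose', () => {
        // Drop the consumer from local bookkeeping when the transport goes away.
    });

    return consumer;
}
```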
package/dist/index.js
CHANGED
@@ -399,11 +399,19 @@ class OdysseySpatialComms extends EventManager_1.EventManager {
         participant.consumers.set(consumer.id, consumer);
         if (track.kind === "audio") {
             participant.audioTrack = track;
+            console.log(`🎧 [SDK] Received audio consumer for ${participant.participantId.substring(0, 8)}`, {
+                consumerId: consumer.id.substring(0, 8),
+                trackEnabled: track.enabled,
+                trackMuted: track.muted,
+                trackReadyState: track.readyState,
+                consumerPaused: consumer.paused,
+            });
             // CRITICAL: Do NOT setup spatial audio for local participant (yourself)
             // This prevents hearing your own microphone (loopback)
             const isLocalParticipant = participant.participantId ===
                 this.localParticipant?.participantId;
             if (isLocalParticipant) {
+                console.log(`🎧 [SDK] Skipping audio setup for local participant (prevent loopback)`);
                 // Do NOT connect this audio to Web Audio API
                 return; // Exit early to prevent any audio processing
             }
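The loopback guard itself is unchanged; the new log line just makes the skip visible. Reduced to its essence, the check looks like this (the helper name is illustrative, not part of the SDK):

```js
// A participant's own audio consumer must not be routed into the Web Audio
// graph, or they would hear their own microphone echoed back.
function shouldSetupRemoteAudio(participant, localParticipant) {
    const isLocalParticipant =
        participant.participantId === localParticipant?.participantId;
    return !isLocalParticipant;
}
```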
@@ -411,18 +419,20 @@ class OdysseySpatialComms extends EventManager_1.EventManager {
             // Check if participant is in a huddle (non-spatial channel)
             const participantChannel = participant.currentChannel || "spatial";
             const isInHuddle = participantChannel !== "spatial";
+            console.log(`🎧 [SDK] Setting up spatial audio for ${participant.participantId.substring(0, 8)}, huddle: ${isInHuddle}`);
             // Setup spatial audio - bypass 3D positioning for huddle members
             await this.spatialAudioManager.setupSpatialAudioForParticipant(participant.participantId, track, isInHuddle // Bypass spatialization if in huddle
             );
         }
         // NOW resume the consumer after audio pipeline is ready
+        console.log(`🎧 [SDK] Resuming consumer ${consumer.id.substring(0, 8)}...`);
         this.mediasoupManager
             .resumeConsumer(consumer.id)
             .then(() => {
-
+                console.log(`🎧 [SDK] ✅ Consumer ${consumer.id.substring(0, 8)} resumed successfully`);
             })
-            .catch(() => {
-
+            .catch((err) => {
+                console.error(`🎧 [SDK] ❌ Failed to resume consumer ${consumer.id.substring(0, 8)}:`, err);
             });
     }
     else if (track.kind === "video") {
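The added logs around `resumeConsumer` highlight the intended ordering: the consumer starts out paused, the Web Audio pipeline is wired first, and only then is the consumer resumed, with failures now surfaced instead of swallowed. A sketch of that sequence using the manager methods named in the diff (the `sdk` wrapper object below is illustrative):

```js
// Wire the audio graph first, then resume the server-paused consumer so no
// audio arrives before a sink exists for it.
async function attachRemoteAudio(sdk, participant, track, consumer) {
    const isInHuddle = (participant.currentChannel || 'spatial') !== 'spatial';

    // Bypass 3D positioning for huddle members, keep it for the spatial channel.
    await sdk.spatialAudioManager.setupSpatialAudioForParticipant(
        participant.participantId, track, isInHuddle);

    try {
        await sdk.mediasoupManager.resumeConsumer(consumer.id);
    } catch (err) {
        // Failure here leaves the participant silent; report it rather than ignoring it.
        console.error('Failed to resume consumer', consumer.id, err);
    }
}
```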
package/package.json
CHANGED