@tensamin/audio 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -4
- package/dist/{chunk-FS635GMR.mjs → chunk-6P2RDBW5.mjs} +2 -2
- package/dist/chunk-EXH2PNUE.mjs +212 -0
- package/dist/{chunk-UMU2KIB6.mjs → chunk-R5JVHKWA.mjs} +36 -6
- package/dist/chunk-XMTQPMQ6.mjs +91 -0
- package/dist/chunk-XO6B3D4A.mjs +67 -0
- package/dist/extensibility/plugins.js +78 -20
- package/dist/extensibility/plugins.mjs +3 -3
- package/dist/index.js +293 -76
- package/dist/index.mjs +5 -5
- package/dist/livekit/integration.js +293 -76
- package/dist/livekit/integration.mjs +5 -5
- package/dist/noise-suppression/rnnoise-node.js +42 -14
- package/dist/noise-suppression/rnnoise-node.mjs +1 -1
- package/dist/pipeline/audio-pipeline.js +226 -62
- package/dist/pipeline/audio-pipeline.mjs +4 -4
- package/dist/vad/vad-node.js +36 -6
- package/dist/vad/vad-node.mjs +1 -1
- package/package.json +1 -1
- package/dist/chunk-HFSKQ33X.mjs +0 -38
- package/dist/chunk-QU7E5HBA.mjs +0 -106
- package/dist/chunk-SDTOKWM2.mjs +0 -39
@@ -65,32 +65,60 @@ var RNNoisePlugin = class {
   async createNode(context, config) {
     const { loadRnnoise, RnnoiseWorkletNode } = await import("@sapphi-red/web-noise-suppressor");
     if (!config?.enabled) {
+      console.log("Noise suppression disabled, using passthrough node");
       const pass = context.createGain();
       return pass;
     }
     if (!config?.wasmUrl || !config?.simdUrl || !config?.workletUrl) {
-
-
+      const error = new Error(
+        `RNNoisePlugin requires 'wasmUrl', 'simdUrl', and 'workletUrl' to be configured. Please download the assets from @sapphi-red/web-noise-suppressor and provide the URLs in the config. Current config: wasmUrl=${config?.wasmUrl}, simdUrl=${config?.simdUrl}, workletUrl=${config?.workletUrl}
+To disable noise suppression, set noiseSuppression.enabled to false.`
       );
+      console.error(error.message);
+      throw error;
     }
-
-    this.wasmBuffer
-
-
-
+    try {
+      if (!this.wasmBuffer) {
+        console.log("Loading RNNoise WASM binary...");
+        this.wasmBuffer = await loadRnnoise({
+          url: config.wasmUrl,
+          simdUrl: config.simdUrl
+        });
+        console.log("RNNoise WASM loaded successfully");
+      }
+    } catch (error) {
+      const err = new Error(
+        `Failed to load RNNoise WASM binary: ${error instanceof Error ? error.message : String(error)}`
+      );
+      console.error(err);
+      throw err;
     }
     const workletUrl = config.workletUrl;
     try {
       await context.audioWorklet.addModule(workletUrl);
+      console.log("RNNoise worklet loaded successfully");
     } catch (e) {
-
+      const error = new Error(
+        `Failed to load RNNoise worklet from ${workletUrl}: ${e instanceof Error ? e.message : String(e)}. Ensure the workletUrl points to a valid RNNoise worklet script.`
+      );
+      console.error(error.message);
+      throw error;
+    }
+    try {
+      const node = new RnnoiseWorkletNode(context, {
+        wasmBinary: this.wasmBuffer,
+        maxChannels: 1
+        // Mono for now
+      });
+      console.log("RNNoise worklet node created successfully");
+      return node;
+    } catch (error) {
+      const err = new Error(
+        `Failed to create RNNoise worklet node: ${error instanceof Error ? error.message : String(error)}`
+      );
+      console.error(err);
+      throw err;
     }
-    const node = new RnnoiseWorkletNode(context, {
-      wasmBinary: this.wasmBuffer,
-      maxChannels: 1
-      // Mono for now
-    });
-    return node;
   }
 };
 
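The new guard makes the RNNoise configuration contract explicit: the plugin only runs when `wasmUrl`, `simdUrl`, and `workletUrl` are all set, and a missing URL now throws immediately with an actionable message instead of failing later in the worklet. A minimal configuration sketch, assuming the three asset files shipped with @sapphi-red/web-noise-suppressor have been copied into the application's static assets (the /assets/* paths below are illustrative, not part of the package):

```ts
// Illustrative asset locations; serve the RNNoise files from
// @sapphi-red/web-noise-suppressor on your own origin and point the config at them.
// All three URLs are required for the plugin to initialize.
const noiseSuppressionConfig = {
  enabled: true,
  wasmUrl: "/assets/rnnoise.wasm",          // non-SIMD WASM build (assumed path)
  simdUrl: "/assets/rnnoise_simd.wasm",     // SIMD WASM build (assumed path)
  workletUrl: "/assets/rnnoise-worklet.js", // AudioWorklet processor script (assumed path)
};
// To skip noise suppression entirely, pass { enabled: false } instead.
```
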
@@ -138,22 +166,52 @@ registerProcessor('energy-vad-processor', EnergyVadProcessor);
 var EnergyVADPlugin = class {
   name = "energy-vad";
   async createNode(context, config, onDecision) {
+    if (!config?.enabled) {
+      console.log("VAD disabled, using passthrough node");
+      const pass = context.createGain();
+      return pass;
+    }
     const blob = new Blob([energyVadWorkletCode], {
       type: "application/javascript"
     });
     const url = URL.createObjectURL(blob);
     try {
       await context.audioWorklet.addModule(url);
+      console.log("Energy VAD worklet loaded successfully");
     } catch (e) {
-
-
-
+      const error = new Error(
+        `Failed to load Energy VAD worklet: ${e instanceof Error ? e.message : String(e)}`
+      );
+      console.error(error.message);
       URL.revokeObjectURL(url);
+      throw error;
+    }
+    URL.revokeObjectURL(url);
+    let node;
+    try {
+      node = new AudioWorkletNode(context, "energy-vad-processor");
+      console.log("Energy VAD node created successfully");
+    } catch (e) {
+      const error = new Error(
+        `Failed to create Energy VAD node: ${e instanceof Error ? e.message : String(e)}`
+      );
+      console.error(error.message);
+      throw error;
     }
-    const node = new AudioWorkletNode(context, "energy-vad-processor");
     node.port.onmessage = (event) => {
-
-
+      try {
+        const { probability } = event.data;
+        if (typeof probability === "number" && !isNaN(probability)) {
+          onDecision(probability);
+        } else {
+          console.warn("Invalid VAD probability received:", event.data);
+        }
+      } catch (error) {
+        console.error("Error in VAD message handler:", error);
+      }
+    };
+    node.port.onmessageerror = (event) => {
+      console.error("VAD port message error:", event);
     };
     return node;
   }

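The Energy VAD plugin gets the same hardening, and the hunk also shows the pattern it relies on: the worklet processor source is inlined as a string, registered through a Blob URL, the URL is revoked once `addModule` has resolved, and port messages are validated before they reach the consumer callback. A stripped-down sketch of that Blob-URL registration pattern on its own (the processor name and source below are illustrative, not the package's energy-vad processor):

```ts
// Register an AudioWorklet processor from an inline source string via a Blob URL,
// then release the URL once the module has been compiled.
const processorSource = `
  class PassthroughProcessor extends AudioWorkletProcessor {
    process(inputs, outputs) {
      const input = inputs[0];
      const output = outputs[0];
      const channels = Math.min(input.length, output.length);
      for (let ch = 0; ch < channels; ch++) output[ch].set(input[ch]);
      return true;
    }
  }
  registerProcessor("passthrough-processor", PassthroughProcessor);
`;

async function addInlineWorklet(context: AudioContext): Promise<AudioWorkletNode> {
  const blob = new Blob([processorSource], { type: "application/javascript" });
  const url = URL.createObjectURL(blob);
  try {
    await context.audioWorklet.addModule(url);
  } finally {
    URL.revokeObjectURL(url); // safe once addModule has settled
  }
  return new AudioWorkletNode(context, "passthrough-processor");
}
```
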
@@ -247,42 +305,84 @@ var VADStateMachine = class {
 async function createAudioPipeline(sourceTrack, config = {}) {
   const context = getAudioContext();
   registerPipeline();
+  const nsEnabled = config.noiseSuppression?.enabled !== false && Boolean(config.noiseSuppression?.wasmUrl && config.noiseSuppression?.simdUrl && config.noiseSuppression?.workletUrl);
+  const vadEnabled = config.vad?.enabled !== false;
   const fullConfig = {
-    noiseSuppression: {
-
+    noiseSuppression: {
+      enabled: nsEnabled,
+      ...config.noiseSuppression
+    },
+    vad: {
+      enabled: vadEnabled,
+      ...config.vad
+    },
     output: {
       speechGain: 1,
-      silenceGain: 0,
+      silenceGain: vadEnabled ? 0 : 1,
+      // If no VAD, always output audio
       gainRampTime: 0.02,
       ...config.output
     },
     livekit: { manageTrackMute: false, ...config.livekit }
   };
+  console.log("Audio pipeline config:", {
+    noiseSuppression: fullConfig.noiseSuppression?.enabled,
+    vad: fullConfig.vad?.enabled,
+    output: fullConfig.output
+  });
+  if (!sourceTrack || sourceTrack.kind !== "audio") {
+    throw new Error("createAudioPipeline requires a valid audio MediaStreamTrack");
+  }
+  if (sourceTrack.readyState === "ended") {
+    throw new Error("Cannot create pipeline from an ended MediaStreamTrack");
+  }
   const sourceStream = new MediaStream([sourceTrack]);
   const sourceNode = context.createMediaStreamSource(sourceStream);
-
-
-  );
-  const nsNode = await nsPlugin.createNode(
-    context,
-    fullConfig.noiseSuppression
-  );
-  const vadPlugin = getVADPlugin(fullConfig.vad?.pluginName);
-  const vadStateMachine = new VADStateMachine(fullConfig.vad);
+  let nsNode;
+  let vadNode;
   const emitter = (0, import_mitt.default)();
-
-
-
-
-
-
-
-
-
-
+  try {
+    const nsPlugin = getNoiseSuppressionPlugin(
+      fullConfig.noiseSuppression?.pluginName
+    );
+    nsNode = await nsPlugin.createNode(
+      context,
+      fullConfig.noiseSuppression
+    );
+  } catch (error) {
+    const err = error instanceof Error ? error : new Error(String(error));
+    console.error("Failed to create noise suppression node:", err);
+    emitter.emit("error", err);
+    throw err;
+  }
+  const vadStateMachine = new VADStateMachine(fullConfig.vad);
+  try {
+    const vadPlugin = getVADPlugin(fullConfig.vad?.pluginName);
+    vadNode = await vadPlugin.createNode(
+      context,
+      fullConfig.vad,
+      (prob) => {
+        try {
+          const timestamp = context.currentTime * 1e3;
+          const newState = vadStateMachine.processFrame(prob, timestamp);
+          if (newState.state !== lastVadState.state || Math.abs(newState.probability - lastVadState.probability) > 0.1) {
+            emitter.emit("vadChange", newState);
+            lastVadState = newState;
+            updateGain(newState);
+          }
+        } catch (vadError) {
+          const err = vadError instanceof Error ? vadError : new Error(String(vadError));
+          console.error("Error in VAD callback:", err);
+          emitter.emit("error", err);
+        }
       }
-
-  )
+    );
+  } catch (error) {
+    const err = error instanceof Error ? error : new Error(String(error));
+    console.error("Failed to create VAD node:", err);
+    emitter.emit("error", err);
+    throw err;
+  }
   let lastVadState = {
     isSpeaking: false,
     probability: 0,

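Taken together, this hunk pins down the effective defaults: noise suppression is only auto-enabled when all three asset URLs are present, VAD defaults to on, and `silenceGain` falls back to 1 when VAD is off so audio keeps flowing. A usage sketch against the signature shown here (importing from the package root is an assumption; the build also ships `dist/pipeline/audio-pipeline.*`):

```ts
import { createAudioPipeline } from "@tensamin/audio"; // assumed root export

const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const [micTrack] = stream.getAudioTracks();

const pipeline = await createAudioPipeline(micTrack, {
  vad: { enabled: true },
  // Leaving the noiseSuppression URLs out keeps RNNoise disabled per the nsEnabled check above.
  output: { speechGain: 1, silenceGain: 0, gainRampTime: 0.02 },
});

pipeline.events.on("vadChange", (state) => {
  console.log("speaking:", state.isSpeaking, "p =", state.probability.toFixed(2));
});
pipeline.events.on("error", (err) => console.error("pipeline error:", err));

// pipeline.processedTrack is the gated MediaStreamTrack to publish or play back;
// call pipeline.dispose() to tear the graph down.
```
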
@@ -298,34 +398,98 @@ async function createAudioPipeline(sourceTrack, config = {}) {
   const gainNode = context.createGain();
   gainNode.gain.value = fullConfig.output?.silenceGain ?? 0;
   const destination = context.createMediaStreamDestination();
-
-
-
+  try {
+    splitter.connect(delayNode);
+    delayNode.connect(gainNode);
+    gainNode.connect(destination);
+  } catch (error) {
+    const err = error instanceof Error ? error : new Error(String(error));
+    console.error("Failed to wire audio pipeline:", err);
+    emitter.emit("error", err);
+    throw err;
+  }
   function updateGain(state) {
-
-
-
-
+    try {
+      const { speechGain, silenceGain, gainRampTime } = fullConfig.output;
+      const targetGain = state.isSpeaking ? speechGain ?? 1 : silenceGain ?? 0;
+      const now = context.currentTime;
+      gainNode.gain.setTargetAtTime(targetGain, now, gainRampTime ?? 0.02);
+    } catch (error) {
+      const err = error instanceof Error ? error : new Error(String(error));
+      console.error("Failed to update gain:", err);
+      emitter.emit("error", err);
+    }
   }
+  const audioTracks = destination.stream.getAudioTracks();
+  console.log("Destination stream tracks:", {
+    count: audioTracks.length,
+    tracks: audioTracks.map((t) => ({
+      id: t.id,
+      label: t.label,
+      enabled: t.enabled,
+      readyState: t.readyState
+    }))
+  });
+  if (audioTracks.length === 0) {
+    const err = new Error(
+      "Failed to create processed audio track: destination stream has no audio tracks. This may indicate an issue with the audio graph connection."
+    );
+    console.error(err);
+    emitter.emit("error", err);
+    throw err;
+  }
+  const processedTrack = audioTracks[0];
+  if (!processedTrack || processedTrack.readyState === "ended") {
+    const err = new Error("Processed audio track is invalid or ended");
+    console.error(err);
+    emitter.emit("error", err);
+    throw err;
+  }
+  console.log("Audio pipeline created successfully:", {
+    sourceTrack: {
+      id: sourceTrack.id,
+      label: sourceTrack.label,
+      readyState: sourceTrack.readyState
+    },
+    processedTrack: {
+      id: processedTrack.id,
+      label: processedTrack.label,
+      readyState: processedTrack.readyState
+    },
+    config: {
+      noiseSuppression: fullConfig.noiseSuppression?.enabled,
+      vad: fullConfig.vad?.enabled
+    }
+  });
   function dispose() {
-
-
-
-
-
-
-
-
+    try {
+      sourceNode.disconnect();
+      nsNode.disconnect();
+      splitter.disconnect();
+      vadNode.disconnect();
+      delayNode.disconnect();
+      gainNode.disconnect();
+      destination.stream.getTracks().forEach((t) => t.stop());
+      unregisterPipeline();
+    } catch (error) {
+      console.error("Error during pipeline disposal:", error);
+    }
   }
   return {
-    processedTrack
+    processedTrack,
     events: emitter,
     get state() {
       return lastVadState;
     },
     setConfig: (newConfig) => {
-
-
+      try {
+        if (newConfig.vad) {
+          vadStateMachine.updateConfig(newConfig.vad);
+        }
+      } catch (error) {
+        const err = error instanceof Error ? error : new Error(String(error));
+        console.error("Failed to update config:", err);
+        emitter.emit("error", err);
       }
     },
     dispose

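The gating itself is a plain Web Audio gain ramp: `setTargetAtTime` eases the gain toward `speechGain` or `silenceGain` with a short time constant rather than switching instantly, which avoids audible clicks. A stripped-down sketch of that technique in isolation (the helper below is illustrative, not part of the package API):

```ts
// Gate an audio path by ramping a GainNode between a speech and a silence level.
function makeGainGate(
  context: AudioContext,
  speechGain = 1,
  silenceGain = 0,
  rampTime = 0.02, // seconds; time constant of the exponential approach
) {
  const gain = context.createGain();
  gain.gain.value = silenceGain; // start gated, as the pipeline does
  return {
    node: gain,
    set(isSpeaking: boolean) {
      const target = isSpeaking ? speechGain : silenceGain;
      gain.gain.setTargetAtTime(target, context.currentTime, rampTime);
    },
  };
}
```
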
@@ -334,31 +498,84 @@ async function createAudioPipeline(sourceTrack, config = {}) {
 
 // src/livekit/integration.ts
 async function attachProcessingToTrack(track, config = {}) {
+  if (!track) {
+    throw new Error("attachProcessingToTrack requires a valid LocalAudioTrack");
+  }
   const originalTrack = track.mediaStreamTrack;
-
-
+  if (!originalTrack) {
+    throw new Error("LocalAudioTrack has no underlying MediaStreamTrack");
+  }
+  if (originalTrack.readyState === "ended") {
+    throw new Error("Cannot attach processing to an ended MediaStreamTrack");
+  }
+  let pipeline;
+  try {
+    console.log("Creating audio processing pipeline...");
+    pipeline = await createAudioPipeline(originalTrack, config);
+    console.log("Audio processing pipeline created successfully");
+  } catch (error) {
+    const err = new Error(
+      `Failed to create audio pipeline: ${error instanceof Error ? error.message : String(error)}`
+    );
+    console.error(err);
+    throw err;
+  }
+  if (!pipeline.processedTrack) {
+    throw new Error("Pipeline did not return a processed track");
+  }
+  try {
+    console.log("Replacing LiveKit track with processed track...");
+    await track.replaceTrack(pipeline.processedTrack);
+    console.log("LiveKit track replaced successfully");
+  } catch (error) {
+    pipeline.dispose();
+    const err = new Error(
+      `Failed to replace LiveKit track: ${error instanceof Error ? error.message : String(error)}`
+    );
+    console.error(err);
+    throw err;
+  }
   if (config.livekit?.manageTrackMute) {
     let isVadMuted = false;
     pipeline.events.on("vadChange", async (state) => {
-
-      if (
-
-
-
-
-
-
+      try {
+        if (state.isSpeaking) {
+          if (isVadMuted) {
+            await track.unmute();
+            isVadMuted = false;
+          }
+        } else {
+          if (!track.isMuted) {
+            await track.mute();
+            isVadMuted = true;
+          }
         }
+      } catch (error) {
+        console.error("Error handling VAD-based track muting:", error);
       }
     });
   }
+  pipeline.events.on("error", (error) => {
+    console.error("Audio pipeline error:", error);
+  });
   const originalDispose = pipeline.dispose;
   pipeline.dispose = () => {
-
-
+    try {
+      if (originalTrack.readyState === "live") {
+        console.log("Restoring original track...");
+        track.replaceTrack(originalTrack).catch((error) => {
+          console.error("Failed to restore original track:", error);
+        });
+      }
+      originalDispose();
+    } catch (error) {
+      console.error("Error during pipeline disposal:", error);
+      try {
+        originalDispose();
+      } catch (disposeError) {
+        console.error("Error calling original dispose:", disposeError);
+      }
     }
-    originalDispose();
   };
   return pipeline;
 }

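For LiveKit users, the hardened `attachProcessingToTrack` remains the one-call entry point: it builds the pipeline, swaps the processed track into the `LocalAudioTrack`, optionally mutes and unmutes the track on VAD changes, and restores the original track on dispose. A usage sketch (the `@tensamin/audio/livekit` subpath is an assumption based on the file layout above):

```ts
import { createLocalAudioTrack } from "livekit-client";
import { attachProcessingToTrack } from "@tensamin/audio/livekit"; // assumed subpath export

const track = await createLocalAudioTrack();
const pipeline = await attachProcessingToTrack(track, {
  vad: { enabled: true },
  livekit: { manageTrackMute: true }, // let the pipeline mute/unmute the track on VAD changes
});

// ...publish `track` to a Room as usual...

// Later: tear down processing; dispose() also tries to restore the original microphone track.
pipeline.dispose();
```
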
@@ -1,12 +1,12 @@
 import {
   attachProcessingToTrack
-} from "../chunk-
-import "../chunk-
+} from "../chunk-XMTQPMQ6.mjs";
+import "../chunk-EXH2PNUE.mjs";
 import "../chunk-JJASCVEW.mjs";
 import "../chunk-OZ7KMC4S.mjs";
-import "../chunk-
-import "../chunk-
-import "../chunk-
+import "../chunk-6P2RDBW5.mjs";
+import "../chunk-XO6B3D4A.mjs";
+import "../chunk-R5JVHKWA.mjs";
 export {
   attachProcessingToTrack
 };

@@ -39,32 +39,60 @@ var RNNoisePlugin = class {
   async createNode(context, config) {
     const { loadRnnoise, RnnoiseWorkletNode } = await import("@sapphi-red/web-noise-suppressor");
     if (!config?.enabled) {
+      console.log("Noise suppression disabled, using passthrough node");
       const pass = context.createGain();
       return pass;
     }
     if (!config?.wasmUrl || !config?.simdUrl || !config?.workletUrl) {
-
-
+      const error = new Error(
+        `RNNoisePlugin requires 'wasmUrl', 'simdUrl', and 'workletUrl' to be configured. Please download the assets from @sapphi-red/web-noise-suppressor and provide the URLs in the config. Current config: wasmUrl=${config?.wasmUrl}, simdUrl=${config?.simdUrl}, workletUrl=${config?.workletUrl}
+To disable noise suppression, set noiseSuppression.enabled to false.`
       );
+      console.error(error.message);
+      throw error;
     }
-
-    this.wasmBuffer
-
-
-
+    try {
+      if (!this.wasmBuffer) {
+        console.log("Loading RNNoise WASM binary...");
+        this.wasmBuffer = await loadRnnoise({
+          url: config.wasmUrl,
+          simdUrl: config.simdUrl
+        });
+        console.log("RNNoise WASM loaded successfully");
+      }
+    } catch (error) {
+      const err = new Error(
+        `Failed to load RNNoise WASM binary: ${error instanceof Error ? error.message : String(error)}`
+      );
+      console.error(err);
+      throw err;
     }
     const workletUrl = config.workletUrl;
     try {
       await context.audioWorklet.addModule(workletUrl);
+      console.log("RNNoise worklet loaded successfully");
     } catch (e) {
-
+      const error = new Error(
+        `Failed to load RNNoise worklet from ${workletUrl}: ${e instanceof Error ? e.message : String(e)}. Ensure the workletUrl points to a valid RNNoise worklet script.`
+      );
+      console.error(error.message);
+      throw error;
+    }
+    try {
+      const node = new RnnoiseWorkletNode(context, {
+        wasmBinary: this.wasmBuffer,
+        maxChannels: 1
+        // Mono for now
+      });
+      console.log("RNNoise worklet node created successfully");
+      return node;
+    } catch (error) {
+      const err = new Error(
+        `Failed to create RNNoise worklet node: ${error instanceof Error ? error.message : String(error)}`
+      );
+      console.error(err);
+      throw err;
     }
-    const node = new RnnoiseWorkletNode(context, {
-      wasmBinary: this.wasmBuffer,
-      maxChannels: 1
-      // Mono for now
-    });
-    return node;
   }
 };
 // Annotate the CommonJS export names for ESM import in node: