webtalk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.gitattributes +35 -0
- package/.github/workflows/publish.yml +26 -0
- package/README.md +1 -0
- package/app.html +519 -0
- package/assets/index-ClpvH5Vn.js +40 -0
- package/assets/index-DUYekU7u.css +1 -0
- package/assets/worker-BPxxCWVT.js +2679 -0
- package/config.js +36 -0
- package/debug.js +21 -0
- package/download-lock.js +26 -0
- package/hot-reload.js +78 -0
- package/middleware.js +62 -0
- package/package.json +33 -0
- package/persistent-state.js +62 -0
- package/sdk.js +22 -0
- package/serve-static.js +45 -0
- package/server.js +177 -0
- package/setup-npm-publishing.sh +140 -0
- package/stt.js +141 -0
- package/test.mp3 +0 -0
- package/tts/EventEmitter.js +59 -0
- package/tts/PCMPlayerWorklet.js +563 -0
- package/tts/inference-worker.js +1121 -0
- package/tts/onnx-streaming.js +721 -0
- package/tts-models.js +97 -0
- package/tts-utils.js +52 -0
- package/tts.js +167 -0
- package/whisper-models.js +161 -0
- package/worker-patch.js +32 -0
|
@@ -0,0 +1,563 @@
|
|
|
1
|
+
import { EventEmitter, CustomEvent } from './EventEmitter.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* PCMPlayerWorklet - Drop-in replacement for PCMPlayer using AudioWorklet
|
|
5
|
+
* Uses dynamic buffer management with backpressure for smooth playback
|
|
6
|
+
*/
|
|
7
|
+
export class PCMPlayerWorklet extends EventEmitter {
|
|
8
|
+
constructor(audioContext, options = {}) {
|
|
9
|
+
super();
|
|
10
|
+
this.audioContext = audioContext;
|
|
11
|
+
this.options = options;
|
|
12
|
+
this.workletNode = null;
|
|
13
|
+
this.isInitialized = false;
|
|
14
|
+
this.playbackTime = 0; // For API compatibility
|
|
15
|
+
|
|
16
|
+
// Audio nodes
|
|
17
|
+
this.gainNode = this.audioContext.createGain();
|
|
18
|
+
this.gainNode.connect(this.audioContext.destination);
|
|
19
|
+
this.analyser = this.audioContext.createAnalyser();
|
|
20
|
+
this.gainNode.connect(this.analyser);
|
|
21
|
+
|
|
22
|
+
// Queue for chunks waiting to be sent
|
|
23
|
+
this.pendingChunks = [];
|
|
24
|
+
this.availableCapacity = 0;
|
|
25
|
+
this.isWorkletReady = false;
|
|
26
|
+
this.hasReceivedInitialCapacity = false;
|
|
27
|
+
|
|
28
|
+
// Metrics
|
|
29
|
+
this.metrics = {
|
|
30
|
+
chunksPlayed: 0,
|
|
31
|
+
underruns: 0,
|
|
32
|
+
bufferLevel: 0,
|
|
33
|
+
samplesPlayed: 0
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
// Initialize worklet
|
|
37
|
+
this.initPromise = this.initialize();
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
async initialize() {
  // Builds the AudioWorklet processor from an inline template string (so no
  // separate processor file has to be served), loads it via a Blob URL, and
  // wires up the worklet <-> main-thread message protocol
  // ('audio'/'reset'/'stream-ended' in, 'capacity'/'underrun'/
  // 'playback-started'/'playback-complete' out).
  // Idempotent; rethrows on failure after logging.
  if (this.isInitialized) return;

  try {
    // Calculate buffer parameters
    const sampleRate = this.audioContext.sampleRate;
    const minBufferMs = this.options.minBufferBeforePlaybackMs || 300;
    const minBufferSamples = Math.floor(minBufferMs * sampleRate / 1000);

    // Buffer size: enough for smooth playback but not excessive
    // Target 60 seconds of buffer to prevent any overflow issues
    const bufferSizeSamples = sampleRate * 60;

    // Create the worklet processor code
    // Buffer sizes are baked into the generated source via ${...}.
    // NOTE(review): inside the generated code, `currentTime` is the
    // AudioWorkletGlobalScope global, not a local.
    const processorCode = `
class PCMProcessor extends AudioWorkletProcessor {
  constructor() {
    super();

    // Ring buffer - sized appropriately
    this.bufferSize = ${bufferSizeSamples};
    this.ringBuffer = new Float32Array(this.bufferSize);
    this.readPos = 0;
    this.writePos = 0;
    this.isPlaying = false;

    // Configuration
    this.minBufferSamples = ${minBufferSamples};
    this.targetBufferSamples = ${minBufferSamples * 2}; // Target 2x min for stability

    // State
    this.streamEnded = false;
    this.playbackCompleteReported = false;

    // Stats reporting
    this.frameCount = 0;
    this.reportInterval = 256; // Report every ~5ms at 48kHz

    this.port.onmessage = (e) => {
      switch(e.data.type) {
        case 'audio':
          this.addAudio(e.data.data);
          break;
        case 'reset':
          this.reset();
          break;
        case 'stream-ended':
          this.streamEnded = true;
          break;
      }
    };

    // Send initial capacity
    this.sendCapacityUpdate();
  }

  addAudio(float32Data) {
    const samples = float32Data.length;
    const available = this.getAvailableSpace();
    const bufferedBefore = this.getBufferedSamples();


    if (samples > available) {
      // This shouldn't happen with proper backpressure
      console.error('Buffer overflow - bug in backpressure. Samples:', samples, 'Available:', available, 'Buffered:', this.getBufferedSamples());
      // Drop oldest data to recover
      const overflow = samples - available;
      this.readPos = (this.readPos + overflow) % this.bufferSize;
    }

    // Write to ring buffer
    if (this.writePos + samples <= this.bufferSize) {
      this.ringBuffer.set(float32Data, this.writePos);
      this.writePos += samples;
      if (this.writePos >= this.bufferSize) {
        this.writePos = 0;
      }
    } else {
      const firstPart = this.bufferSize - this.writePos;
      const secondPart = samples - firstPart;
      this.ringBuffer.set(float32Data.slice(0, firstPart), this.writePos);
      this.ringBuffer.set(float32Data.slice(firstPart), 0);
      this.writePos = secondPart;
    }

    // Auto-start when we have enough buffered
    const buffered = this.getBufferedSamples();

    if (!this.isPlaying && buffered >= this.minBufferSamples) {
      const now = currentTime;
      this.isPlaying = true;
      // Notify that playback has started
      this.port.postMessage({
        type: 'playback-started',
        buffered: buffered,
        audioTime: now
      });
    }

    // Report capacity after adding
    this.sendCapacityUpdate();
  }

  getAvailableSpace() {
    const used = this.getBufferedSamples();
    return this.bufferSize - used - 128; // Leave small safety margin
  }

  getBufferedSamples() {
    if (this.writePos >= this.readPos) {
      return this.writePos - this.readPos;
    } else {
      return this.bufferSize - this.readPos + this.writePos;
    }
  }

  sendCapacityUpdate() {
    const buffered = this.getBufferedSamples();
    const capacity = this.getAvailableSpace();

    // Calculate how much we want to receive
    // If buffer is low, request more; if it's full, request nothing
    let requestSamples = 0;
    if (buffered < this.targetBufferSamples) {
      requestSamples = Math.min(capacity, this.targetBufferSamples - buffered);
    }

    this.port.postMessage({
      type: 'capacity',
      buffered: buffered,
      capacity: capacity,
      requestSamples: requestSamples,
      isPlaying: this.isPlaying
    });
  }

  process(inputs, outputs, parameters) {
    const output = outputs[0];
    if (!output || !output[0]) return true;

    const outputChannel = output[0];
    const numSamples = outputChannel.length;

    // Report stats periodically
    if (++this.frameCount % this.reportInterval === 0) {
      this.sendCapacityUpdate();
    }

    if (!this.isPlaying) {
      outputChannel.fill(0);
      return true;
    }

    const buffered = this.getBufferedSamples();

    if (buffered < numSamples) {
      // Underrun - play what we have and fill rest with silence
      let samplesRead = 0;

      if (buffered > 0) {
        // Play whatever samples we DO have
        if (this.readPos + buffered <= this.bufferSize) {
          for (let i = 0; i < buffered; i++) {
            outputChannel[i] = this.ringBuffer[this.readPos + i];
          }
          this.readPos += buffered;
          if (this.readPos >= this.bufferSize) {
            this.readPos = 0;
          }
        } else {
          // Wrap-around case
          const firstPart = this.bufferSize - this.readPos;
          const secondPart = buffered - firstPart;

          for (let i = 0; i < firstPart; i++) {
            outputChannel[i] = this.ringBuffer[this.readPos + i];
          }
          for (let i = 0; i < secondPart; i++) {
            outputChannel[firstPart + i] = this.ringBuffer[i];
          }

          this.readPos = secondPart;
        }
        samplesRead = buffered;
      }

      // Fill remaining with silence
      for (let i = samplesRead; i < numSamples; i++) {
        outputChannel[i] = 0;
      }

      // Check for playback complete
      if (this.streamEnded && buffered === 0) {
        if (!this.playbackCompleteReported) {
          this.port.postMessage({
            type: 'playback-complete'
          });
          this.playbackCompleteReported = true;
        }
        this.isPlaying = false;
        this.streamEnded = false;
      } else {
        // Request more data urgently
        this.port.postMessage({
          type: 'underrun',
          buffered: buffered,
          needed: numSamples
        });
        this.sendCapacityUpdate();
      }
    } else {
      // Normal playback - read from ring buffer
      if (this.readPos + numSamples <= this.bufferSize) {
        for (let i = 0; i < numSamples; i++) {
          outputChannel[i] = this.ringBuffer[this.readPos + i];
        }
        this.readPos += numSamples;
        if (this.readPos >= this.bufferSize) {
          this.readPos = 0;
        }
      } else {
        // Wrap-around case
        const firstPart = this.bufferSize - this.readPos;
        const secondPart = numSamples - firstPart;

        for (let i = 0; i < firstPart; i++) {
          outputChannel[i] = this.ringBuffer[this.readPos + i];
        }
        for (let i = 0; i < secondPart; i++) {
          outputChannel[firstPart + i] = this.ringBuffer[i];
        }

        this.readPos = secondPart;
      }
    }

    return true;
  }

  reset() {
    this.readPos = 0;
    this.writePos = 0;
    this.ringBuffer.fill(0);
    this.isPlaying = false;
    this.streamEnded = false;
    this.playbackCompleteReported = false;
    this.sendCapacityUpdate();
  }
}

registerProcessor('pcm-processor', PCMProcessor);
`;

    // Create and load worklet
    // Loaded through a Blob URL, which is revoked as soon as the module
    // has been added.
    const blob = new Blob([processorCode], { type: 'application/javascript' });
    const workletUrl = URL.createObjectURL(blob);

    await this.audioContext.audioWorklet.addModule(workletUrl);
    URL.revokeObjectURL(workletUrl);

    // Create worklet node
    this.workletNode = new AudioWorkletNode(this.audioContext, 'pcm-processor');
    this.workletNode.connect(this.gainNode);

    // Handle messages from worklet
    this.workletNode.port.onmessage = (e) => {
      switch (e.data.type) {
        case 'capacity':
          this.handleCapacityUpdate(e.data);
          break;

        case 'underrun':
          this.metrics.underruns++;
          console.warn(`[MAIN THREAD] ⚠️ UNDERRUN #${this.metrics.underruns} detected! buffered=${e.data.buffered} samples, needed=${e.data.needed} samples`);
          // Try to send more data immediately
          this.processPendingChunks();
          break;

        case 'playback-started':
          console.log(`[MAIN THREAD] Received playback-started at performance.now=${performance.now().toFixed(2)}ms, audioContext.currentTime=${this.audioContext.currentTime.toFixed(3)}s, worklet reported audioTime=${e.data.audioTime}s`);
          this.emit('firstPlayback', {
            startTime: this.audioContext.currentTime,
            bufferedSamples: e.data.buffered
          });
          break;

        case 'playback-complete':
          this.emit('audioEnded', {
            endTime: this.audioContext.currentTime
          });
          break;
      }
    };

    this.isInitialized = true;
    this.isWorkletReady = true;
  } catch (error) {
    console.error('Failed to initialize PCMPlayerWorklet:', error);
    throw error;
  }
}
|
|
341
|
+
|
|
342
|
+
handleCapacityUpdate(data) {
|
|
343
|
+
this.availableCapacity = data.capacity;
|
|
344
|
+
this.metrics.bufferLevel = data.buffered;
|
|
345
|
+
|
|
346
|
+
// console.log(`[CAPACITY] Update at ${performance.now().toFixed(2)}ms: capacity=${data.capacity}, buffered=${data.buffered}, pending=${this.pendingChunks.length}`);
|
|
347
|
+
|
|
348
|
+
// Mark that we've received initial capacity
|
|
349
|
+
if (!this.hasReceivedInitialCapacity) {
|
|
350
|
+
this.hasReceivedInitialCapacity = true;
|
|
351
|
+
// console.log(`[CAPACITY] *** FIRST capacity received at ${performance.now().toFixed(2)}ms, processing ${this.pendingChunks.length} pending chunks`);
|
|
352
|
+
// Process any chunks that were waiting for initial capacity
|
|
353
|
+
if (this.pendingChunks.length > 0) {
|
|
354
|
+
this.processPendingChunks();
|
|
355
|
+
}
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
// If worklet is requesting data, try to send it
|
|
359
|
+
if (data.requestSamples > 0 && this.pendingChunks.length > 0) {
|
|
360
|
+
this.processPendingChunks();
|
|
361
|
+
}
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
processPendingChunks() {
  // Hands at most ONE chunk (or one partial chunk) to the worklet per call,
  // then zeroes availableCapacity so nothing more is sent until the worklet
  // posts a fresh 'capacity' message. This serialises sends and avoids
  // racing ahead of the worklet's own accounting.
  if (!this.isWorkletReady || this.pendingChunks.length === 0) {
    return;
  }

  // Capacity unknown or exhausted: wait for the next capacity update.
  if (this.availableCapacity <= 0) {
    return;
  }

  // Send ONE chunk if it fits, then wait for next capacity update
  // This prevents race conditions from sending multiple chunks before worklet updates
  const chunk = this.pendingChunks[0];

  if (chunk.length <= this.availableCapacity) {
    // Send the whole chunk
    this.pendingChunks.shift();
    this.workletNode.port.postMessage({
      type: 'audio',
      data: chunk
    });
    // Set capacity to 0 to prevent sending more until we get an update
    this.availableCapacity = 0;
  } else if (this.availableCapacity > 4096) {
    // Chunk doesn't fit whole; send a partial only when there is
    // significant space (> 4096 samples), keeping the remainder queued.
    const partial = chunk.slice(0, this.availableCapacity);
    console.log(`Sending partial: ${partial.length} samples from ${chunk.length} (capacity: ${this.availableCapacity})`);
    this.pendingChunks[0] = chunk.slice(this.availableCapacity);
    this.workletNode.port.postMessage({
      type: 'audio',
      data: partial
    });
    // Set capacity to 0 to prevent sending more until we get an update
    this.availableCapacity = 0;
  } else {
    // Not enough space for even a worthwhile partial; wait for the next
    // capacity update from the worklet.
    console.log(`Not sending - chunk ${chunk.length} samples, capacity ${this.availableCapacity}`);
  }

  // If all chunks sent and stream ended, notify worklet
  if (this.pendingChunks.length === 0 && this.pendingStreamEnd) {
    this.workletNode.port.postMessage({ type: 'stream-ended' });
    this.pendingStreamEnd = false;
  }
}
|
|
409
|
+
|
|
410
|
+
playAudio(data) {
  // Queues a chunk of PCM (Int16Array or Float32Array) for playback.
  // Chunks are not sent to the worklet directly; they go through
  // pendingChunks and are released by the capacity/backpressure protocol.
  if (!this.isInitialized) {
    // Queue the data if not initialized yet; the first call installs a
    // one-shot drain that replays the queue once initialize() resolves.
    if (!this.initPendingQueue) {
      this.initPendingQueue = [];
      this.initPromise.then(() => {
        // Process queued data. Clearing initPendingQueue BEFORE replaying
        // makes the recursive playAudio() calls take the normal path.
        const queue = this.initPendingQueue;
        this.initPendingQueue = null;
        for (const queuedData of queue) {
          this.playAudio(queuedData);
        }
      });
    }
    this.initPendingQueue.push(data);
    return;
  }

  // NOTE(review): audio is silently DROPPED when the context is not
  // running (e.g. suspended awaiting a user gesture) — confirm callers
  // expect this rather than queueing.
  if (this.audioContext.state !== 'running') {
    return;
  }

  // Convert to Float32Array if needed
  const float32Array = data instanceof Int16Array
    ? this.pcm16ToFloat32(data)
    : data;

  // Add to pending queue
  this.pendingChunks.push(float32Array);

  // Only try to process if we've received initial capacity and have space
  // Otherwise wait for capacity update from worklet
  if (this.hasReceivedInitialCapacity && this.availableCapacity > 0) {
    this.processPendingChunks();
  }

  // Update metrics. NOTE(review): this counts chunks ACCEPTED/queued, not
  // chunks actually played by the worklet.
  this.metrics.chunksPlayed++;

  // Update playback time for compatibility with the old PCMPlayer API.
  const duration = float32Array.length / this.audioContext.sampleRate;
  this.playbackTime = this.audioContext.currentTime + duration;

  // Emit events for compatibility
  this.emit('audioStarted', {
    startTime: this.audioContext.currentTime,
    duration: duration,
    samples: float32Array.length
  });
}
|
|
460
|
+
|
|
461
|
+
notifyStreamEnded() {
|
|
462
|
+
if (this.pendingChunks.length > 0) {
|
|
463
|
+
// Still have chunks to send, mark for later
|
|
464
|
+
this.pendingStreamEnd = true;
|
|
465
|
+
} else {
|
|
466
|
+
// No chunks left, send immediately
|
|
467
|
+
if (this.workletNode) {
|
|
468
|
+
this.workletNode.port.postMessage({ type: 'stream-ended' });
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
}
|
|
472
|
+
|
|
473
|
+
pcm16ToFloat32(pcm16) {
|
|
474
|
+
const float32 = new Float32Array(pcm16.length);
|
|
475
|
+
for (let i = 0; i < pcm16.length; i++) {
|
|
476
|
+
float32[i] = pcm16[i] / 32768;
|
|
477
|
+
}
|
|
478
|
+
return float32;
|
|
479
|
+
}
|
|
480
|
+
|
|
481
|
+
reset() {
|
|
482
|
+
this.playbackTime = 0;
|
|
483
|
+
this.pendingChunks = [];
|
|
484
|
+
this.pendingStreamEnd = false;
|
|
485
|
+
this.availableCapacity = 0;
|
|
486
|
+
|
|
487
|
+
if (this.workletNode) {
|
|
488
|
+
this.workletNode.port.postMessage({ type: 'reset' });
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
// Quick fade out to avoid clicks
|
|
492
|
+
if (this.gainNode) {
|
|
493
|
+
const now = this.audioContext.currentTime;
|
|
494
|
+
this.gainNode.gain.setValueAtTime(this.gainNode.gain.value, now);
|
|
495
|
+
this.gainNode.gain.linearRampToValueAtTime(0, now + 0.05);
|
|
496
|
+
setTimeout(() => {
|
|
497
|
+
this.gainNode.gain.value = 1;
|
|
498
|
+
}, 100);
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
|
|
502
|
+
stopAllSources() {
  // PCMPlayer API compatibility shim: this player has no discrete buffer
  // sources, so stopping everything is equivalent to a full reset().
  this.reset();
}
|
|
505
|
+
|
|
506
|
+
async resume() {
|
|
507
|
+
if (this.audioContext.state === 'suspended') {
|
|
508
|
+
await this.audioContext.resume();
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
get volume() {
  // Current output gain in [0, 1] (clamped by the setter).
  return this.gainNode.gain.value;
}
|
|
515
|
+
|
|
516
|
+
set volume(value) {
|
|
517
|
+
const clampedValue = Math.max(0, Math.min(1, value));
|
|
518
|
+
this.gainNode.gain.value = clampedValue;
|
|
519
|
+
this.emit('volumeChange', { volume: clampedValue });
|
|
520
|
+
}
|
|
521
|
+
|
|
522
|
+
get volumePercentage() {
  // Volume expressed as 0-100, convenient for UI sliders.
  return this.volume * 100;
}

set volumePercentage(percentage) {
  // Delegates to the `volume` setter, which clamps to [0, 1].
  this.volume = percentage / 100;
}
|
|
529
|
+
|
|
530
|
+
getAnalyserData() {
|
|
531
|
+
const bufferLength = this.analyser.frequencyBinCount;
|
|
532
|
+
const dataArray = new Uint8Array(bufferLength);
|
|
533
|
+
this.analyser.getByteFrequencyData(dataArray);
|
|
534
|
+
return dataArray;
|
|
535
|
+
}
|
|
536
|
+
|
|
537
|
+
getTimeDomainData() {
|
|
538
|
+
const bufferLength = this.analyser.frequencyBinCount;
|
|
539
|
+
const dataArray = new Uint8Array(bufferLength);
|
|
540
|
+
this.analyser.getByteTimeDomainData(dataArray);
|
|
541
|
+
return dataArray;
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
getPlaybackStatus() {
|
|
545
|
+
const bufferMs = this.metrics.bufferLevel
|
|
546
|
+
? (this.metrics.bufferLevel / this.audioContext.sampleRate) * 1000
|
|
547
|
+
: 0;
|
|
548
|
+
|
|
549
|
+
return {
|
|
550
|
+
currentTime: this.audioContext.currentTime,
|
|
551
|
+
scheduledTime: this.playbackTime,
|
|
552
|
+
bufferedDuration: bufferMs / 1000,
|
|
553
|
+
state: this.audioContext.state,
|
|
554
|
+
worklet: {
|
|
555
|
+
bufferLevelSamples: this.metrics.bufferLevel,
|
|
556
|
+
bufferLevelMs: bufferMs,
|
|
557
|
+
underruns: this.metrics.underruns,
|
|
558
|
+
chunksPlayed: this.metrics.chunksPlayed,
|
|
559
|
+
pendingChunks: this.pendingChunks.length
|
|
560
|
+
}
|
|
561
|
+
};
|
|
562
|
+
}
|
|
563
|
+
}
|