@pipecat-ai/websocket-transport 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,4052 @@
1
+ var $fkNis$dailycodailyjs = require("@daily-co/daily-js");
2
+ var $fkNis$pipecataiclientjs = require("@pipecat-ai/client-js");
3
+ var $fkNis$events = require("events");
4
+ var $fkNis$protobuftsruntime = require("@protobuf-ts/runtime");
5
+
6
+
7
/**
 * Defines a live (getter-backed) named export on a CommonJS exports object.
 * @param {object} e target exports object
 * @param {string} n export name
 * @param {() => any} v getter producing the exported value
 * @param {(value: any) => void} [s] optional setter
 */
function $parcel$export(e, n, v, s) {
    const descriptor = {
        get: v,
        set: s,
        enumerable: true,
        configurable: true
    };
    Object.defineProperty(e, n, descriptor);
}
10
+
11
/**
 * Unwraps a transpiled ES-module namespace object to its default export;
 * any other value (including null/undefined) is returned unchanged.
 * @param {any} a possibly-transpiled module object
 * @returns {any}
 */
function $parcel$interopDefault(a) {
    if (a && a.__esModule) {
        return a.default;
    }
    return a;
}
14
+
15
// Parcel CommonJS export bindings: the bundle's public API surface.
$parcel$export(module.exports, "WavMediaManager", () => $23859655abfc5f5c$export$45c5b9bfba2f6304);
$parcel$export(module.exports, "DailyMediaManager", () => $1c088932741d88e6$export$c95c65abc5f47125);
$parcel$export(module.exports, "WebSocketTransport", () => $bee70417e8ead9ed$export$de21836fc42c6f9c);
// export * from "./realTimeWebSocketTransport";
// export * from "../../../lib/wavtools/dist/index.d.ts";
20
/**
 * Raw wav audio file contents
 * @typedef {Object} WavPackerAudioType
 * @property {Blob} blob
 * @property {string} url
 * @property {number} channelCount
 * @property {number} sampleRate
 * @property {number} duration
 */
/**
 * Utility class for assembling PCM16 "audio/wav" data
 * @class
 */
class $a61750b8fbee4dae$export$13afda237b1c9846 {
    /**
     * Converts Float32Array of amplitude data to ArrayBuffer in Int16Array format
     * @param {Float32Array} float32Array
     * @returns {ArrayBuffer}
     */
    static floatTo16BitPCM(float32Array) {
        const buffer = new ArrayBuffer(float32Array.length * 2);
        const view = new DataView(buffer);
        let offset = 0;
        for (let i = 0; i < float32Array.length; i++, offset += 2) {
            // Clamp to [-1, 1]; negatives scale by 0x8000, positives by 0x7fff
            let s = Math.max(-1, Math.min(1, float32Array[i]));
            view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
        }
        return buffer;
    }
    /**
     * Concatenates two ArrayBuffers
     * @param {ArrayBuffer} leftBuffer
     * @param {ArrayBuffer} rightBuffer
     * @returns {ArrayBuffer}
     */
    static mergeBuffers(leftBuffer, rightBuffer) {
        const tmpArray = new Uint8Array(leftBuffer.byteLength + rightBuffer.byteLength);
        tmpArray.set(new Uint8Array(leftBuffer), 0);
        tmpArray.set(new Uint8Array(rightBuffer), leftBuffer.byteLength);
        return tmpArray.buffer;
    }
    /**
     * Packs data into little-endian Int16/Int32 byte sequences
     * @private
     * @param {number} size 0 = 1x Int16, 1 = 1x Int32
     * @param {number} arg value to pack
     * @returns {Uint8Array}
     */
    _packData(size, arg) {
        return [
            new Uint8Array([
                arg,
                arg >> 8
            ]),
            new Uint8Array([
                arg,
                arg >> 8,
                arg >> 16,
                arg >> 24
            ])
        ][size];
    }
    /**
     * Packs audio into "audio/wav" Blob
     * @param {number} sampleRate
     * @param {{bitsPerSample: number, channels: Array<Float32Array>, data: Int16Array}} audio
     * @returns {WavPackerAudioType}
     */
    pack(sampleRate, audio) {
        if (!audio?.bitsPerSample) throw new Error(`Missing "bitsPerSample"`);
        else if (!audio?.channels) throw new Error(`Missing "channels"`);
        else if (!audio?.data) throw new Error(`Missing "data"`);
        const { bitsPerSample: bitsPerSample, channels: channels, data: data } = audio;
        // Size in bytes of the "data" sub-chunk payload
        const dataByteLength = channels[0].length * channels.length * bitsPerSample / 8;
        const output = [
            // RIFF header. BUGFIX: the chunk size was hard-coded to 52; per the
            // WAV spec it is the file length minus the 8-byte "RIFF"+size
            // preamble: 4 ("WAVE") + (8 + 16) (fmt chunk) + (8 + data bytes).
            'RIFF',
            this._packData(1, 4 + (8 + 16) + (8 + dataByteLength)),
            'WAVE',
            // chunk 1: "fmt " (PCM)
            'fmt ',
            this._packData(1, 16),
            this._packData(0, 1), // AudioFormat 1 = uncompressed PCM
            this._packData(0, channels.length),
            this._packData(1, sampleRate),
            this._packData(1, sampleRate * channels.length * bitsPerSample / 8), // byte rate
            this._packData(0, channels.length * bitsPerSample / 8), // block align
            this._packData(0, bitsPerSample),
            // chunk 2: "data"
            'data',
            this._packData(1, dataByteLength),
            data
        ];
        // BUGFIX: MIME type was 'audio/mpeg' even though this assembles a
        // RIFF/WAVE PCM container (the class doc itself says "audio/wav").
        const blob = new Blob(output, {
            type: 'audio/wav'
        });
        const url = URL.createObjectURL(blob);
        return {
            blob: blob,
            url: url,
            channelCount: channels.length,
            sampleRate: sampleRate,
            // bytes / (channels * rate * bytesPerSample); identical to the old
            // hard-coded "* 2" for the PCM16 case this class produces
            duration: data.byteLength / (channels.length * sampleRate * (bitsPerSample / 8))
        };
    }
}
globalThis.WavPacker = $a61750b8fbee4dae$export$13afda237b1c9846;
120
+
121
+
122
/**
 * Constants for help with visualization
 * Helps map frequency ranges from Fast Fourier Transform
 * to human-interpretable ranges, notably music ranges and
 * human vocal ranges.
 */
// Fundamental frequencies of the eighth octave (C8..B8), in Hz
const $e10a9de47f58e137$var$octave8Frequencies = [
    4186.01, 4434.92, 4698.63, 4978.03, 5274.04, 5587.65,
    5919.91, 6271.93, 6644.88, 7040.0, 7458.62, 7902.13
];
// Note names matching the frequencies above, in the same order
const $e10a9de47f58e137$var$octave8FrequencyLabels = [
    'C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'
];
// Full note table for octaves 1..8, derived by halving the octave-8
// fundamentals once per octave step down (C1, C#1, ... B8).
const $e10a9de47f58e137$export$776c63898ae5b636 = [];
const $e10a9de47f58e137$export$facd167cc27ea9b0 = [];
for (let octave = 1; octave <= 8; octave++) {
    $e10a9de47f58e137$var$octave8Frequencies.forEach((baseFrequency, noteIndex) => {
        $e10a9de47f58e137$export$776c63898ae5b636.push(baseFrequency / Math.pow(2, 8 - octave));
        $e10a9de47f58e137$export$facd167cc27ea9b0.push(`${$e10a9de47f58e137$var$octave8FrequencyLabels[noteIndex]}${octave}`);
    });
}
/**
 * Subset of the note frequencies between 32 and 2000 Hz
 * 6 octave range: C1 to B6
 */
const $e10a9de47f58e137$var$voiceBandLow = 32.0;
const $e10a9de47f58e137$var$voiceBandHigh = 2000.0;
const $e10a9de47f58e137$export$dbc1581ed2cfa183 = $e10a9de47f58e137$export$776c63898ae5b636.filter(
    (frequency) => frequency > $e10a9de47f58e137$var$voiceBandLow && frequency < $e10a9de47f58e137$var$voiceBandHigh
);
const $e10a9de47f58e137$export$30a6f2881311088f = $e10a9de47f58e137$export$facd167cc27ea9b0.filter((_, index) => {
    const frequency = $e10a9de47f58e137$export$776c63898ae5b636[index];
    return frequency > $e10a9de47f58e137$var$voiceBandLow && frequency < $e10a9de47f58e137$var$voiceBandHigh;
});
177
+
178
+
179
/**
 * Analyzes audio attached to an HTMLAudioElement via the Web Audio API.
 * Either pre-computes FFT frames from a supplied AudioBuffer (Safari-safe
 * path) or analyzes a live MediaElementSource while it plays.
 */
class $5853841ab58516d4$export$2c3136da0bf130f9 {
    /**
     * Retrieves frequency domain data from an AnalyserNode adjusted to a decibel range
     * returns human-readable formatting and labels
     * @param {AnalyserNode} analyser
     * @param {number} sampleRate
     * @param {Float32Array} [fftResult]
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {AudioAnalysisOutputType}
     */
    static getFrequencies(analyser, sampleRate, fftResult, analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
        if (!fftResult) {
            // No cached frame supplied: sample the analyser right now
            fftResult = new Float32Array(analyser.frequencyBinCount);
            analyser.getFloatFrequencyData(fftResult);
        }
        // Highest representable frequency is half the sample rate
        const nyquistFrequency = sampleRate / 2;
        const frequencyStep = 1 / fftResult.length * nyquistFrequency;
        let outputValues;
        let frequencies;
        let labels;
        if (analysisType === 'music' || analysisType === 'voice') {
            // Bucket raw FFT bins into musical-note bands; each band keeps the
            // loudest amplitude of the bins that fall within it.
            const useFrequencies = analysisType === 'voice' ? (0, $e10a9de47f58e137$export$dbc1581ed2cfa183) : (0, $e10a9de47f58e137$export$776c63898ae5b636);
            const aggregateOutput = Array(useFrequencies.length).fill(minDecibels);
            for(let i = 0; i < fftResult.length; i++){
                const frequency = i * frequencyStep;
                const amplitude = fftResult[i];
                // Walk note bands from highest to lowest; the bin belongs to
                // the first (highest) band whose frequency it exceeds.
                for(let n = useFrequencies.length - 1; n >= 0; n--)if (frequency > useFrequencies[n]) {
                    aggregateOutput[n] = Math.max(aggregateOutput[n], amplitude);
                    break;
                }
            }
            outputValues = aggregateOutput;
            frequencies = analysisType === 'voice' ? (0, $e10a9de47f58e137$export$dbc1581ed2cfa183) : (0, $e10a9de47f58e137$export$776c63898ae5b636);
            labels = analysisType === 'voice' ? (0, $e10a9de47f58e137$export$30a6f2881311088f) : (0, $e10a9de47f58e137$export$facd167cc27ea9b0);
        } else {
            // Raw mode: one entry per FFT bin, labeled by its frequency in Hz
            outputValues = Array.from(fftResult);
            frequencies = outputValues.map((_, i)=>frequencyStep * i);
            labels = frequencies.map((f)=>`${f.toFixed(2)} Hz`);
        }
        // We normalize to {0, 1}
        const normalizedOutput = outputValues.map((v)=>{
            return Math.max(0, Math.min((v - minDecibels) / (maxDecibels - minDecibels), 1));
        });
        const values = new Float32Array(normalizedOutput);
        return {
            values: values,
            frequencies: frequencies,
            labels: labels
        };
    }
    /**
     * Creates a new AudioAnalysis instance for an HTMLAudioElement
     * @param {HTMLAudioElement} audioElement
     * @param {AudioBuffer|null} [audioBuffer] If provided, will cache all frequency domain data from the buffer
     * @returns {AudioAnalysis}
     */
    constructor(audioElement, audioBuffer = null){
        this.fftResults = [];
        if (audioBuffer) {
            /**
             * Modified from
             * https://stackoverflow.com/questions/75063715/using-the-web-audio-api-to-analyze-a-song-without-playing
             *
             * We do this to populate FFT values for the audio if provided an `audioBuffer`
             * The reason to do this is that Safari fails when using `createMediaElementSource`
             * This has a non-zero RAM cost so we only opt-in to run it on Safari, Chrome is better
             */
            const { length: length, sampleRate: sampleRate } = audioBuffer;
            const offlineAudioContext = new OfflineAudioContext({
                length: length,
                sampleRate: sampleRate
            });
            const source = offlineAudioContext.createBufferSource();
            source.buffer = audioBuffer;
            const analyser = offlineAudioContext.createAnalyser();
            analyser.fftSize = 8192;
            analyser.smoothingTimeConstant = 0.1;
            source.connect(analyser);
            // limit is :: 128 / sampleRate;
            // but we just want 60fps - cuts ~1s from 6MB to 1MB of RAM
            const renderQuantumInSeconds = 1 / 60;
            const durationInSeconds = length / sampleRate;
            // Recursively schedules a suspend point every "render quantum";
            // at each suspension we snapshot the analyser's FFT frame into
            // fftResults, then resume rendering for the next frame.
            const analyze = (index)=>{
                const suspendTime = renderQuantumInSeconds * index;
                if (suspendTime < durationInSeconds) offlineAudioContext.suspend(suspendTime).then(()=>{
                    const fftResult = new Float32Array(analyser.frequencyBinCount);
                    analyser.getFloatFrequencyData(fftResult);
                    this.fftResults.push(fftResult);
                    analyze(index + 1);
                });
                // First call kicks off rendering; later calls resume a suspend
                if (index === 1) offlineAudioContext.startRendering();
                else offlineAudioContext.resume();
            };
            source.start(0);
            analyze(1);
            this.audio = audioElement;
            this.context = offlineAudioContext;
            this.analyser = analyser;
            this.sampleRate = sampleRate;
            this.audioBuffer = audioBuffer;
        } else {
            // Live path: analyze the element's output while it plays
            const audioContext = new AudioContext();
            const track = audioContext.createMediaElementSource(audioElement);
            const analyser = audioContext.createAnalyser();
            analyser.fftSize = 8192;
            analyser.smoothingTimeConstant = 0.1;
            track.connect(analyser);
            analyser.connect(audioContext.destination);
            this.audio = audioElement;
            this.context = audioContext;
            this.analyser = analyser;
            this.sampleRate = this.context.sampleRate;
            this.audioBuffer = null;
        }
    }
    /**
     * Gets the current frequency domain data from the playing audio track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {AudioAnalysisOutputType}
     */
    getFrequencies(analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
        let fftResult = null;
        if (this.audioBuffer && this.fftResults.length) {
            // Pre-computed path: pick the cached frame that matches the
            // current playback position (clamped to the last frame).
            const pct = this.audio.currentTime / this.audio.duration;
            const index = Math.min(pct * this.fftResults.length | 0, this.fftResults.length - 1);
            fftResult = this.fftResults[index];
        }
        return $5853841ab58516d4$export$2c3136da0bf130f9.getFrequencies(this.analyser, this.sampleRate, fftResult, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Resume the internal AudioContext if it was suspended due to the lack of
     * user interaction when the AudioAnalysis was instantiated.
     * @returns {Promise<true>}
     */
    async resumeIfSuspended() {
        if (this.context.state === 'suspended') await this.context.resume();
        return true;
    }
}
globalThis.AudioAnalysis = $5853841ab58516d4$export$2c3136da0bf130f9;
318
+
319
+
320
// AudioWorklet processor source for PLAYBACK, shipped as a string so it can be
// loaded through a Blob object URL (audioWorklet.addModule requires a URL).
// NOTE(review): the code inside the template literal executes in the
// AudioWorklet global scope, not in this module — it must not reference any
// module-level identifiers, and its bytes are runtime data (left untouched).
const $d6d3743ef65c7519$export$50b76700e2b15e9 = `
class StreamProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.hasStarted = false;
this.hasInterrupted = false;
this.outputBuffers = [];
this.bufferLength = 128;
this.write = { buffer: new Float32Array(this.bufferLength), trackId: null };
this.writeOffset = 0;
this.trackSampleOffsets = {};
this.port.onmessage = (event) => {
if (event.data) {
const payload = event.data;
if (payload.event === 'write') {
const int16Array = payload.buffer;
const float32Array = new Float32Array(int16Array.length);
for (let i = 0; i < int16Array.length; i++) {
float32Array[i] = int16Array[i] / 0x8000; // Convert Int16 to Float32
}
this.writeData(float32Array, payload.trackId);
} else if (
payload.event === 'offset' ||
payload.event === 'interrupt'
) {
const requestId = payload.requestId;
const trackId = this.write.trackId;
const offset = this.trackSampleOffsets[trackId] || 0;
this.port.postMessage({
event: 'offset',
requestId,
trackId,
offset,
});
if (payload.event === 'interrupt') {
this.hasInterrupted = true;
}
} else {
throw new Error(\`Unhandled event "\${payload.event}"\`);
}
}
};
}

writeData(float32Array, trackId = null) {
let { buffer } = this.write;
let offset = this.writeOffset;
for (let i = 0; i < float32Array.length; i++) {
buffer[offset++] = float32Array[i];
if (offset >= buffer.length) {
this.outputBuffers.push(this.write);
this.write = { buffer: new Float32Array(this.bufferLength), trackId };
buffer = this.write.buffer;
offset = 0;
}
}
this.writeOffset = offset;
return true;
}

process(inputs, outputs, parameters) {
const output = outputs[0];
const outputChannelData = output[0];
const outputBuffers = this.outputBuffers;
if (this.hasInterrupted) {
this.port.postMessage({ event: 'stop' });
return false;
} else if (outputBuffers.length) {
this.hasStarted = true;
const { buffer, trackId } = outputBuffers.shift();
for (let i = 0; i < outputChannelData.length; i++) {
outputChannelData[i] = buffer[i] || 0;
}
if (trackId) {
this.trackSampleOffsets[trackId] =
this.trackSampleOffsets[trackId] || 0;
this.trackSampleOffsets[trackId] += buffer.length;
}
return true;
} else if (this.hasStarted) {
this.port.postMessage({ event: 'stop' });
return false;
} else {
return true;
}
}
}

registerProcessor('stream_processor', StreamProcessor);
`;
// Wrap the worklet source in a Blob and expose an object URL; consumers pass
// this URL to audioWorklet.addModule() (see WavStreamPlayer.connect below).
const $d6d3743ef65c7519$var$script = new Blob([
    $d6d3743ef65c7519$export$50b76700e2b15e9
], {
    type: 'application/javascript'
});
const $d6d3743ef65c7519$var$src = URL.createObjectURL($d6d3743ef65c7519$var$script);
const $d6d3743ef65c7519$export$bfa8c596114d74df = $d6d3743ef65c7519$var$src;
417
+
418
+
419
+
420
/**
 * Plays streamed 16-bit PCM audio through the "stream_processor"
 * AudioWorklet defined above.
 */
class $1e7ce9484a3c4077$export$9698d62c78b8f366 {
    /**
     * Creates a new WavStreamPlayer instance
     * @param {{sampleRate?: number}} options
     * @returns {WavStreamPlayer}
     */
    constructor({ sampleRate: sampleRate = 44100 } = {}){
        this.scriptSrc = (0, $d6d3743ef65c7519$export$bfa8c596114d74df);
        this.sampleRate = sampleRate;
        this.context = null;
        this.stream = null;
        this.analyser = null;
        // requestId -> {trackId, offset, currentTime} replies from the worklet
        this.trackSampleOffsets = {};
        // trackIds whose further chunks are dropped after an interrupt
        this.interruptedTrackIds = {};
    }
    /**
     * Connects the audio context and enables output to speakers
     * @returns {Promise<true>}
     */
    async connect() {
        this.context = new AudioContext({
            sampleRate: this.sampleRate
        });
        // NOTE(review): fire-and-forget — setSinkId returns a promise whose
        // rejection is not observed here; confirm this is intentional.
        if (this._speakerID) this.context.setSinkId(this._speakerID);
        if (this.context.state === "suspended") await this.context.resume();
        try {
            await this.context.audioWorklet.addModule(this.scriptSrc);
        } catch (e) {
            console.error(e);
            throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
        }
        const analyser = this.context.createAnalyser();
        analyser.fftSize = 8192;
        analyser.smoothingTimeConstant = 0.1;
        this.analyser = analyser;
        return true;
    }
    /**
     * Gets the current frequency domain data from the playing track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
     */
    getFrequencies(analysisType = "frequency", minDecibels = -100, maxDecibels = -30) {
        if (!this.analyser) throw new Error("Not connected, please call .connect() first");
        return (0, $5853841ab58516d4$export$2c3136da0bf130f9).getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Switches audio output to the given device, rolling the selection back
     * if the sink change fails.
     * @param {string} speaker deviceId
     */
    async updateSpeaker(speaker) {
        const _prevSpeaker = this._speakerID;
        this._speakerID = speaker;
        if (this.context) try {
            // setSinkId() with no argument restores the default output device
            if (speaker === "default") await this.context.setSinkId();
            else await this.context.setSinkId(speaker);
        } catch (e) {
            console.error(`Could not set sinkId to ${speaker}: ${e}`);
            this._speakerID = _prevSpeaker;
        }
    }
    /**
     * Starts audio streaming
     * @private
     * @returns {Promise<true>}
     */
    _start() {
        const streamNode = new AudioWorkletNode(this.context, "stream_processor");
        streamNode.connect(this.context.destination);
        streamNode.port.onmessage = (e)=>{
            const { event: event } = e.data;
            if (event === "stop") {
                streamNode.disconnect();
                this.stream = null;
            } else if (event === "offset") {
                // Replies are keyed by requestId so concurrent
                // getTrackSampleOffset() calls don't clobber each other
                const { requestId: requestId, trackId: trackId, offset: offset } = e.data;
                const currentTime = offset / this.sampleRate;
                this.trackSampleOffsets[requestId] = {
                    trackId: trackId,
                    offset: offset,
                    currentTime: currentTime
                };
            }
        };
        this.analyser.disconnect();
        streamNode.connect(this.analyser);
        this.stream = streamNode;
        return true;
    }
    /**
     * Adds 16BitPCM data to the currently playing audio stream
     * You can add chunks beyond the current play point and they will be queued for play
     * @param {ArrayBuffer|Int16Array} arrayBuffer
     * @param {string} [trackId]
     * @returns {Int16Array|undefined} undefined when trackId was interrupted
     */
    add16BitPCM(arrayBuffer, trackId = "default") {
        if (typeof trackId !== "string") throw new Error(`trackId must be a string`);
        else if (this.interruptedTrackIds[trackId]) return;
        if (!this.stream) this._start();
        let buffer;
        if (arrayBuffer instanceof Int16Array) buffer = arrayBuffer;
        else if (arrayBuffer instanceof ArrayBuffer) buffer = new Int16Array(arrayBuffer);
        else throw new Error(`argument must be Int16Array or ArrayBuffer`);
        this.stream.port.postMessage({
            event: "write",
            buffer: buffer,
            trackId: trackId
        });
        return buffer;
    }
    /**
     * Gets the offset (sample count) of the currently playing stream
     * @param {boolean} [interrupt]
     * @returns {{trackId: string|null, offset: number, currentTime: number}}
     */
    async getTrackSampleOffset(interrupt = false) {
        if (!this.stream) return null;
        const requestId = crypto.randomUUID();
        this.stream.port.postMessage({
            event: interrupt ? "interrupt" : "offset",
            requestId: requestId
        });
        let trackSampleOffset;
        // Poll (1ms ticks) until the worklet answers with this requestId
        while(!trackSampleOffset){
            trackSampleOffset = this.trackSampleOffsets[requestId];
            await new Promise((r)=>setTimeout(()=>r(), 1));
        }
        const { trackId: trackId } = trackSampleOffset;
        if (interrupt && trackId) this.interruptedTrackIds[trackId] = true;
        return trackSampleOffset;
    }
    /**
     * Strips the current stream and returns the sample offset of the audio
     * @param {boolean} [interrupt]
     * @returns {{trackId: string|null, offset: number, currentTime: number}}
     */
    async interrupt() {
        return this.getTrackSampleOffset(true);
    }
}
globalThis.WavStreamPlayer = $1e7ce9484a3c4077$export$9698d62c78b8f366;
555
+
556
+
557
// AudioWorklet processor source for RECORDING (microphone capture), shipped as
// a string and loaded through a Blob object URL.
// NOTE(review): the code inside the template literal executes in the
// AudioWorklet global scope — it must not reference module-level identifiers,
// and its bytes are runtime data (left untouched).
const $2cf6a2d8a6d031bc$var$AudioProcessorWorklet = `
class AudioProcessor extends AudioWorkletProcessor {

constructor() {
super();
this.port.onmessage = this.receive.bind(this);
this.initialize();
}

initialize() {
this.foundAudio = false;
this.recording = false;
this.chunks = [];
}

/**
* Concatenates sampled chunks into channels
* Format is chunk[Left[], Right[]]
*/
readChannelData(chunks, channel = -1, maxChannels = 9) {
let channelLimit;
if (channel !== -1) {
if (chunks[0] && chunks[0].length - 1 < channel) {
throw new Error(
\`Channel \${channel} out of range: max \${chunks[0].length}\`
);
}
channelLimit = channel + 1;
} else {
channel = 0;
channelLimit = Math.min(chunks[0] ? chunks[0].length : 1, maxChannels);
}
const channels = [];
for (let n = channel; n < channelLimit; n++) {
const length = chunks.reduce((sum, chunk) => {
return sum + chunk[n].length;
}, 0);
const buffers = chunks.map((chunk) => chunk[n]);
const result = new Float32Array(length);
let offset = 0;
for (let i = 0; i < buffers.length; i++) {
result.set(buffers[i], offset);
offset += buffers[i].length;
}
channels[n] = result;
}
return channels;
}

/**
* Combines parallel audio data into correct format,
* channels[Left[], Right[]] to float32Array[LRLRLRLR...]
*/
formatAudioData(channels) {
if (channels.length === 1) {
// Simple case is only one channel
const float32Array = channels[0].slice();
const meanValues = channels[0].slice();
return { float32Array, meanValues };
} else {
const float32Array = new Float32Array(
channels[0].length * channels.length
);
const meanValues = new Float32Array(channels[0].length);
for (let i = 0; i < channels[0].length; i++) {
const offset = i * channels.length;
let meanValue = 0;
for (let n = 0; n < channels.length; n++) {
float32Array[offset + n] = channels[n][i];
meanValue += channels[n][i];
}
meanValues[i] = meanValue / channels.length;
}
return { float32Array, meanValues };
}
}

/**
* Converts 32-bit float data to 16-bit integers
*/
floatTo16BitPCM(float32Array) {
const buffer = new ArrayBuffer(float32Array.length * 2);
const view = new DataView(buffer);
let offset = 0;
for (let i = 0; i < float32Array.length; i++, offset += 2) {
let s = Math.max(-1, Math.min(1, float32Array[i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
}
return buffer;
}

/**
* Retrieves the most recent amplitude values from the audio stream
* @param {number} channel
*/
getValues(channel = -1) {
const channels = this.readChannelData(this.chunks, channel);
const { meanValues } = this.formatAudioData(channels);
return { meanValues, channels };
}

/**
* Exports chunks as an audio/wav file
*/
export() {
const channels = this.readChannelData(this.chunks);
const { float32Array, meanValues } = this.formatAudioData(channels);
const audioData = this.floatTo16BitPCM(float32Array);
return {
meanValues: meanValues,
audio: {
bitsPerSample: 16,
channels: channels,
data: audioData,
},
};
}

receive(e) {
const { event, id } = e.data;
let receiptData = {};
switch (event) {
case 'start':
this.recording = true;
break;
case 'stop':
this.recording = false;
break;
case 'clear':
this.initialize();
break;
case 'export':
receiptData = this.export();
break;
case 'read':
receiptData = this.getValues();
break;
default:
break;
}
// Always send back receipt
this.port.postMessage({ event: 'receipt', id, data: receiptData });
}

sendChunk(chunk) {
const channels = this.readChannelData([chunk]);
const { float32Array, meanValues } = this.formatAudioData(channels);
const rawAudioData = this.floatTo16BitPCM(float32Array);
const monoAudioData = this.floatTo16BitPCM(meanValues);
this.port.postMessage({
event: 'chunk',
data: {
mono: monoAudioData,
raw: rawAudioData,
},
});
}

process(inputList, outputList, parameters) {
// Copy input to output (e.g. speakers)
// Note that this creates choppy sounds with Mac products
const sourceLimit = Math.min(inputList.length, outputList.length);
for (let inputNum = 0; inputNum < sourceLimit; inputNum++) {
const input = inputList[inputNum];
const output = outputList[inputNum];
const channelCount = Math.min(input.length, output.length);
for (let channelNum = 0; channelNum < channelCount; channelNum++) {
input[channelNum].forEach((sample, i) => {
output[channelNum][i] = sample;
});
}
}
const inputs = inputList[0];
// There's latency at the beginning of a stream before recording starts
// Make sure we actually receive audio data before we start storing chunks
let sliceIndex = 0;
if (!this.foundAudio) {
for (const channel of inputs) {
sliceIndex = 0; // reset for each channel
if (this.foundAudio) {
break;
}
if (channel) {
for (const value of channel) {
if (value !== 0) {
// find only one non-zero entry in any channel
this.foundAudio = true;
break;
} else {
sliceIndex++;
}
}
}
}
}
if (inputs && inputs[0] && this.foundAudio && this.recording) {
// We need to copy the TypedArray, because the \`process\`
// internals will reuse the same buffer to hold each input
const chunk = inputs.map((input) => input.slice(sliceIndex));
this.chunks.push(chunk);
this.sendChunk(chunk);
}
return true;
}
}

registerProcessor('audio_processor', AudioProcessor);
`;
// Wrap the worklet source in a Blob and expose an object URL; consumers pass
// this URL to audioWorklet.addModule() (see WavRecorder below).
const $2cf6a2d8a6d031bc$var$script = new Blob([
    $2cf6a2d8a6d031bc$var$AudioProcessorWorklet
], {
    type: 'application/javascript'
});
const $2cf6a2d8a6d031bc$var$src = URL.createObjectURL($2cf6a2d8a6d031bc$var$script);
const $2cf6a2d8a6d031bc$export$1f65f50a8cbff43c = $2cf6a2d8a6d031bc$var$src;
772
+
773
+
774
+
775
+
776
+ class $0d471c38435bdab6$export$439b217ca659a877 {
777
/**
 * Create a new WavRecorder instance
 * @param {{sampleRate?: number, outputToSpeakers?: boolean, debug?: boolean}} [options]
 * @returns {WavRecorder}
 */
constructor({ sampleRate: sampleRate = 44100, outputToSpeakers: outputToSpeakers = false, debug: debug = false } = {}){
    // Script source (object URL of the audio_processor worklet)
    this.scriptSrc = (0, $2cf6a2d8a6d031bc$export$1f65f50a8cbff43c);
    // Config
    this.sampleRate = sampleRate;
    this.outputToSpeakers = outputToSpeakers;
    this.debug = !!debug;
    this._deviceChangeCallback = null;
    this._devices = [];
    this.deviceSelection = null;
    // State variables
    this.stream = null;
    this.processor = null;
    this.source = null;
    this.node = null;
    this.recording = false;
    // Event handling with AudioWorklet: monotonically increasing ids matched
    // against receipts in eventReceipts; see _event() below
    this._lastEventId = 0;
    this.eventReceipts = {};
    this.eventTimeout = 5000; // ms to wait for a worklet receipt
    // Process chunks of audio
    this._chunkProcessor = ()=>{};
    this._chunkProcessorSize = void 0;
    this._chunkProcessorBuffer = {
        raw: new ArrayBuffer(0),
        mono: new ArrayBuffer(0)
    };
}
809
/**
 * Decodes audio data from multiple formats to a Blob, url, Float32Array and AudioBuffer
 * @param {Blob|Float32Array|Int16Array|ArrayBuffer|number[]} audioData
 * @param {number} sampleRate
 * @param {number} fromSampleRate
 * @returns {Promise<DecodedAudioType>}
 */
static async decode(audioData, sampleRate = 44100, fromSampleRate = -1) {
    const context = new AudioContext({
        sampleRate: sampleRate
    });
    let arrayBuffer;
    let blob;
    if (audioData instanceof Blob) {
        // Container formats carry their own sample rate
        if (fromSampleRate !== -1) throw new Error(`Can not specify "fromSampleRate" when reading from Blob`);
        blob = audioData;
        arrayBuffer = await blob.arrayBuffer();
    } else if (audioData instanceof ArrayBuffer) {
        if (fromSampleRate !== -1) throw new Error(`Can not specify "fromSampleRate" when reading from ArrayBuffer`);
        arrayBuffer = audioData;
        blob = new Blob([
            arrayBuffer
        ], {
            type: 'audio/wav'
        });
    } else {
        // Raw sample data: normalize to Float32 + Int16, then wrap in a WAV
        // container via WavPacker so decodeAudioData can parse it.
        let float32Array;
        let data;
        if (audioData instanceof Int16Array) {
            data = audioData;
            float32Array = new Float32Array(audioData.length);
            for(let i = 0; i < audioData.length; i++)float32Array[i] = audioData[i] / 0x8000;
        } else if (audioData instanceof Float32Array) float32Array = audioData;
        else if (audioData instanceof Array) float32Array = new Float32Array(audioData);
        else throw new Error(`"audioData" must be one of: Blob, Float32Arrray, Int16Array, ArrayBuffer, Array<number>`);
        if (fromSampleRate === -1) throw new Error(`Must specify "fromSampleRate" when reading from Float32Array, In16Array or Array`);
        else if (fromSampleRate < 3000) throw new Error(`Minimum "fromSampleRate" is 3000 (3kHz)`);
        if (!data) data = (0, $a61750b8fbee4dae$export$13afda237b1c9846).floatTo16BitPCM(float32Array);
        const audio = {
            bitsPerSample: 16,
            channels: [
                float32Array
            ],
            data: data
        };
        const packer = new (0, $a61750b8fbee4dae$export$13afda237b1c9846)();
        const result = packer.pack(fromSampleRate, audio);
        blob = result.blob;
        arrayBuffer = await blob.arrayBuffer();
    }
    const audioBuffer = await context.decodeAudioData(arrayBuffer);
    // Channel-0 amplitudes, for convenience / visualization
    const values = audioBuffer.getChannelData(0);
    const url = URL.createObjectURL(blob);
    return {
        blob: blob,
        url: url,
        values: values,
        audioBuffer: audioBuffer
    };
}
868
+ /**
869
+ * Logs data in debug mode
870
+ * @param {...any} arguments
871
+ * @returns {true}
872
+ */ log() {
873
+ if (this.debug) this.log(...arguments);
874
+ return true;
875
+ }
876
+ /**
877
+ * Retrieves the current sampleRate for the recorder
878
+ * @returns {number}
879
+ */ getSampleRate() {
880
+ return this.sampleRate;
881
+ }
882
+ /**
883
+ * Retrieves the current status of the recording
884
+ * @returns {"ended"|"paused"|"recording"}
885
+ */ getStatus() {
886
+ if (!this.processor) return 'ended';
887
+ else if (!this.recording) return 'paused';
888
+ else return 'recording';
889
+ }
890
+ /**
891
+ * Sends an event to the AudioWorklet
892
+ * @private
893
+ * @param {string} name
894
+ * @param {{[key: string]: any}} data
895
+ * @param {AudioWorkletNode} [_processor]
896
+ * @returns {Promise<{[key: string]: any}>}
897
+ */ async _event(name, data = {}, _processor = null) {
898
+ _processor = _processor || this.processor;
899
+ if (!_processor) throw new Error('Can not send events without recording first');
900
+ const message = {
901
+ event: name,
902
+ id: this._lastEventId++,
903
+ data: data
904
+ };
905
+ _processor.port.postMessage(message);
906
+ const t0 = new Date().valueOf();
907
+ while(!this.eventReceipts[message.id]){
908
+ if (new Date().valueOf() - t0 > this.eventTimeout) throw new Error(`Timeout waiting for "${name}" event`);
909
+ await new Promise((res)=>setTimeout(()=>res(true), 1));
910
+ }
911
+ const payload = this.eventReceipts[message.id];
912
+ delete this.eventReceipts[message.id];
913
+ return payload;
914
+ }
915
/**
 * Sets device change callback, remove if callback provided is `null`
 * @param {(Array<MediaDeviceInfo & {default: boolean}>): void|null} callback
 * @returns {true}
 */ listenForDeviceChange(callback) {
    // Passing null unregisters any previously installed listener.
    if (callback === null && this._deviceChangeCallback) {
        navigator.mediaDevices.removeEventListener('devicechange', this._deviceChangeCallback);
        this._deviceChangeCallback = null;
    } else if (callback !== null) {
        // Basically a debounce; we only want this called once when devices change
        // And we only want the most recent callback() to be executed
        // if a few are operating at the same time
        let lastId = 0;
        let lastDevices = [];
        // Order-insensitive fingerprint of a device list, used to detect real changes.
        const serializeDevices = (devices)=>devices.map((d)=>d.deviceId).sort().join(',');
        const cb = async ()=>{
            let id = ++lastId;
            const devices = await this.listDevices();
            // Only the most recent in-flight invocation may fire the callback.
            if (id === lastId) {
                if (serializeDevices(lastDevices) !== serializeDevices(devices)) {
                    lastDevices = devices;
                    // Hand the caller a copy so internal state can't be mutated.
                    callback(devices.slice());
                }
            }
        };
        navigator.mediaDevices.addEventListener('devicechange', cb);
        // Invoke once immediately so the caller receives the initial device list.
        cb();
        this._deviceChangeCallback = cb;
    }
    return true;
}
946
+ /**
947
+ * Manually request permission to use the microphone
948
+ * @returns {Promise<true>}
949
+ */ async requestPermission() {
950
+ const permissionStatus = await navigator.permissions.query({
951
+ name: 'microphone'
952
+ });
953
+ if (permissionStatus.state === 'denied') window.alert('You must grant microphone access to use this feature.');
954
+ else if (permissionStatus.state === 'prompt') try {
955
+ const stream = await navigator.mediaDevices.getUserMedia({
956
+ audio: true
957
+ });
958
+ const tracks = stream.getTracks();
959
+ tracks.forEach((track)=>track.stop());
960
+ } catch (e) {
961
+ window.alert('You must grant microphone access to use this feature.');
962
+ }
963
+ return true;
964
+ }
965
+ /**
966
+ * List all eligible devices for recording, will request permission to use microphone
967
+ * @returns {Promise<Array<MediaDeviceInfo & {default: boolean}>>}
968
+ */ async listDevices() {
969
+ if (!navigator.mediaDevices || !('enumerateDevices' in navigator.mediaDevices)) throw new Error('Could not request user devices');
970
+ await this.requestPermission();
971
+ const devices = await navigator.mediaDevices.enumerateDevices();
972
+ const audioDevices = devices.filter((device)=>device.kind === 'audioinput');
973
+ return audioDevices;
974
+ // const defaultDeviceIndex = audioDevices.findIndex(
975
+ // (device) => device.deviceId === 'default'
976
+ // );
977
+ // const deviceList = [];
978
+ // if (defaultDeviceIndex !== -1) {
979
+ // let defaultDevice = audioDevices.splice(defaultDeviceIndex, 1)[0];
980
+ // let existingIndex = audioDevices.findIndex(
981
+ // (device) => device.groupId === defaultDevice.groupId
982
+ // );
983
+ // if (existingIndex !== -1) {
984
+ // defaultDevice = audioDevices.splice(existingIndex, 1)[0];
985
+ // }
986
+ // defaultDevice.default = true;
987
+ // deviceList.push(defaultDevice);
988
+ // }
989
+ // return deviceList.concat(audioDevices);
990
+ }
991
+ /**
992
+ * Begins a recording session and requests microphone permissions if not already granted
993
+ * Microphone recording indicator will appear on browser tab but status will be "paused"
994
+ * @param {string} [deviceId] if no device provided, default device will be used
995
+ * @returns {Promise<true>}
996
+ */ async begin(deviceId) {
997
+ if (this.processor) throw new Error(`Already connected: please call .end() to start a new session`);
998
+ if (!navigator.mediaDevices || !('getUserMedia' in navigator.mediaDevices)) throw new Error('Could not request user media');
999
+ deviceId = deviceId ?? this.deviceSelection?.deviceId;
1000
+ try {
1001
+ const config = {
1002
+ audio: true
1003
+ };
1004
+ if (deviceId) config.audio = {
1005
+ deviceId: {
1006
+ exact: deviceId
1007
+ }
1008
+ };
1009
+ this.stream = await navigator.mediaDevices.getUserMedia(config);
1010
+ } catch (err) {
1011
+ throw new Error('Could not start media stream');
1012
+ }
1013
+ this.listDevices().then((devices)=>{
1014
+ deviceId = this.stream.getAudioTracks()[0].getSettings().deviceId;
1015
+ console.log('find current device', devices, deviceId, this.stream.getAudioTracks()[0].getSettings());
1016
+ this.deviceSelection = devices.find((d)=>d.deviceId === deviceId);
1017
+ console.log('current device', this.deviceSelection);
1018
+ });
1019
+ const context = new AudioContext({
1020
+ sampleRate: this.sampleRate
1021
+ });
1022
+ const source = context.createMediaStreamSource(this.stream);
1023
+ // Load and execute the module script.
1024
+ try {
1025
+ await context.audioWorklet.addModule(this.scriptSrc);
1026
+ } catch (e) {
1027
+ console.error(e);
1028
+ throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
1029
+ }
1030
+ const processor = new AudioWorkletNode(context, 'audio_processor');
1031
+ processor.port.onmessage = (e)=>{
1032
+ const { event: event, id: id, data: data } = e.data;
1033
+ if (event === 'receipt') this.eventReceipts[id] = data;
1034
+ else if (event === 'chunk') {
1035
+ if (this._chunkProcessorSize) {
1036
+ const buffer = this._chunkProcessorBuffer;
1037
+ this._chunkProcessorBuffer = {
1038
+ raw: (0, $a61750b8fbee4dae$export$13afda237b1c9846).mergeBuffers(buffer.raw, data.raw),
1039
+ mono: (0, $a61750b8fbee4dae$export$13afda237b1c9846).mergeBuffers(buffer.mono, data.mono)
1040
+ };
1041
+ if (this._chunkProcessorBuffer.mono.byteLength >= this._chunkProcessorSize) {
1042
+ this._chunkProcessor(this._chunkProcessorBuffer);
1043
+ this._chunkProcessorBuffer = {
1044
+ raw: new ArrayBuffer(0),
1045
+ mono: new ArrayBuffer(0)
1046
+ };
1047
+ }
1048
+ } else this._chunkProcessor(data);
1049
+ }
1050
+ };
1051
+ const node = source.connect(processor);
1052
+ const analyser = context.createAnalyser();
1053
+ analyser.fftSize = 8192;
1054
+ analyser.smoothingTimeConstant = 0.1;
1055
+ node.connect(analyser);
1056
+ if (this.outputToSpeakers) {
1057
+ // eslint-disable-next-line no-console
1058
+ console.warn("Warning: Output to speakers may affect sound quality,\nespecially due to system audio feedback preventative measures.\nuse only for debugging");
1059
+ analyser.connect(context.destination);
1060
+ }
1061
+ this.source = source;
1062
+ this.node = node;
1063
+ this.analyser = analyser;
1064
+ this.processor = processor;
1065
+ console.log('begin completed');
1066
+ return true;
1067
+ }
1068
+ /**
1069
+ * Gets the current frequency domain data from the recording track
1070
+ * @param {"frequency"|"music"|"voice"} [analysisType]
1071
+ * @param {number} [minDecibels] default -100
1072
+ * @param {number} [maxDecibels] default -30
1073
+ * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
1074
+ */ getFrequencies(analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
1075
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1076
+ return (0, $5853841ab58516d4$export$2c3136da0bf130f9).getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
1077
+ }
1078
+ /**
1079
+ * Pauses the recording
1080
+ * Keeps microphone stream open but halts storage of audio
1081
+ * @returns {Promise<true>}
1082
+ */ async pause() {
1083
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1084
+ else if (!this.recording) throw new Error('Already paused: please call .record() first');
1085
+ if (this._chunkProcessorBuffer.raw.byteLength) this._chunkProcessor(this._chunkProcessorBuffer);
1086
+ this.log('Pausing ...');
1087
+ await this._event('stop');
1088
+ this.recording = false;
1089
+ return true;
1090
+ }
1091
+ /**
1092
+ * Start recording stream and storing to memory from the connected audio source
1093
+ * @param {(data: { mono: Int16Array; raw: Int16Array }) => any} [chunkProcessor]
1094
+ * @param {number} [chunkSize] chunkProcessor will not be triggered until this size threshold met in mono audio
1095
+ * @returns {Promise<true>}
1096
+ */ async record(chunkProcessor = ()=>{}, chunkSize = 8192) {
1097
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1098
+ else if (this.recording) throw new Error('Already recording: please call .pause() first');
1099
+ else if (typeof chunkProcessor !== 'function') throw new Error(`chunkProcessor must be a function`);
1100
+ this._chunkProcessor = chunkProcessor;
1101
+ this._chunkProcessorSize = chunkSize;
1102
+ this._chunkProcessorBuffer = {
1103
+ raw: new ArrayBuffer(0),
1104
+ mono: new ArrayBuffer(0)
1105
+ };
1106
+ this.log('Recording ...');
1107
+ await this._event('start');
1108
+ this.recording = true;
1109
+ return true;
1110
+ }
1111
+ /**
1112
+ * Clears the audio buffer, empties stored recording
1113
+ * @returns {Promise<true>}
1114
+ */ async clear() {
1115
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1116
+ await this._event('clear');
1117
+ return true;
1118
+ }
1119
+ /**
1120
+ * Reads the current audio stream data
1121
+ * @returns {Promise<{meanValues: Float32Array, channels: Array<Float32Array>}>}
1122
+ */ async read() {
1123
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1124
+ this.log('Reading ...');
1125
+ const result = await this._event('read');
1126
+ return result;
1127
+ }
1128
+ /**
1129
+ * Saves the current audio stream to a file
1130
+ * @param {boolean} [force] Force saving while still recording
1131
+ * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
1132
+ */ async save(force = false) {
1133
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1134
+ if (!force && this.recording) throw new Error('Currently recording: please call .pause() first, or call .save(true) to force');
1135
+ this.log('Exporting ...');
1136
+ const exportData = await this._event('export');
1137
+ const packer = new (0, $a61750b8fbee4dae$export$13afda237b1c9846)();
1138
+ const result = packer.pack(this.sampleRate, exportData.audio);
1139
+ return result;
1140
+ }
1141
+ /**
1142
+ * Ends the current recording session and saves the result
1143
+ * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
1144
+ */ async end() {
1145
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1146
+ const _processor = this.processor;
1147
+ this.log('Stopping ...');
1148
+ await this._event('stop');
1149
+ this.recording = false;
1150
+ const tracks = this.stream.getTracks();
1151
+ tracks.forEach((track)=>track.stop());
1152
+ this.log('Exporting ...');
1153
+ const exportData = await this._event('export', {}, _processor);
1154
+ this.processor.disconnect();
1155
+ this.source.disconnect();
1156
+ this.node.disconnect();
1157
+ this.analyser.disconnect();
1158
+ this.stream = null;
1159
+ this.processor = null;
1160
+ this.source = null;
1161
+ this.node = null;
1162
+ const packer = new (0, $a61750b8fbee4dae$export$13afda237b1c9846)();
1163
+ const result = packer.pack(this.sampleRate, exportData.audio);
1164
+ return result;
1165
+ }
1166
+ /**
1167
+ * Performs a full cleanup of WavRecorder instance
1168
+ * Stops actively listening via microphone and removes existing listeners
1169
+ * @returns {Promise<true>}
1170
+ */ async quit() {
1171
+ this.listenForDeviceChange(null);
1172
+ // we do not reset this on end so that selections persist across starts
1173
+ this.deviceSelection = null;
1174
+ if (this.processor) await this.end();
1175
+ return true;
1176
+ }
1177
+ }
1178
// Expose the bundled WavRecorder class on the global scope for non-module consumers.
globalThis.WavRecorder = $0d471c38435bdab6$export$439b217ca659a877;
1179
+
1180
+
1181
+
1182
+
1183
+
1184
class $7cef7a69bdf8f84d$export$2934cf2d25c67a48 {
    /**
     * Create a new MediaStreamRecorder instance
     * @param {{sampleRate?: number, outputToSpeakers?: boolean, debug?: boolean}} [options]
     * @returns {MediaStreamRecorder}
     */ constructor({ sampleRate: sampleRate = 44100, outputToSpeakers: outputToSpeakers = false, debug: debug = false } = {}){
        // Script source
        this.scriptSrc = (0, $2cf6a2d8a6d031bc$export$1f65f50a8cbff43c);
        // Config
        this.sampleRate = sampleRate;
        this.outputToSpeakers = outputToSpeakers;
        this.debug = !!debug;
        // State variables
        this.stream = null;
        this.processor = null;
        this.source = null;
        this.node = null;
        this.recording = false;
        // Event handling with AudioWorklet
        this._lastEventId = 0;
        this.eventReceipts = {};
        this.eventTimeout = 5000;
        // Process chunks of audio
        this._chunkProcessor = ()=>{};
        this._chunkProcessorSize = void 0;
        this._chunkProcessorBuffer = {
            raw: new ArrayBuffer(0),
            mono: new ArrayBuffer(0)
        };
    }
    /**
     * Logs data in debug mode.
     * Fix: previously called `this.log(...arguments)` recursively, which blew
     * the stack whenever debug was enabled; delegate to console.log instead.
     * @param {...any} arguments
     * @returns {true}
     */ log() {
        if (this.debug) console.log(...arguments);
        return true;
    }
    /**
     * Retrieves the current sampleRate for the recorder
     * @returns {number}
     */ getSampleRate() {
        return this.sampleRate;
    }
    /**
     * Retrieves the current status of the recording
     * @returns {"ended"|"paused"|"recording"}
     */ getStatus() {
        if (!this.processor) return "ended";
        else if (!this.recording) return "paused";
        else return "recording";
    }
    /**
     * Sends an event to the AudioWorklet and waits for its receipt.
     * @private
     * @param {string} name
     * @param {{[key: string]: any}} data
     * @param {AudioWorkletNode} [_processor]
     * @returns {Promise<{[key: string]: any}>}
     */ async _event(name, data = {}, _processor = null) {
        _processor = _processor || this.processor;
        if (!_processor) throw new Error("Can not send events without recording first");
        const message = {
            event: name,
            id: this._lastEventId++,
            data: data
        };
        _processor.port.postMessage(message);
        const t0 = new Date().valueOf();
        // Poll until the worklet acknowledges the event or the timeout elapses.
        while(!this.eventReceipts[message.id]){
            if (new Date().valueOf() - t0 > this.eventTimeout) throw new Error(`Timeout waiting for "${name}" event`);
            await new Promise((res)=>setTimeout(()=>res(true), 1));
        }
        const payload = this.eventReceipts[message.id];
        delete this.eventReceipts[message.id];
        return payload;
    }
    /**
     * Begins a recording session for the given audioTrack
     * Microphone recording indicator will appear on browser tab but status will be "paused"
     * @param {MediaStreamTrack} [audioTrack] if no device provided, default device will be used
     * @returns {Promise<true>}
     */ async begin(audioTrack) {
        if (this.processor) throw new Error(`Already connected: please call .end() to start a new session`);
        if (!audioTrack || audioTrack.kind !== "audio") throw new Error("No audio track provided");
        this.stream = new MediaStream([
            audioTrack
        ]);
        const context = new AudioContext({
            sampleRate: this.sampleRate
        });
        const source = context.createMediaStreamSource(this.stream);
        // Load and execute the module script.
        try {
            await context.audioWorklet.addModule(this.scriptSrc);
        } catch (e) {
            console.error(e);
            throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
        }
        const processor = new AudioWorkletNode(context, "audio_processor");
        processor.port.onmessage = (e)=>{
            const { event: event, id: id, data: data } = e.data;
            if (event === "receipt") this.eventReceipts[id] = data;
            else if (event === "chunk") {
                if (this._chunkProcessorSize) {
                    // Accumulate chunks until the configured size threshold is met.
                    const buffer = this._chunkProcessorBuffer;
                    this._chunkProcessorBuffer = {
                        raw: (0, $a61750b8fbee4dae$export$13afda237b1c9846).mergeBuffers(buffer.raw, data.raw),
                        mono: (0, $a61750b8fbee4dae$export$13afda237b1c9846).mergeBuffers(buffer.mono, data.mono)
                    };
                    if (this._chunkProcessorBuffer.mono.byteLength >= this._chunkProcessorSize) {
                        this._chunkProcessor(this._chunkProcessorBuffer);
                        this._chunkProcessorBuffer = {
                            raw: new ArrayBuffer(0),
                            mono: new ArrayBuffer(0)
                        };
                    }
                } else this._chunkProcessor(data);
            }
        };
        const node = source.connect(processor);
        const analyser = context.createAnalyser();
        analyser.fftSize = 8192;
        analyser.smoothingTimeConstant = 0.1;
        node.connect(analyser);
        if (this.outputToSpeakers) {
            // eslint-disable-next-line no-console
            console.warn("Warning: Output to speakers may affect sound quality,\nespecially due to system audio feedback preventative measures.\nuse only for debugging");
            analyser.connect(context.destination);
        }
        this.source = source;
        this.node = node;
        this.analyser = analyser;
        this.processor = processor;
        return true;
    }
    /**
     * Gets the current frequency domain data from the recording track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
     */ getFrequencies(analysisType = "frequency", minDecibels = -100, maxDecibels = -30) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        return (0, $5853841ab58516d4$export$2c3136da0bf130f9).getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Pauses the recording
     * Keeps microphone stream open but halts storage of audio
     * @returns {Promise<true>}
     */ async pause() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        else if (!this.recording) throw new Error("Already paused: please call .record() first");
        // Flush any partially accumulated chunk before stopping.
        if (this._chunkProcessorBuffer.raw.byteLength) this._chunkProcessor(this._chunkProcessorBuffer);
        this.log("Pausing ...");
        await this._event("stop");
        this.recording = false;
        return true;
    }
    /**
     * Start recording stream and storing to memory from the connected audio source
     * Fix: removed stray "HELLO" debug text from the already-recording error message.
     * @param {(data: { mono: Int16Array; raw: Int16Array }) => any} [chunkProcessor]
     * @param {number} [chunkSize] chunkProcessor will not be triggered until this size threshold met in mono audio
     * @returns {Promise<true>}
     */ async record(chunkProcessor = ()=>{}, chunkSize = 8192) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        else if (this.recording) throw new Error("Already recording: please call .pause() first");
        else if (typeof chunkProcessor !== "function") throw new Error(`chunkProcessor must be a function`);
        this._chunkProcessor = chunkProcessor;
        this._chunkProcessorSize = chunkSize;
        this._chunkProcessorBuffer = {
            raw: new ArrayBuffer(0),
            mono: new ArrayBuffer(0)
        };
        this.log("Recording ...");
        await this._event("start");
        this.recording = true;
        return true;
    }
    /**
     * Clears the audio buffer, empties stored recording
     * @returns {Promise<true>}
     */ async clear() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        await this._event("clear");
        return true;
    }
    /**
     * Reads the current audio stream data
     * @returns {Promise<{meanValues: Float32Array, channels: Array<Float32Array>}>}
     */ async read() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        this.log("Reading ...");
        const result = await this._event("read");
        return result;
    }
    /**
     * Saves the current audio stream to a file
     * @param {boolean} [force] Force saving while still recording
     * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
     */ async save(force = false) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        if (!force && this.recording) throw new Error("Currently recording: please call .pause() first, or call .save(true) to force");
        this.log("Exporting ...");
        const exportData = await this._event("export");
        const packer = new (0, $a61750b8fbee4dae$export$13afda237b1c9846)();
        const result = packer.pack(this.sampleRate, exportData.audio);
        return result;
    }
    /**
     * Ends the current recording session and saves the result
     * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
     */ async end() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        const _processor = this.processor;
        this.log("Stopping ...");
        await this._event("stop");
        this.recording = false;
        this.log("Exporting ...");
        const exportData = await this._event("export", {}, _processor);
        this.processor.disconnect();
        this.source.disconnect();
        this.node.disconnect();
        this.analyser.disconnect();
        this.stream = null;
        this.processor = null;
        this.source = null;
        this.node = null;
        const packer = new (0, $a61750b8fbee4dae$export$13afda237b1c9846)();
        const result = packer.pack(this.sampleRate, exportData.audio);
        return result;
    }
    /**
     * Performs a full cleanup of the recorder instance.
     * Fix: previously called this.listenForDeviceChange(null), a method this
     * class does not define, so every quit() threw a TypeError before cleanup.
     * @returns {Promise<true>}
     */ async quit() {
        if (this.processor) await this.end();
        return true;
    }
}
1426
// Fix: this line previously re-assigned `globalThis.WavRecorder = WavRecorder`
// (a no-op self-assignment of the global registered after the WavRecorder
// class above); the class defined immediately above is the MediaStreamRecorder,
// so register that instead, mirroring the WavRecorder registration.
globalThis.MediaStreamRecorder = $7cef7a69bdf8f84d$export$2934cf2d25c67a48;
1427
+
1428
+
1429
+
1430
+
1431
// TypeScript down-level helper (tslib __extends): wires up prototype-based
// single inheritance for the ES5-emitted classes below.
var $23859655abfc5f5c$var$__extends = undefined && undefined.__extends || function() {
    // Picks the best available mechanism to copy the static side of the base
    // constructor onto the derived constructor.
    var extendStatics = function(d, b) {
        extendStatics = Object.setPrototypeOf || ({
            __proto__: []
        }) instanceof Array && function(d, b) {
            d.__proto__ = b;
        } || function(d, b) {
            for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
        };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        // Chain the instance prototypes: d.prototype -> b.prototype.
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
}();
1451
// TypeScript down-level helper (tslib __awaiter): drives a generator-based
// coroutine, resolving the returned Promise when the generator completes.
var $23859655abfc5f5c$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
    // Wraps a possibly-plain value in the Promise implementation P.
    function adopt(value) {
        return value instanceof P ? value : new P(function(resolve) {
            resolve(value);
        });
    }
    return new (P || (P = Promise))(function(resolve, reject) {
        function fulfilled(value) {
            try {
                step(generator.next(value));
            } catch (e) {
                reject(e);
            }
        }
        function rejected(value) {
            try {
                step(generator["throw"](value));
            } catch (e) {
                reject(e);
            }
        }
        // Advances the coroutine one step each time an awaited value settles.
        function step(result) {
            result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
        }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
1478
// TypeScript down-level helper (tslib __generator): emulates generator
// semantics via a label-based state machine. The opcodes in op[0] are the
// standard tslib codes (0/1 next-throw, 2 return, 4 yield, 5 yield*,
// 6 catch, 7 endfinally); do not modify — generated code depends on the
// exact statement order here.
var $23859655abfc5f5c$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
    var _ = {
        label: 0,
        sent: function() {
            if (t[0] & 1) throw t[1];
            return t[1];
        },
        trys: [],
        ops: []
    }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
    return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
        return this;
    }), g;
    function verb(n) {
        return function(v) {
            return step([
                n,
                v
            ]);
        };
    }
    // Executes one transition of the state machine for operation `op`.
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while(g && (g = 0, op[0] && (_ = 0)), _)try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y, 0)) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [
                op[0] & 2,
                t.value
            ];
            switch(op[0]){
                case 0:
                case 1:
                    t = op;
                    break;
                case 4:
                    _.label++;
                    return {
                        value: op[1],
                        done: false
                    };
                case 5:
                    _.label++;
                    y = op[1];
                    op = [
                        0
                    ];
                    continue;
                case 7:
                    op = _.ops.pop();
                    _.trys.pop();
                    continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
                        _ = 0;
                        continue;
                    }
                    if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
                        _.label = op[1];
                        break;
                    }
                    if (op[0] === 6 && _.label < t[1]) {
                        _.label = t[1];
                        t = op;
                        break;
                    }
                    if (t && _.label < t[2]) {
                        _.label = t[2];
                        _.ops.push(op);
                        break;
                    }
                    if (t[2]) _.ops.pop();
                    _.trys.pop();
                    continue;
            }
            op = body.call(thisArg, _);
        } catch (e) {
            op = [
                6,
                e
            ];
            y = 0;
        } finally{
            f = t = 0;
        }
        if (op[0] & 5) throw op[1];
        return {
            value: op[0] ? op[1] : void 0,
            done: true
        };
    }
};
1569
var $23859655abfc5f5c$export$4a0c46dbbe2ddb67 = /** @class */ function() {
    /**
     * Base class holding shared RTVI media state: registered callbacks and
     * the current microphone / camera enablement flags.
     */
    function MediaManager() {
        this._callbacks = {};
        this._micEnabled = true;
        this._camEnabled = false;
    }
    /**
     * Registers the callback that receives captured user audio.
     */
    MediaManager.prototype.setUserAudioCallback = function(userAudioCallback) {
        this._userAudioCallback = userAudioCallback;
    };
    /**
     * Stores RTVI options. Ignored when options are already set, unless
     * `override` is true. Nullish fields fall back to their defaults.
     */
    MediaManager.prototype.setRTVIOptions = function(options, override) {
        if (override === void 0) override = false;
        if (this._options && !override) return;
        this._options = options;
        var callbacks = options.callbacks;
        var enableMic = options.enableMic;
        var enableCam = options.enableCam;
        // `== null` matches both null and undefined, mirroring `??`.
        this._callbacks = callbacks == null ? {} : callbacks;
        this._micEnabled = enableMic == null ? true : enableMic;
        this._camEnabled = enableCam == null ? false : enableCam;
    };
    return MediaManager;
}();
1589
+ var $23859655abfc5f5c$export$45c5b9bfba2f6304 = /** @class */ function(_super) {
1590
+ $23859655abfc5f5c$var$__extends(WavMediaManager, _super);
1591
+ function WavMediaManager(recorderChunkSize, recorderSampleRate) {
1592
+ if (recorderChunkSize === void 0) recorderChunkSize = undefined;
1593
+ if (recorderSampleRate === void 0) recorderSampleRate = 24000;
1594
+ var _this = _super.call(this) || this;
1595
+ _this._initialized = false;
1596
+ _this._recorderChunkSize = undefined;
1597
+ _this._recorderChunkSize = recorderChunkSize;
1598
+ _this._wavRecorder = new (0, $0d471c38435bdab6$export$439b217ca659a877)({
1599
+ sampleRate: recorderSampleRate
1600
+ });
1601
+ _this._wavStreamPlayer = new (0, $1e7ce9484a3c4077$export$9698d62c78b8f366)({
1602
+ sampleRate: 24000
1603
+ });
1604
+ return _this;
1605
+ }
1606
+ WavMediaManager.prototype.initialize = function() {
1607
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1608
+ return $23859655abfc5f5c$var$__generator(this, function(_a) {
1609
+ switch(_a.label){
1610
+ case 0:
1611
+ return [
1612
+ 4 /*yield*/ ,
1613
+ this._wavRecorder.begin()
1614
+ ];
1615
+ case 1:
1616
+ _a.sent();
1617
+ this._wavRecorder.listenForDeviceChange(null);
1618
+ this._wavRecorder.listenForDeviceChange(this._handleAvailableDevicesUpdated.bind(this));
1619
+ return [
1620
+ 4 /*yield*/ ,
1621
+ this._wavStreamPlayer.connect()
1622
+ ];
1623
+ case 2:
1624
+ _a.sent();
1625
+ this._initialized = true;
1626
+ return [
1627
+ 2 /*return*/
1628
+ ];
1629
+ }
1630
+ });
1631
+ });
1632
+ };
1633
+ WavMediaManager.prototype.connect = function() {
1634
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1635
+ var isAlreadyRecording;
1636
+ return $23859655abfc5f5c$var$__generator(this, function(_a) {
1637
+ switch(_a.label){
1638
+ case 0:
1639
+ if (!!this._initialized) return [
1640
+ 3 /*break*/ ,
1641
+ 2
1642
+ ];
1643
+ return [
1644
+ 4 /*yield*/ ,
1645
+ this.initialize()
1646
+ ];
1647
+ case 1:
1648
+ _a.sent();
1649
+ _a.label = 2;
1650
+ case 2:
1651
+ isAlreadyRecording = this._wavRecorder.getStatus() == "recording";
1652
+ if (!(this._micEnabled && !isAlreadyRecording)) return [
1653
+ 3 /*break*/ ,
1654
+ 4
1655
+ ];
1656
+ return [
1657
+ 4 /*yield*/ ,
1658
+ this._startRecording()
1659
+ ];
1660
+ case 3:
1661
+ _a.sent();
1662
+ _a.label = 4;
1663
+ case 4:
1664
+ return [
1665
+ 2 /*return*/
1666
+ ];
1667
+ }
1668
+ });
1669
+ });
1670
+ };
1671
+ WavMediaManager.prototype.disconnect = function() {
1672
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1673
+ return $23859655abfc5f5c$var$__generator(this, function(_a) {
1674
+ switch(_a.label){
1675
+ case 0:
1676
+ if (!this._initialized) return [
1677
+ 2 /*return*/
1678
+ ];
1679
+ return [
1680
+ 4 /*yield*/ ,
1681
+ this._wavRecorder.end()
1682
+ ];
1683
+ case 1:
1684
+ _a.sent();
1685
+ return [
1686
+ 4 /*yield*/ ,
1687
+ this._wavStreamPlayer.interrupt()
1688
+ ];
1689
+ case 2:
1690
+ _a.sent();
1691
+ this._initialized = false;
1692
+ return [
1693
+ 2 /*return*/
1694
+ ];
1695
+ }
1696
+ });
1697
+ });
1698
+ };
1699
+ WavMediaManager.prototype.userStartedSpeaking = function() {
1700
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1701
+ return $23859655abfc5f5c$var$__generator(this, function(_a) {
1702
+ return [
1703
+ 2 /*return*/ ,
1704
+ this._wavStreamPlayer.interrupt()
1705
+ ];
1706
+ });
1707
+ });
1708
+ };
1709
+ WavMediaManager.prototype.bufferBotAudio = function(data, id) {
1710
+ return this._wavStreamPlayer.add16BitPCM(data, id);
1711
+ };
1712
+ WavMediaManager.prototype.getAllMics = function() {
1713
+ return this._wavRecorder.listDevices();
1714
+ };
1715
+ WavMediaManager.prototype.getAllCams = function() {
1716
+ // TODO: Video not supported yet
1717
+ return Promise.resolve([]);
1718
+ };
1719
+ WavMediaManager.prototype.getAllSpeakers = function() {
1720
+ // TODO: Implement speaker support
1721
+ return Promise.resolve([]);
1722
+ };
1723
+ WavMediaManager.prototype.updateMic = function(micId) {
1724
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1725
+ var prevMic, curMic;
1726
+ var _a, _b;
1727
+ return $23859655abfc5f5c$var$__generator(this, function(_c) {
1728
+ switch(_c.label){
1729
+ case 0:
1730
+ prevMic = this._wavRecorder.deviceSelection;
1731
+ return [
1732
+ 4 /*yield*/ ,
1733
+ this._wavRecorder.end()
1734
+ ];
1735
+ case 1:
1736
+ _c.sent();
1737
+ return [
1738
+ 4 /*yield*/ ,
1739
+ this._wavRecorder.begin(micId)
1740
+ ];
1741
+ case 2:
1742
+ _c.sent();
1743
+ if (!this._micEnabled) return [
1744
+ 3 /*break*/ ,
1745
+ 4
1746
+ ];
1747
+ return [
1748
+ 4 /*yield*/ ,
1749
+ this._startRecording()
1750
+ ];
1751
+ case 3:
1752
+ _c.sent();
1753
+ _c.label = 4;
1754
+ case 4:
1755
+ curMic = this._wavRecorder.deviceSelection;
1756
+ if (curMic && prevMic && prevMic.label !== curMic.label) (_b = (_a = this._callbacks).onMicUpdated) === null || _b === void 0 || _b.call(_a, curMic);
1757
+ return [
1758
+ 2 /*return*/
1759
+ ];
1760
+ }
1761
+ });
1762
+ });
1763
+ };
1764
+ WavMediaManager.prototype.updateCam = function(camId) {
1765
+ // TODO: Video not supported yet
1766
+ };
1767
+ WavMediaManager.prototype.updateSpeaker = function(speakerId) {
1768
+ // TODO: Implement speaker support
1769
+ };
1770
+ Object.defineProperty(WavMediaManager.prototype, "selectedMic", {
1771
+ get: function() {
1772
+ var _a;
1773
+ return (_a = this._wavRecorder.deviceSelection) !== null && _a !== void 0 ? _a : {};
1774
+ },
1775
+ enumerable: false,
1776
+ configurable: true
1777
+ });
1778
+ Object.defineProperty(WavMediaManager.prototype, "selectedCam", {
1779
+ get: function() {
1780
+ // TODO: Video not supported yet
1781
+ return {};
1782
+ },
1783
+ enumerable: false,
1784
+ configurable: true
1785
+ });
1786
+ Object.defineProperty(WavMediaManager.prototype, "selectedSpeaker", {
1787
+ get: function() {
1788
+ // TODO: Implement speaker support
1789
+ return {};
1790
+ },
1791
+ enumerable: false,
1792
+ configurable: true
1793
+ });
1794
+ WavMediaManager.prototype.enableMic = function(enable) {
1795
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, Promise, function() {
1796
+ var _this = this;
1797
+ return $23859655abfc5f5c$var$__generator(this, function(_a) {
1798
+ switch(_a.label){
1799
+ case 0:
1800
+ this._micEnabled = enable;
1801
+ if (!this._wavRecorder.stream) return [
1802
+ 2 /*return*/
1803
+ ];
1804
+ this._wavRecorder.stream.getAudioTracks().forEach(function(track) {
1805
+ var _a, _b;
1806
+ track.enabled = enable;
1807
+ if (!enable) (_b = (_a = _this._callbacks).onTrackStopped) === null || _b === void 0 || _b.call(_a, track, $23859655abfc5f5c$var$localParticipant());
1808
+ });
1809
+ if (!enable) return [
1810
+ 3 /*break*/ ,
1811
+ 2
1812
+ ];
1813
+ return [
1814
+ 4 /*yield*/ ,
1815
+ this._startRecording()
1816
+ ];
1817
+ case 1:
1818
+ _a.sent();
1819
+ return [
1820
+ 3 /*break*/ ,
1821
+ 4
1822
+ ];
1823
+ case 2:
1824
+ return [
1825
+ 4 /*yield*/ ,
1826
+ this._wavRecorder.pause()
1827
+ ];
1828
+ case 3:
1829
+ _a.sent();
1830
+ _a.label = 4;
1831
+ case 4:
1832
+ return [
1833
+ 2 /*return*/
1834
+ ];
1835
+ }
1836
+ });
1837
+ });
1838
+ };
1839
+ WavMediaManager.prototype.enableCam = function(enable) {
1840
+ // TODO: Video not supported yet
1841
+ };
1842
+ Object.defineProperty(WavMediaManager.prototype, "isCamEnabled", {
1843
+ get: function() {
1844
+ // TODO: Video not supported yet
1845
+ return false;
1846
+ },
1847
+ enumerable: false,
1848
+ configurable: true
1849
+ });
1850
+ Object.defineProperty(WavMediaManager.prototype, "isMicEnabled", {
1851
+ get: function() {
1852
+ return this._micEnabled;
1853
+ },
1854
+ enumerable: false,
1855
+ configurable: true
1856
+ });
1857
+ WavMediaManager.prototype.tracks = function() {
1858
+ var _a;
1859
+ var tracks = (_a = this._wavRecorder.stream) === null || _a === void 0 ? void 0 : _a.getTracks()[0];
1860
+ return {
1861
+ local: tracks ? {
1862
+ audio: tracks
1863
+ } : {}
1864
+ };
1865
+ };
1866
+ WavMediaManager.prototype._startRecording = function() {
1867
+ return $23859655abfc5f5c$var$__awaiter(this, void 0, void 0, function() {
1868
+ var track;
1869
+ var _this = this;
1870
+ var _a, _b, _c;
1871
+ return $23859655abfc5f5c$var$__generator(this, function(_d) {
1872
+ switch(_d.label){
1873
+ case 0:
1874
+ return [
1875
+ 4 /*yield*/ ,
1876
+ this._wavRecorder.record(function(data) {
1877
+ _this._userAudioCallback(data.mono);
1878
+ }, this._recorderChunkSize)
1879
+ ];
1880
+ case 1:
1881
+ _d.sent();
1882
+ track = (_a = this._wavRecorder.stream) === null || _a === void 0 ? void 0 : _a.getAudioTracks()[0];
1883
+ if (track) (_c = (_b = this._callbacks).onTrackStarted) === null || _c === void 0 || _c.call(_b, track, $23859655abfc5f5c$var$localParticipant());
1884
+ return [
1885
+ 2 /*return*/
1886
+ ];
1887
+ }
1888
+ });
1889
+ });
1890
+ };
1891
+ WavMediaManager.prototype._handleAvailableDevicesUpdated = function(devices) {
1892
+ var _a, _b, _c, _d;
1893
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, devices.filter(function(d) {
1894
+ return d.kind === "videoinput";
1895
+ }));
1896
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, devices.filter(function(d) {
1897
+ return d.kind === "audioinput";
1898
+ }));
1899
+ // if the current device went away or we're using the default and
1900
+ // the default changed, reset the mic.
1901
+ var defaultDevice = devices.find(function(d) {
1902
+ return d.deviceId === "default";
1903
+ });
1904
+ var currentDevice = this._wavRecorder.deviceSelection;
1905
+ if (currentDevice && (!devices.some(function(d) {
1906
+ return d.deviceId === currentDevice.deviceId;
1907
+ }) || currentDevice.deviceId === "default" && currentDevice.label !== (defaultDevice === null || defaultDevice === void 0 ? void 0 : defaultDevice.label))) this.updateMic("");
1908
+ };
1909
+ return WavMediaManager;
1910
+ }($23859655abfc5f5c$export$4a0c46dbbe2ddb67);
1911
+ var $23859655abfc5f5c$var$localParticipant = function() {
1912
+ return {
1913
+ id: "local",
1914
+ name: "",
1915
+ local: true
1916
+ };
1917
+ };
1918
+
1919
+
1920
+
1921
+
1922
+
1923
+ var $1c088932741d88e6$var$__extends = undefined && undefined.__extends || function() {
1924
+ var extendStatics = function(d, b) {
1925
+ extendStatics = Object.setPrototypeOf || ({
1926
+ __proto__: []
1927
+ }) instanceof Array && function(d, b) {
1928
+ d.__proto__ = b;
1929
+ } || function(d, b) {
1930
+ for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
1931
+ };
1932
+ return extendStatics(d, b);
1933
+ };
1934
+ return function(d, b) {
1935
+ if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
1936
+ extendStatics(d, b);
1937
+ function __() {
1938
+ this.constructor = d;
1939
+ }
1940
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
1941
+ };
1942
+ }();
1943
+ var $1c088932741d88e6$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
1944
+ function adopt(value) {
1945
+ return value instanceof P ? value : new P(function(resolve) {
1946
+ resolve(value);
1947
+ });
1948
+ }
1949
+ return new (P || (P = Promise))(function(resolve, reject) {
1950
+ function fulfilled(value) {
1951
+ try {
1952
+ step(generator.next(value));
1953
+ } catch (e) {
1954
+ reject(e);
1955
+ }
1956
+ }
1957
+ function rejected(value) {
1958
+ try {
1959
+ step(generator["throw"](value));
1960
+ } catch (e) {
1961
+ reject(e);
1962
+ }
1963
+ }
1964
+ function step(result) {
1965
+ result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
1966
+ }
1967
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
1968
+ });
1969
+ };
1970
+ var $1c088932741d88e6$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
1971
+ var _ = {
1972
+ label: 0,
1973
+ sent: function() {
1974
+ if (t[0] & 1) throw t[1];
1975
+ return t[1];
1976
+ },
1977
+ trys: [],
1978
+ ops: []
1979
+ }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
1980
+ return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
1981
+ return this;
1982
+ }), g;
1983
+ function verb(n) {
1984
+ return function(v) {
1985
+ return step([
1986
+ n,
1987
+ v
1988
+ ]);
1989
+ };
1990
+ }
1991
+ function step(op) {
1992
+ if (f) throw new TypeError("Generator is already executing.");
1993
+ while(g && (g = 0, op[0] && (_ = 0)), _)try {
1994
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
1995
+ if (y = 0, t) op = [
1996
+ op[0] & 2,
1997
+ t.value
1998
+ ];
1999
+ switch(op[0]){
2000
+ case 0:
2001
+ case 1:
2002
+ t = op;
2003
+ break;
2004
+ case 4:
2005
+ _.label++;
2006
+ return {
2007
+ value: op[1],
2008
+ done: false
2009
+ };
2010
+ case 5:
2011
+ _.label++;
2012
+ y = op[1];
2013
+ op = [
2014
+ 0
2015
+ ];
2016
+ continue;
2017
+ case 7:
2018
+ op = _.ops.pop();
2019
+ _.trys.pop();
2020
+ continue;
2021
+ default:
2022
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
2023
+ _ = 0;
2024
+ continue;
2025
+ }
2026
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
2027
+ _.label = op[1];
2028
+ break;
2029
+ }
2030
+ if (op[0] === 6 && _.label < t[1]) {
2031
+ _.label = t[1];
2032
+ t = op;
2033
+ break;
2034
+ }
2035
+ if (t && _.label < t[2]) {
2036
+ _.label = t[2];
2037
+ _.ops.push(op);
2038
+ break;
2039
+ }
2040
+ if (t[2]) _.ops.pop();
2041
+ _.trys.pop();
2042
+ continue;
2043
+ }
2044
+ op = body.call(thisArg, _);
2045
+ } catch (e) {
2046
+ op = [
2047
+ 6,
2048
+ e
2049
+ ];
2050
+ y = 0;
2051
+ } finally{
2052
+ f = t = 0;
2053
+ }
2054
+ if (op[0] & 5) throw op[1];
2055
+ return {
2056
+ value: op[0] ? op[1] : void 0,
2057
+ done: true
2058
+ };
2059
+ }
2060
+ };
2061
+ var $1c088932741d88e6$export$c95c65abc5f47125 = /** @class */ function(_super) {
2062
+ $1c088932741d88e6$var$__extends(DailyMediaManager, _super);
2063
+ function DailyMediaManager(enablePlayer, enableRecording, onTrackStartedCallback, onTrackStoppedCallback, recorderChunkSize, recorderSampleRate) {
2064
+ if (enablePlayer === void 0) enablePlayer = true;
2065
+ if (enableRecording === void 0) enableRecording = true;
2066
+ if (recorderChunkSize === void 0) recorderChunkSize = undefined;
2067
+ if (recorderSampleRate === void 0) recorderSampleRate = 24000;
2068
+ var _a;
2069
+ var _this = _super.call(this) || this;
2070
+ _this._selectedCam = {};
2071
+ _this._selectedMic = {};
2072
+ _this._selectedSpeaker = {};
2073
+ _this._remoteAudioLevelInterval = null;
2074
+ _this._recorderChunkSize = undefined;
2075
+ _this._initialized = false;
2076
+ _this._connected = false;
2077
+ _this._currentAudioTrack = null;
2078
+ _this._connectResolve = null;
2079
+ _this.onTrackStartedCallback = onTrackStartedCallback;
2080
+ _this.onTrackStoppedCallback = onTrackStoppedCallback;
2081
+ _this._recorderChunkSize = recorderChunkSize;
2082
+ _this._daily = (_a = (0, ($parcel$interopDefault($fkNis$dailycodailyjs))).getCallInstance()) !== null && _a !== void 0 ? _a : (0, ($parcel$interopDefault($fkNis$dailycodailyjs))).createCallObject();
2083
+ if (enableRecording) _this._mediaStreamRecorder = new (0, $7cef7a69bdf8f84d$export$2934cf2d25c67a48)({
2084
+ sampleRate: recorderSampleRate
2085
+ });
2086
+ if (enablePlayer) _this._wavStreamPlayer = new (0, $1e7ce9484a3c4077$export$9698d62c78b8f366)({
2087
+ sampleRate: 24000
2088
+ });
2089
+ _this._daily.on("track-started", _this.handleTrackStarted.bind(_this));
2090
+ _this._daily.on("track-stopped", _this.handleTrackStopped.bind(_this));
2091
+ _this._daily.on("available-devices-updated", _this._handleAvailableDevicesUpdated.bind(_this));
2092
+ _this._daily.on("selected-devices-updated", _this._handleSelectedDevicesUpdated.bind(_this));
2093
+ _this._daily.on("local-audio-level", _this._handleLocalAudioLevel.bind(_this));
2094
+ return _this;
2095
+ }
2096
+ DailyMediaManager.prototype.initialize = function() {
2097
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2098
+ var infos, devices, cams, mics, speakers;
2099
+ var _this = this;
2100
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
2101
+ return $1c088932741d88e6$var$__generator(this, function(_o) {
2102
+ switch(_o.label){
2103
+ case 0:
2104
+ if (this._initialized) {
2105
+ console.warn("DailyMediaManager already initialized");
2106
+ return [
2107
+ 2 /*return*/
2108
+ ];
2109
+ }
2110
+ return [
2111
+ 4 /*yield*/ ,
2112
+ this._daily.startCamera({
2113
+ startVideoOff: !this._camEnabled,
2114
+ startAudioOff: !this._micEnabled
2115
+ })
2116
+ ];
2117
+ case 1:
2118
+ infos = _o.sent();
2119
+ return [
2120
+ 4 /*yield*/ ,
2121
+ this._daily.enumerateDevices()
2122
+ ];
2123
+ case 2:
2124
+ devices = _o.sent().devices;
2125
+ cams = devices.filter(function(d) {
2126
+ return d.kind === "videoinput";
2127
+ });
2128
+ mics = devices.filter(function(d) {
2129
+ return d.kind === "audioinput";
2130
+ });
2131
+ speakers = devices.filter(function(d) {
2132
+ return d.kind === "audiooutput";
2133
+ });
2134
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, cams);
2135
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, mics);
2136
+ (_f = (_e = this._callbacks).onAvailableSpeakersUpdated) === null || _f === void 0 || _f.call(_e, speakers);
2137
+ this._selectedCam = infos.camera;
2138
+ (_h = (_g = this._callbacks).onCamUpdated) === null || _h === void 0 || _h.call(_g, infos.camera);
2139
+ this._selectedMic = infos.mic;
2140
+ (_k = (_j = this._callbacks).onMicUpdated) === null || _k === void 0 || _k.call(_j, infos.mic);
2141
+ this._selectedSpeaker = infos.speaker;
2142
+ (_m = (_l = this._callbacks).onSpeakerUpdated) === null || _m === void 0 || _m.call(_l, infos.speaker);
2143
+ if (!!this._daily.isLocalAudioLevelObserverRunning()) return [
2144
+ 3 /*break*/ ,
2145
+ 4
2146
+ ];
2147
+ return [
2148
+ 4 /*yield*/ ,
2149
+ this._daily.startLocalAudioLevelObserver(100)
2150
+ ];
2151
+ case 3:
2152
+ _o.sent();
2153
+ _o.label = 4;
2154
+ case 4:
2155
+ if (!this._wavStreamPlayer) return [
2156
+ 3 /*break*/ ,
2157
+ 6
2158
+ ];
2159
+ return [
2160
+ 4 /*yield*/ ,
2161
+ this._wavStreamPlayer.connect()
2162
+ ];
2163
+ case 5:
2164
+ _o.sent();
2165
+ if (!this._remoteAudioLevelInterval) this._remoteAudioLevelInterval = setInterval(function() {
2166
+ var _a;
2167
+ var frequencies = _this._wavStreamPlayer.getFrequencies();
2168
+ var aveVal = 0;
2169
+ if ((_a = frequencies.values) === null || _a === void 0 ? void 0 : _a.length) aveVal = frequencies.values.reduce(function(a, c) {
2170
+ return a + c;
2171
+ }, 0) / frequencies.values.length;
2172
+ _this._handleRemoteAudioLevel(aveVal);
2173
+ }, 100);
2174
+ _o.label = 6;
2175
+ case 6:
2176
+ this._initialized = true;
2177
+ return [
2178
+ 2 /*return*/
2179
+ ];
2180
+ }
2181
+ });
2182
+ });
2183
+ };
2184
+ DailyMediaManager.prototype.connect = function() {
2185
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2186
+ var _this = this;
2187
+ return $1c088932741d88e6$var$__generator(this, function(_a) {
2188
+ if (this._connected) {
2189
+ console.warn("DailyMediaManager already connected");
2190
+ return [
2191
+ 2 /*return*/
2192
+ ];
2193
+ }
2194
+ this._connected = true;
2195
+ if (!this._initialized) return [
2196
+ 2 /*return*/ ,
2197
+ new Promise(function(resolve) {
2198
+ (function() {
2199
+ return $1c088932741d88e6$var$__awaiter(_this, void 0, void 0, function() {
2200
+ return $1c088932741d88e6$var$__generator(this, function(_a) {
2201
+ switch(_a.label){
2202
+ case 0:
2203
+ this._connectResolve = resolve;
2204
+ return [
2205
+ 4 /*yield*/ ,
2206
+ this.initialize()
2207
+ ];
2208
+ case 1:
2209
+ _a.sent();
2210
+ return [
2211
+ 2 /*return*/
2212
+ ];
2213
+ }
2214
+ });
2215
+ });
2216
+ })();
2217
+ })
2218
+ ];
2219
+ if (this._micEnabled) this._startRecording();
2220
+ return [
2221
+ 2 /*return*/
2222
+ ];
2223
+ });
2224
+ });
2225
+ };
2226
+ DailyMediaManager.prototype.disconnect = function() {
2227
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2228
+ var _a, _b;
2229
+ return $1c088932741d88e6$var$__generator(this, function(_c) {
2230
+ switch(_c.label){
2231
+ case 0:
2232
+ if (this._remoteAudioLevelInterval) clearInterval(this._remoteAudioLevelInterval);
2233
+ this._remoteAudioLevelInterval = null;
2234
+ this._daily.leave();
2235
+ this._currentAudioTrack = null;
2236
+ return [
2237
+ 4 /*yield*/ ,
2238
+ (_a = this._mediaStreamRecorder) === null || _a === void 0 ? void 0 : _a.end()
2239
+ ];
2240
+ case 1:
2241
+ _c.sent();
2242
+ (_b = this._wavStreamPlayer) === null || _b === void 0 || _b.interrupt();
2243
+ this._initialized = false;
2244
+ this._connected = false;
2245
+ return [
2246
+ 2 /*return*/
2247
+ ];
2248
+ }
2249
+ });
2250
+ });
2251
+ };
2252
+ DailyMediaManager.prototype.userStartedSpeaking = function() {
2253
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2254
+ var _a;
2255
+ return $1c088932741d88e6$var$__generator(this, function(_b) {
2256
+ return [
2257
+ 2 /*return*/ ,
2258
+ (_a = this._wavStreamPlayer) === null || _a === void 0 ? void 0 : _a.interrupt()
2259
+ ];
2260
+ });
2261
+ });
2262
+ };
2263
+ DailyMediaManager.prototype.bufferBotAudio = function(data, id) {
2264
+ var _a;
2265
+ return (_a = this._wavStreamPlayer) === null || _a === void 0 ? void 0 : _a.add16BitPCM(data, id);
2266
+ };
2267
+ DailyMediaManager.prototype.getAllMics = function() {
2268
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2269
+ var devices;
2270
+ return $1c088932741d88e6$var$__generator(this, function(_a) {
2271
+ switch(_a.label){
2272
+ case 0:
2273
+ return [
2274
+ 4 /*yield*/ ,
2275
+ this._daily.enumerateDevices()
2276
+ ];
2277
+ case 1:
2278
+ devices = _a.sent().devices;
2279
+ return [
2280
+ 2 /*return*/ ,
2281
+ devices.filter(function(device) {
2282
+ return device.kind === "audioinput";
2283
+ })
2284
+ ];
2285
+ }
2286
+ });
2287
+ });
2288
+ };
2289
+ DailyMediaManager.prototype.getAllCams = function() {
2290
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2291
+ var devices;
2292
+ return $1c088932741d88e6$var$__generator(this, function(_a) {
2293
+ switch(_a.label){
2294
+ case 0:
2295
+ return [
2296
+ 4 /*yield*/ ,
2297
+ this._daily.enumerateDevices()
2298
+ ];
2299
+ case 1:
2300
+ devices = _a.sent().devices;
2301
+ return [
2302
+ 2 /*return*/ ,
2303
+ devices.filter(function(device) {
2304
+ return device.kind === "videoinput";
2305
+ })
2306
+ ];
2307
+ }
2308
+ });
2309
+ });
2310
+ };
2311
+ DailyMediaManager.prototype.getAllSpeakers = function() {
2312
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2313
+ var devices;
2314
+ return $1c088932741d88e6$var$__generator(this, function(_a) {
2315
+ switch(_a.label){
2316
+ case 0:
2317
+ return [
2318
+ 4 /*yield*/ ,
2319
+ this._daily.enumerateDevices()
2320
+ ];
2321
+ case 1:
2322
+ devices = _a.sent().devices;
2323
+ return [
2324
+ 2 /*return*/ ,
2325
+ devices.filter(function(device) {
2326
+ return device.kind === "audiooutput";
2327
+ })
2328
+ ];
2329
+ }
2330
+ });
2331
+ });
2332
+ };
2333
+ DailyMediaManager.prototype.updateMic = function(micId) {
2334
+ var _this = this;
2335
+ this._daily.setInputDevicesAsync({
2336
+ audioDeviceId: micId
2337
+ }).then(function(deviceInfo) {
2338
+ _this._selectedMic = deviceInfo.mic;
2339
+ });
2340
+ };
2341
+ DailyMediaManager.prototype.updateCam = function(camId) {
2342
+ var _this = this;
2343
+ this._daily.setInputDevicesAsync({
2344
+ videoDeviceId: camId
2345
+ }).then(function(deviceInfo) {
2346
+ _this._selectedCam = deviceInfo.camera;
2347
+ });
2348
+ };
2349
+ DailyMediaManager.prototype.updateSpeaker = function(speakerId) {
2350
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2351
+ var sID, speakers, defaultSpeaker_1, defaultSpeakerCp;
2352
+ var _this = this;
2353
+ var _a, _b;
2354
+ return $1c088932741d88e6$var$__generator(this, function(_c) {
2355
+ switch(_c.label){
2356
+ case 0:
2357
+ if (speakerId !== "default" && this._selectedSpeaker.deviceId === speakerId) return [
2358
+ 2 /*return*/
2359
+ ];
2360
+ sID = speakerId;
2361
+ if (!(sID === "default")) return [
2362
+ 3 /*break*/ ,
2363
+ 2
2364
+ ];
2365
+ return [
2366
+ 4 /*yield*/ ,
2367
+ this.getAllSpeakers()
2368
+ ];
2369
+ case 1:
2370
+ speakers = _c.sent();
2371
+ defaultSpeaker_1 = speakers.find(function(s) {
2372
+ return s.deviceId === "default";
2373
+ });
2374
+ if (!defaultSpeaker_1) {
2375
+ console.warn("No default speaker found");
2376
+ return [
2377
+ 2 /*return*/
2378
+ ];
2379
+ }
2380
+ speakers.splice(speakers.indexOf(defaultSpeaker_1), 1);
2381
+ defaultSpeakerCp = speakers.find(function(s) {
2382
+ return defaultSpeaker_1.label.includes(s.label);
2383
+ });
2384
+ sID = (_a = defaultSpeakerCp === null || defaultSpeakerCp === void 0 ? void 0 : defaultSpeakerCp.deviceId) !== null && _a !== void 0 ? _a : speakerId;
2385
+ _c.label = 2;
2386
+ case 2:
2387
+ (_b = this._wavStreamPlayer) === null || _b === void 0 || _b.updateSpeaker(sID).then(function() {
2388
+ var _a, _b;
2389
+ _this._selectedSpeaker = {
2390
+ deviceId: speakerId
2391
+ };
2392
+ (_b = (_a = _this._callbacks).onSpeakerUpdated) === null || _b === void 0 || _b.call(_a, _this._selectedSpeaker);
2393
+ });
2394
+ return [
2395
+ 2 /*return*/
2396
+ ];
2397
+ }
2398
+ });
2399
+ });
2400
+ };
2401
+ Object.defineProperty(DailyMediaManager.prototype, "selectedMic", {
2402
+ get: function() {
2403
+ return this._selectedMic;
2404
+ },
2405
+ enumerable: false,
2406
+ configurable: true
2407
+ });
2408
+ Object.defineProperty(DailyMediaManager.prototype, "selectedCam", {
2409
+ get: function() {
2410
+ return this._selectedCam;
2411
+ },
2412
+ enumerable: false,
2413
+ configurable: true
2414
+ });
2415
+ Object.defineProperty(DailyMediaManager.prototype, "selectedSpeaker", {
2416
+ get: function() {
2417
+ return this._selectedSpeaker;
2418
+ },
2419
+ enumerable: false,
2420
+ configurable: true
2421
+ });
2422
+ DailyMediaManager.prototype.enableMic = function(enable) {
2423
+ return $1c088932741d88e6$var$__awaiter(this, void 0, Promise, function() {
2424
+ var _a;
2425
+ return $1c088932741d88e6$var$__generator(this, function(_b) {
2426
+ this._micEnabled = enable;
2427
+ if (!((_a = this._daily.participants()) === null || _a === void 0 ? void 0 : _a.local)) return [
2428
+ 2 /*return*/
2429
+ ];
2430
+ this._daily.setLocalAudio(enable);
2431
+ if (this._mediaStreamRecorder) {
2432
+ if (enable) {
2433
+ if (this._mediaStreamRecorder.getStatus() === "paused") this._startRecording();
2434
+ // else, we'll record on the track-started event
2435
+ } else if (this._mediaStreamRecorder.getStatus() === "recording") this._mediaStreamRecorder.pause();
2436
+ }
2437
+ return [
2438
+ 2 /*return*/
2439
+ ];
2440
+ });
2441
+ });
2442
+ };
2443
+ DailyMediaManager.prototype.enableCam = function(enable) {
2444
+ this._camEnabled = enable;
2445
+ this._daily.setLocalVideo(enable);
2446
+ };
2447
+ Object.defineProperty(DailyMediaManager.prototype, "isCamEnabled", {
2448
+ get: function() {
2449
+ return this._daily.localVideo();
2450
+ },
2451
+ enumerable: false,
2452
+ configurable: true
2453
+ });
2454
+ Object.defineProperty(DailyMediaManager.prototype, "isMicEnabled", {
2455
+ get: function() {
2456
+ return this._daily.localAudio();
2457
+ },
2458
+ enumerable: false,
2459
+ configurable: true
2460
+ });
2461
+ DailyMediaManager.prototype.tracks = function() {
2462
+ var _a, _b, _c, _d, _e, _f;
2463
+ var participants = this._daily.participants();
2464
+ return {
2465
+ local: {
2466
+ audio: (_c = (_b = (_a = participants === null || participants === void 0 ? void 0 : participants.local) === null || _a === void 0 ? void 0 : _a.tracks) === null || _b === void 0 ? void 0 : _b.audio) === null || _c === void 0 ? void 0 : _c.persistentTrack,
2467
+ video: (_f = (_e = (_d = participants === null || participants === void 0 ? void 0 : participants.local) === null || _d === void 0 ? void 0 : _d.tracks) === null || _e === void 0 ? void 0 : _e.video) === null || _f === void 0 ? void 0 : _f.persistentTrack
2468
+ }
2469
+ };
2470
+ };
2471
+ DailyMediaManager.prototype._startRecording = function() {
2472
+ var _this = this;
2473
+ if (!this._connected || !this._mediaStreamRecorder) return;
2474
+ try {
2475
+ this._mediaStreamRecorder.record(function(data) {
2476
+ _this._userAudioCallback(data.mono);
2477
+ }, this._recorderChunkSize);
2478
+ } catch (e) {
2479
+ var err = e;
2480
+ if (!err.message.includes("Already recording")) console.error("Error starting recording", e);
2481
+ }
2482
+ };
2483
+ DailyMediaManager.prototype._handleAvailableDevicesUpdated = function(event) {
2484
+ var _a, _b, _c, _d, _e, _f;
2485
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, event.availableDevices.filter(function(d) {
2486
+ return d.kind === "videoinput";
2487
+ }));
2488
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, event.availableDevices.filter(function(d) {
2489
+ return d.kind === "audioinput";
2490
+ }));
2491
+ (_f = (_e = this._callbacks).onAvailableSpeakersUpdated) === null || _f === void 0 || _f.call(_e, event.availableDevices.filter(function(d) {
2492
+ return d.kind === "audiooutput";
2493
+ }));
2494
+ if (this._selectedSpeaker.deviceId === "default") this.updateSpeaker("default");
2495
+ };
2496
+ DailyMediaManager.prototype._handleSelectedDevicesUpdated = function(event) {
2497
+ var _a, _b, _c, _d, _e, _f;
2498
+ if (((_a = this._selectedCam) === null || _a === void 0 ? void 0 : _a.deviceId) !== event.devices.camera) {
2499
+ this._selectedCam = event.devices.camera;
2500
+ (_c = (_b = this._callbacks).onCamUpdated) === null || _c === void 0 || _c.call(_b, event.devices.camera);
2501
+ }
2502
+ if (((_d = this._selectedMic) === null || _d === void 0 ? void 0 : _d.deviceId) !== event.devices.mic) {
2503
+ this._selectedMic = event.devices.mic;
2504
+ (_f = (_e = this._callbacks).onMicUpdated) === null || _f === void 0 || _f.call(_e, event.devices.mic);
2505
+ }
2506
+ };
2507
+ DailyMediaManager.prototype._handleLocalAudioLevel = function(ev) {
2508
+ var _a, _b;
2509
+ (_b = (_a = this._callbacks).onLocalAudioLevel) === null || _b === void 0 || _b.call(_a, ev.audioLevel);
2510
+ };
2511
+ DailyMediaManager.prototype._handleRemoteAudioLevel = function(audioLevel) {
2512
+ var _a, _b;
2513
+ (_b = (_a = this._callbacks).onRemoteAudioLevel) === null || _b === void 0 || _b.call(_a, audioLevel, $1c088932741d88e6$var$botParticipant());
2514
+ };
2515
+ DailyMediaManager.prototype.handleTrackStarted = function(event) {
2516
+ return $1c088932741d88e6$var$__awaiter(this, void 0, void 0, function() {
2517
+ var status, _a;
2518
+ var _b, _c, _d, _e;
2519
+ return $1c088932741d88e6$var$__generator(this, function(_f) {
2520
+ switch(_f.label){
2521
+ case 0:
2522
+ if (!((_b = event.participant) === null || _b === void 0 ? void 0 : _b.local)) return [
2523
+ 2 /*return*/
2524
+ ];
2525
+ if (!(event.track.kind === "audio")) return [
2526
+ 3 /*break*/ ,
2527
+ 10
2528
+ ];
2529
+ if (!this._mediaStreamRecorder) return [
2530
+ 3 /*break*/ ,
2531
+ 9
2532
+ ];
2533
+ status = this._mediaStreamRecorder.getStatus();
2534
+ _a = status;
2535
+ switch(_a){
2536
+ case "ended":
2537
+ return [
2538
+ 3 /*break*/ ,
2539
+ 1
2540
+ ];
2541
+ case "paused":
2542
+ return [
2543
+ 3 /*break*/ ,
2544
+ 3
2545
+ ];
2546
+ case "recording":
2547
+ return [
2548
+ 3 /*break*/ ,
2549
+ 4
2550
+ ];
2551
+ }
2552
+ return [
2553
+ 3 /*break*/ ,
2554
+ 4
2555
+ ];
2556
+ case 1:
2557
+ return [
2558
+ 4 /*yield*/ ,
2559
+ this._mediaStreamRecorder.begin(event.track)
2560
+ ];
2561
+ case 2:
2562
+ _f.sent();
2563
+ if (this._connected) {
2564
+ this._startRecording();
2565
+ if (this._connectResolve) {
2566
+ this._connectResolve();
2567
+ this._connectResolve = null;
2568
+ }
2569
+ }
2570
+ return [
2571
+ 3 /*break*/ ,
2572
+ 9
2573
+ ];
2574
+ case 3:
2575
+ this._startRecording();
2576
+ return [
2577
+ 3 /*break*/ ,
2578
+ 9
2579
+ ];
2580
+ case 4:
2581
+ if (!(this._currentAudioTrack !== event.track)) return [
2582
+ 3 /*break*/ ,
2583
+ 7
2584
+ ];
2585
+ return [
2586
+ 4 /*yield*/ ,
2587
+ this._mediaStreamRecorder.end()
2588
+ ];
2589
+ case 5:
2590
+ _f.sent();
2591
+ return [
2592
+ 4 /*yield*/ ,
2593
+ this._mediaStreamRecorder.begin(event.track)
2594
+ ];
2595
+ case 6:
2596
+ _f.sent();
2597
+ this._startRecording();
2598
+ return [
2599
+ 3 /*break*/ ,
2600
+ 8
2601
+ ];
2602
+ case 7:
2603
+ console.warn("track-started event received for current track and already recording");
2604
+ _f.label = 8;
2605
+ case 8:
2606
+ return [
2607
+ 3 /*break*/ ,
2608
+ 9
2609
+ ];
2610
+ case 9:
2611
+ this._currentAudioTrack = event.track;
2612
+ _f.label = 10;
2613
+ case 10:
2614
+ (_d = (_c = this._callbacks).onTrackStarted) === null || _d === void 0 || _d.call(_c, event.track, event.participant ? $1c088932741d88e6$var$dailyParticipantToParticipant(event.participant) : undefined);
2615
+ (_e = this.onTrackStartedCallback) === null || _e === void 0 || _e.call(this, event);
2616
+ return [
2617
+ 2 /*return*/
2618
+ ];
2619
+ }
2620
+ });
2621
+ });
2622
+ };
2623
+ DailyMediaManager.prototype.handleTrackStopped = function(event) {
2624
+ var _a, _b, _c, _d;
2625
+ if (!((_a = event.participant) === null || _a === void 0 ? void 0 : _a.local)) return;
2626
+ if (event.track.kind === "audio") {
2627
+ if (this._mediaStreamRecorder && this._mediaStreamRecorder.getStatus() === "recording") this._mediaStreamRecorder.pause();
2628
+ }
2629
+ (_c = (_b = this._callbacks).onTrackStopped) === null || _c === void 0 || _c.call(_b, event.track, event.participant ? $1c088932741d88e6$var$dailyParticipantToParticipant(event.participant) : undefined);
2630
+ (_d = this.onTrackStoppedCallback) === null || _d === void 0 || _d.call(this, event);
2631
+ };
2632
+ return DailyMediaManager;
2633
+ }((0, $23859655abfc5f5c$export$4a0c46dbbe2ddb67));
2634
+ var $1c088932741d88e6$var$dailyParticipantToParticipant = function(p) {
2635
+ return {
2636
+ id: p.user_id,
2637
+ local: p.local,
2638
+ name: p.user_name
2639
+ };
2640
+ };
2641
+ var $1c088932741d88e6$var$botParticipant = function() {
2642
+ return {
2643
+ id: "bot",
2644
+ local: false,
2645
+ name: "Bot"
2646
+ };
2647
+ };
2648
+
2649
+
2650
+
2651
+
2652
+ var $158ad1a38fb85e0e$var$__extends = undefined && undefined.__extends || function() {
2653
+ var extendStatics = function(d, b) {
2654
+ extendStatics = Object.setPrototypeOf || ({
2655
+ __proto__: []
2656
+ }) instanceof Array && function(d, b) {
2657
+ d.__proto__ = b;
2658
+ } || function(d, b) {
2659
+ for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
2660
+ };
2661
+ return extendStatics(d, b);
2662
+ };
2663
+ return function(d, b) {
2664
+ if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
2665
+ extendStatics(d, b);
2666
+ function __() {
2667
+ this.constructor = d;
2668
+ }
2669
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
2670
+ };
2671
+ }();
2672
+ var $158ad1a38fb85e0e$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
2673
+ function adopt(value) {
2674
+ return value instanceof P ? value : new P(function(resolve) {
2675
+ resolve(value);
2676
+ });
2677
+ }
2678
+ return new (P || (P = Promise))(function(resolve, reject) {
2679
+ function fulfilled(value) {
2680
+ try {
2681
+ step(generator.next(value));
2682
+ } catch (e) {
2683
+ reject(e);
2684
+ }
2685
+ }
2686
+ function rejected(value) {
2687
+ try {
2688
+ step(generator["throw"](value));
2689
+ } catch (e) {
2690
+ reject(e);
2691
+ }
2692
+ }
2693
+ function step(result) {
2694
+ result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
2695
+ }
2696
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
2697
+ });
2698
+ };
2699
+ var $158ad1a38fb85e0e$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
2700
+ var _ = {
2701
+ label: 0,
2702
+ sent: function() {
2703
+ if (t[0] & 1) throw t[1];
2704
+ return t[1];
2705
+ },
2706
+ trys: [],
2707
+ ops: []
2708
+ }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
2709
+ return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
2710
+ return this;
2711
+ }), g;
2712
+ function verb(n) {
2713
+ return function(v) {
2714
+ return step([
2715
+ n,
2716
+ v
2717
+ ]);
2718
+ };
2719
+ }
2720
+ function step(op) {
2721
+ if (f) throw new TypeError("Generator is already executing.");
2722
+ while(g && (g = 0, op[0] && (_ = 0)), _)try {
2723
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
2724
+ if (y = 0, t) op = [
2725
+ op[0] & 2,
2726
+ t.value
2727
+ ];
2728
+ switch(op[0]){
2729
+ case 0:
2730
+ case 1:
2731
+ t = op;
2732
+ break;
2733
+ case 4:
2734
+ _.label++;
2735
+ return {
2736
+ value: op[1],
2737
+ done: false
2738
+ };
2739
+ case 5:
2740
+ _.label++;
2741
+ y = op[1];
2742
+ op = [
2743
+ 0
2744
+ ];
2745
+ continue;
2746
+ case 7:
2747
+ op = _.ops.pop();
2748
+ _.trys.pop();
2749
+ continue;
2750
+ default:
2751
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
2752
+ _ = 0;
2753
+ continue;
2754
+ }
2755
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
2756
+ _.label = op[1];
2757
+ break;
2758
+ }
2759
+ if (op[0] === 6 && _.label < t[1]) {
2760
+ _.label = t[1];
2761
+ t = op;
2762
+ break;
2763
+ }
2764
+ if (t && _.label < t[2]) {
2765
+ _.label = t[2];
2766
+ _.ops.push(op);
2767
+ break;
2768
+ }
2769
+ if (t[2]) _.ops.pop();
2770
+ _.trys.pop();
2771
+ continue;
2772
+ }
2773
+ op = body.call(thisArg, _);
2774
+ } catch (e) {
2775
+ op = [
2776
+ 6,
2777
+ e
2778
+ ];
2779
+ y = 0;
2780
+ } finally{
2781
+ f = t = 0;
2782
+ }
2783
+ if (op[0] & 5) throw op[1];
2784
+ return {
2785
+ value: op[0] ? op[1] : void 0,
2786
+ done: true
2787
+ };
2788
+ }
2789
+ };
2790
+ var $158ad1a38fb85e0e$var$__spreadArray = undefined && undefined.__spreadArray || function(to, from, pack) {
2791
+ if (pack || arguments.length === 2) {
2792
+ for(var i = 0, l = from.length, ar; i < l; i++)if (ar || !(i in from)) {
2793
+ if (!ar) ar = Array.prototype.slice.call(from, 0, i);
2794
+ ar[i] = from[i];
2795
+ }
2796
+ }
2797
+ return to.concat(ar || Array.prototype.slice.call(from));
2798
+ };
2799
+ var $158ad1a38fb85e0e$var$readyStates = [
2800
+ "CONNECTING",
2801
+ "OPEN",
2802
+ "CLOSING",
2803
+ "CLOSED"
2804
+ ];
2805
+ var $158ad1a38fb85e0e$var$KEEP_ALIVE_INTERVAL = 5000;
2806
+ var $158ad1a38fb85e0e$var$KEEP_ALIVE_TIMEOUT = 15000;
2807
+ // client side code in soupSFU has a timeout of 15 seconds for command response
2808
+ // 5 seconds seems reasonable that it provides roughly 3 retry attempts
2809
+ var $158ad1a38fb85e0e$var$WEBSOCKET_CONNECTION_TIMEOUT = 150000;
2810
+ var $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_ATTEMPTS = 2;
2811
+ var $158ad1a38fb85e0e$var$MAX_RECONNECT_ATTEMPTS = 10;
2812
+ var $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_INTERVAL = 1000;
2813
+ var $158ad1a38fb85e0e$var$MAX_RECONNECT_INTERVAL = 30000;
2814
+ var $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_DECAY = 1.5;
2815
+ var $158ad1a38fb85e0e$var$WEBSOCKET_TIMEOUT_CODE = 4100;
2816
+ var $158ad1a38fb85e0e$var$SIG_CONNECTION_CANCELED = "SIG_CONNECTION_CANCELED";
2817
+ var $158ad1a38fb85e0e$var$WEBSOCKET_ERROR = "WEBSOCKET_ERROR";
2818
+ var $158ad1a38fb85e0e$var$LOG_LEVEL;
2819
+ (function(LOG_LEVEL) {
2820
+ LOG_LEVEL[LOG_LEVEL["DEBUG"] = 0] = "DEBUG";
2821
+ LOG_LEVEL[LOG_LEVEL["ERROR"] = 1] = "ERROR";
2822
+ LOG_LEVEL[LOG_LEVEL["INFO"] = 2] = "INFO";
2823
+ LOG_LEVEL[LOG_LEVEL["WARN"] = 3] = "WARN";
2824
+ })($158ad1a38fb85e0e$var$LOG_LEVEL || ($158ad1a38fb85e0e$var$LOG_LEVEL = {}));
2825
+ var $158ad1a38fb85e0e$var$rWebSocket = /** @class */ function() {
2826
+ function rWebSocket(url, protocols) {
2827
+ this._closedManually = false;
2828
+ this._errored = false;
2829
+ this._rejected = false;
2830
+ this._timed_out = false;
2831
+ this._initialConnectionOk = false;
2832
+ this._ws = new WebSocket(url, protocols);
2833
+ }
2834
+ rWebSocket.prototype.addEventListener = function(type, listener) {
2835
+ this._ws.addEventListener(type, listener);
2836
+ };
2837
+ // Add other WebSocket methods as needed
2838
+ rWebSocket.prototype.close = function(code, reason) {
2839
+ this._ws.close(code, reason);
2840
+ };
2841
+ rWebSocket.prototype.send = function(data) {
2842
+ this._ws.send(data);
2843
+ };
2844
+ Object.defineProperty(rWebSocket.prototype, "url", {
2845
+ // Add getters for WebSocket properties
2846
+ get: function() {
2847
+ return this._ws.url;
2848
+ },
2849
+ enumerable: false,
2850
+ configurable: true
2851
+ });
2852
+ Object.defineProperty(rWebSocket.prototype, "readyState", {
2853
+ get: function() {
2854
+ return this._ws.readyState;
2855
+ },
2856
+ enumerable: false,
2857
+ configurable: true
2858
+ });
2859
+ return rWebSocket;
2860
+ }();
2861
+ /**
2862
+ * Builds on top of Javascript Websockets
2863
+ *
2864
+ * This behaves like the Websocket library in every way, except if it fails to
2865
+ * connect or if it gets disconnected, it will try to reconnect depending on
2866
+ * the maximum number of reconnect attempts set. retry is not enabled for initial
2867
+ * connection. When initial connection fails it is best to check yourself before
2868
+ * you keep wreckin' yourself.
2869
+ *
2870
+ * It is API compatible, so when you have:
2871
+ * ws = new WebSocket('ws://....');
2872
+ * you can replace with:
2873
+ * ws = new ReconnectingWebSocket('ws://....');
2874
+ *
2875
+ * While it is API compatible with the NodeJS ws library, we provide the
2876
+ * following additional properties and events on the ReconnectingWebSocket.
2877
+ *
2878
+ * Events:
2879
+ *
2880
+ * connection-timeout
2881
+ * - Emitted when the web socket connection times out.
2882
+ *
2883
+ * reconnecting
2884
+ * - Emitted after a manual close of the web socket is done and before retrying
2885
+ * the connection.
2886
+ *
2887
+ * reconnect-failed
2888
+ * - Emitted when the number of connection attempts exceeds the set number of
2889
+ * reconnection attempts.
2890
+ *
2891
+ * keep-alive
2892
+ * - Emitted when the set keep alive interval elapses. This event may be used
2893
+ * to have ping pong keep-alive mechanism for web socket health.
2894
+ *
2895
+ * Properties:
2896
+ *
2897
+ * keepAliveTimeout
2898
+ * - The timeout for keep-alive. Default: 15000
2899
+ *
2900
+ * keepAliveInterval
2901
+ * - The interval at which to emit keep-alive event. Default: 5000
2902
+ *
2903
+ * shouldRetryFn
2904
+ * - A callback function which should return boolean to determine if a web
2905
+ * socket reconnection attempt should be made. When not set, connection is
2906
+ * always retried.
2907
+ *
2908
+ * connectionTimeout
2909
+ * - The timeout interval for considering whether the connection timed out.
2910
+ * Default: 20000 ms
2911
+ *
2912
+ * maxReconnectAttempts
2913
+ * - The maximum number of attempts to be made for reconnection. Default: 2
2914
+ *
2915
+ * reconnectInterval
2916
+ * - The interval to wait before attempting a reconnection. Default: 1000 ms
2917
+ */ var $158ad1a38fb85e0e$export$4f3d0ffd941ebefb = /** @class */ function(_super) {
2918
+ $158ad1a38fb85e0e$var$__extends(ReconnectingWebSocket, _super);
2919
+ function ReconnectingWebSocket(address, protocols, options) {
2920
+ if (options === void 0) options = {};
2921
+ var _a;
2922
+ var _this = _super.call(this) || this;
2923
+ if (!address) throw new Error("Need a valid WebSocket URL");
2924
+ _this._ws = null;
2925
+ _this._url = address;
2926
+ _this._protocols = protocols;
2927
+ _this._parseBlobToJson = (_a = options === null || options === void 0 ? void 0 : options.parseBlobToJson) !== null && _a !== void 0 ? _a : true;
2928
+ _this.init();
2929
+ return _this;
2930
+ }
2931
+ ReconnectingWebSocket.prototype.init = function() {
2932
+ this._keepAliveTimeout = $158ad1a38fb85e0e$var$KEEP_ALIVE_TIMEOUT;
2933
+ this._keepAliveInterval = $158ad1a38fb85e0e$var$KEEP_ALIVE_INTERVAL;
2934
+ this._disconnected = false;
2935
+ this._keepIntervalID = null;
2936
+ this._shouldRetryFn = null;
2937
+ this._connectionTimeout = $158ad1a38fb85e0e$var$WEBSOCKET_CONNECTION_TIMEOUT;
2938
+ this._reconnectAttempts = 0;
2939
+ this._allowedReconnectAttempts = $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_ATTEMPTS;
2940
+ this._reconnectInterval = $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_INTERVAL;
2941
+ this._maxReconnectInterval = $158ad1a38fb85e0e$var$MAX_RECONNECT_INTERVAL;
2942
+ this._reconnectDecay = $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_DECAY;
2943
+ };
2944
+ ReconnectingWebSocket.prototype.connect = function() {
2945
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
2946
+ var _this = this;
2947
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
2948
+ return [
2949
+ 2 /*return*/ ,
2950
+ new Promise(function(resolve, reject) {
2951
+ _this._disconnected = false;
2952
+ _this.clearReconnectTimeout();
2953
+ var ws = new $158ad1a38fb85e0e$var$rWebSocket(_this._url, _this._protocols);
2954
+ _this.setConnectionTimeout();
2955
+ ws.addEventListener("close", function(evt) {
2956
+ var closeEvent = evt;
2957
+ var code = ws._timed_out ? $158ad1a38fb85e0e$var$WEBSOCKET_TIMEOUT_CODE : closeEvent.code;
2958
+ var reason = ws._timed_out ? "websocket connection timed out" : closeEvent.reason;
2959
+ ws._timed_out = false;
2960
+ if (!ws._closedManually && ws._initialConnectionOk) {
2961
+ console.warn("signaling socket closed unexpectedly: ".concat(code).concat(reason ? " " + reason : ""));
2962
+ _this._closeSocket();
2963
+ _this.emit("close", code, reason);
2964
+ } else _this.log("signaling socket closed");
2965
+ if (!ws._closedManually && (ws._errored || ws._timed_out)) {
2966
+ console.warn("signaling socket closed on error: ".concat(code).concat(reason ? " " + reason : ""));
2967
+ if (!ws._rejected) {
2968
+ ws._rejected = true;
2969
+ var err = new Error("WebSocket connection error (".concat(code, "): ").concat(reason));
2970
+ err.name = $158ad1a38fb85e0e$var$WEBSOCKET_ERROR;
2971
+ reject(err);
2972
+ }
2973
+ }
2974
+ });
2975
+ ws.addEventListener("open", function(evt) {
2976
+ _this.log("wss connection opened to", $158ad1a38fb85e0e$var$LOG_LEVEL.DEBUG, _this._url);
2977
+ _this.clearConnectionTimeout();
2978
+ // now that the timeout closes the socket, in theory this onopen
2979
+ // callback should never happen in the first place, but seems
2980
+ // harmless to leave these safeguards in
2981
+ if (ws._rejected || ws._timed_out) return;
2982
+ if (ws._closedManually || _this._ws && _this._ws !== ws) {
2983
+ ws._rejected = true;
2984
+ ws.close();
2985
+ var err = Error("wss connection interrupted by disconnect or newer connection");
2986
+ err.name = $158ad1a38fb85e0e$var$SIG_CONNECTION_CANCELED;
2987
+ reject(err);
2988
+ return;
2989
+ }
2990
+ ws._initialConnectionOk = _this._url;
2991
+ _this._lastMsgRecvTime = Date.now();
2992
+ if (_this._keepAliveInterval) _this._keepIntervalID = setInterval(function() {
2993
+ return _this.checkSocketHealthAndSendKeepAlive();
2994
+ }, _this._keepAliveInterval);
2995
+ _this._ws = ws;
2996
+ _this.emit("open");
2997
+ resolve(ws);
2998
+ });
2999
+ ws.addEventListener("error", function(evt) {
3000
+ // fyi: evt is an Event here, with 0 amount of helpful info. If there
3001
+ // happens to be info about the error, it's included in the
3002
+ // accompanying close event (because that make sense. shakes head)
3003
+ // SO. We do not reject here. Instead, we just set the _errored
3004
+ // flag on the socket so when the close event occurs, it knows to
3005
+ // reject the promise
3006
+ if (!ws._closedManually) {
3007
+ var wsTarget = evt.currentTarget;
3008
+ _this.log("websocket error event: ".concat(wsTarget === null || wsTarget === void 0 ? void 0 : wsTarget.url));
3009
+ }
3010
+ ws._errored = true;
3011
+ });
3012
+ ws.addEventListener("message", function(msg) {
3013
+ _this._handleMessage(msg);
3014
+ });
3015
+ })
3016
+ ];
3017
+ });
3018
+ });
3019
+ };
3020
+ ReconnectingWebSocket.prototype.setConnectionTimeout = function() {
3021
+ var _this = this;
3022
+ this._connectionTimeoutID = setTimeout(function() {
3023
+ return $158ad1a38fb85e0e$var$__awaiter(_this, void 0, void 0, function() {
3024
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3025
+ switch(_a.label){
3026
+ case 0:
3027
+ this.log("Connection reconnect attempt timed out.");
3028
+ this.emit("connection-timeout");
3029
+ this.clearConnectionTimeout();
3030
+ return [
3031
+ 4 /*yield*/ ,
3032
+ this._closeSocket()
3033
+ ];
3034
+ case 1:
3035
+ _a.sent();
3036
+ return [
3037
+ 2 /*return*/
3038
+ ];
3039
+ }
3040
+ });
3041
+ });
3042
+ }, this._connectionTimeout);
3043
+ };
3044
+ ReconnectingWebSocket.prototype.clearConnectionTimeout = function() {
3045
+ clearTimeout(this._connectionTimeoutID);
3046
+ this._connectionTimeoutID = undefined;
3047
+ };
3048
+ ReconnectingWebSocket.prototype.clearReconnectTimeout = function() {
3049
+ clearTimeout(this._reconnectTimeoutID);
3050
+ this._reconnectTimeoutID = undefined;
3051
+ };
3052
+ ReconnectingWebSocket.prototype.clearKeepAliveInterval = function() {
3053
+ if (this._keepIntervalID) {
3054
+ clearInterval(this._keepIntervalID);
3055
+ this._keepIntervalID = null;
3056
+ }
3057
+ };
3058
+ ReconnectingWebSocket.prototype.checkSocketHealthAndSendKeepAlive = function() {
3059
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3060
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3061
+ switch(_a.label){
3062
+ case 0:
3063
+ if (!(this._ws && this._ws.readyState === WebSocket.OPEN)) return [
3064
+ 2 /*return*/
3065
+ ];
3066
+ if (!this._keepAliveTimeout || !this._keepAliveInterval) return [
3067
+ 2 /*return*/
3068
+ ];
3069
+ if (!(Date.now() - this._lastMsgRecvTime > this._keepAliveTimeout)) return [
3070
+ 3 /*break*/ ,
3071
+ 2
3072
+ ];
3073
+ this.log("Connection is stale, need to reconnect", $158ad1a38fb85e0e$var$LOG_LEVEL.WARN);
3074
+ return [
3075
+ 4 /*yield*/ ,
3076
+ this._closeSocket()
3077
+ ];
3078
+ case 1:
3079
+ _a.sent();
3080
+ return [
3081
+ 2 /*return*/
3082
+ ];
3083
+ case 2:
3084
+ // Only emit the keep-alive event if we haven't sent anything else recently
3085
+ if (Date.now() - this._lastMsgSendTime < this._keepAliveInterval) return [
3086
+ 2 /*return*/
3087
+ ];
3088
+ this.log("Emitting keep-alive", $158ad1a38fb85e0e$var$LOG_LEVEL.DEBUG);
3089
+ this.emit("keep-alive");
3090
+ return [
3091
+ 2 /*return*/
3092
+ ];
3093
+ }
3094
+ });
3095
+ });
3096
+ };
3097
+ // We use the word manually here to imply the application using this code
3098
+ // or this code itself will decide to close the socket.
3099
+ ReconnectingWebSocket.prototype._closeSocket = function() {
3100
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3101
+ var shouldRetry, error_1;
3102
+ var _a;
3103
+ return $158ad1a38fb85e0e$var$__generator(this, function(_b) {
3104
+ switch(_b.label){
3105
+ case 0:
3106
+ this.log("Closing");
3107
+ _b.label = 1;
3108
+ case 1:
3109
+ _b.trys.push([
3110
+ 1,
3111
+ 4,
3112
+ ,
3113
+ 5
3114
+ ]);
3115
+ this.clearKeepAliveInterval();
3116
+ this._lastMsgRecvTime = 0;
3117
+ if (this._ws) {
3118
+ this._ws._closedManually = true;
3119
+ this._ws.close();
3120
+ }
3121
+ shouldRetry = ((_a = this._ws) === null || _a === void 0 ? void 0 : _a._initialConnectionOk) && this._shouldRetryFn && this._shouldRetryFn();
3122
+ this._ws = null;
3123
+ if (!shouldRetry) return [
3124
+ 3 /*break*/ ,
3125
+ 3
3126
+ ];
3127
+ this.log("Emitting reconnect", $158ad1a38fb85e0e$var$LOG_LEVEL.DEBUG);
3128
+ this.emit("reconnecting");
3129
+ return [
3130
+ 4 /*yield*/ ,
3131
+ this.retryFailedConnection()
3132
+ ];
3133
+ case 2:
3134
+ _b.sent();
3135
+ _b.label = 3;
3136
+ case 3:
3137
+ return [
3138
+ 3 /*break*/ ,
3139
+ 5
3140
+ ];
3141
+ case 4:
3142
+ error_1 = _b.sent();
3143
+ this.log("Error while closing and retrying: ".concat(error_1), $158ad1a38fb85e0e$var$LOG_LEVEL.ERROR);
3144
+ return [
3145
+ 3 /*break*/ ,
3146
+ 5
3147
+ ];
3148
+ case 5:
3149
+ return [
3150
+ 2 /*return*/
3151
+ ];
3152
+ }
3153
+ });
3154
+ });
3155
+ };
3156
+ ReconnectingWebSocket.prototype.retryFailedConnection = function() {
3157
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3158
+ var timeout;
3159
+ var _this = this;
3160
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3161
+ if (this._reconnectAttempts < this._allowedReconnectAttempts) {
3162
+ if (this._reconnectTimeoutID) {
3163
+ this.log("Retry already scheduled");
3164
+ return [
3165
+ 2 /*return*/
3166
+ ];
3167
+ }
3168
+ this.log("Retrying failed connection");
3169
+ timeout = // The timeout logic is taken from
3170
+ // https://github.com/joewalnes/reconnecting-websocket
3171
+ this._reconnectInterval * Math.pow(this._reconnectDecay, this._reconnectAttempts);
3172
+ timeout = timeout > this._maxReconnectInterval ? this._maxReconnectInterval : timeout;
3173
+ this.log("Reconnecting in ".concat(timeout / 1000, " seconds"));
3174
+ this._reconnectAttempts += 1;
3175
+ this._reconnectTimeoutID = setTimeout(function() {
3176
+ return _this.connect();
3177
+ }, timeout);
3178
+ } else {
3179
+ this.log("Maximum connection retry attempts exceeded", $158ad1a38fb85e0e$var$LOG_LEVEL.ERROR);
3180
+ this.emit("reconnect-failed");
3181
+ }
3182
+ return [
3183
+ 2 /*return*/
3184
+ ];
3185
+ });
3186
+ });
3187
+ };
3188
+ ReconnectingWebSocket.prototype.log = function(msg, log_level) {
3189
+ if (log_level === void 0) log_level = $158ad1a38fb85e0e$var$LOG_LEVEL.DEBUG;
3190
+ var args = [];
3191
+ for(var _i = 2; _i < arguments.length; _i++)args[_i - 2] = arguments[_i];
3192
+ switch(log_level){
3193
+ case $158ad1a38fb85e0e$var$LOG_LEVEL.DEBUG:
3194
+ console.debug.apply(console, $158ad1a38fb85e0e$var$__spreadArray([
3195
+ "websocket: ".concat(msg)
3196
+ ], args, false));
3197
+ break;
3198
+ case $158ad1a38fb85e0e$var$LOG_LEVEL.ERROR:
3199
+ console.error.apply(console, $158ad1a38fb85e0e$var$__spreadArray([
3200
+ "websocket: ".concat(msg)
3201
+ ], args, false));
3202
+ break;
3203
+ case $158ad1a38fb85e0e$var$LOG_LEVEL.WARN:
3204
+ console.warn.apply(console, $158ad1a38fb85e0e$var$__spreadArray([
3205
+ "websocket: ".concat(msg)
3206
+ ], args, false));
3207
+ break;
3208
+ case $158ad1a38fb85e0e$var$LOG_LEVEL.INFO:
3209
+ default:
3210
+ console.log.apply(console, $158ad1a38fb85e0e$var$__spreadArray([
3211
+ "websocket: ".concat(msg)
3212
+ ], args, false));
3213
+ break;
3214
+ }
3215
+ };
3216
+ ReconnectingWebSocket.prototype.send = function(data) {
3217
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3218
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3219
+ try {
3220
+ if (this._ws && this._ws.readyState === WebSocket.OPEN) {
3221
+ this._lastMsgSendTime = Date.now();
3222
+ this._ws.send(data);
3223
+ } else this.log("Failed to send data, web socket not open.", $158ad1a38fb85e0e$var$LOG_LEVEL.ERROR);
3224
+ } catch (error) {
3225
+ this.log("Failed to send data. ".concat(error), $158ad1a38fb85e0e$var$LOG_LEVEL.ERROR);
3226
+ }
3227
+ return [
3228
+ 2 /*return*/
3229
+ ];
3230
+ });
3231
+ });
3232
+ };
3233
+ ReconnectingWebSocket.prototype.close = function() {
3234
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3235
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3236
+ try {
3237
+ this.log("Closing websocket");
3238
+ this._disconnected = true;
3239
+ this.clearReconnectTimeout();
3240
+ this._closeSocket();
3241
+ } catch (error) {
3242
+ this.log("Failed to close websocket. ".concat(error));
3243
+ }
3244
+ return [
3245
+ 2 /*return*/
3246
+ ];
3247
+ });
3248
+ });
3249
+ };
3250
+ Object.defineProperty(ReconnectingWebSocket.prototype, "readyState", {
3251
+ get: function() {
3252
+ var _a, _b;
3253
+ return (_b = (_a = this._ws) === null || _a === void 0 ? void 0 : _a.readyState) !== null && _b !== void 0 ? _b : WebSocket.CLOSED;
3254
+ },
3255
+ enumerable: false,
3256
+ configurable: true
3257
+ });
3258
+ Object.defineProperty(ReconnectingWebSocket.prototype, "url", {
3259
+ get: function() {
3260
+ return this._url;
3261
+ },
3262
+ enumerable: false,
3263
+ configurable: true
3264
+ });
3265
+ Object.defineProperty(ReconnectingWebSocket.prototype, "keepAliveTimeout", {
3266
+ get: function() {
3267
+ return this._keepAliveTimeout;
3268
+ },
3269
+ set: function(keepAliveTimeout) {
3270
+ if (typeof keepAliveTimeout === "number") {
3271
+ this.log("Setting ACK freshness timeout to ".concat(keepAliveTimeout));
3272
+ this._keepAliveTimeout = keepAliveTimeout;
3273
+ }
3274
+ },
3275
+ enumerable: false,
3276
+ configurable: true
3277
+ });
3278
+ Object.defineProperty(ReconnectingWebSocket.prototype, "keepAliveInterval", {
3279
+ get: function() {
3280
+ return this._keepAliveInterval;
3281
+ },
3282
+ set: function(keepAliveInterval) {
3283
+ if (typeof keepAliveInterval === "number") {
3284
+ this.log("Setting keep-alive interval to ".concat(keepAliveInterval));
3285
+ this._keepAliveInterval = keepAliveInterval;
3286
+ }
3287
+ },
3288
+ enumerable: false,
3289
+ configurable: true
3290
+ });
3291
+ Object.defineProperty(ReconnectingWebSocket.prototype, "shouldRetryFn", {
3292
+ set: function(cb) {
3293
+ if (typeof cb === "function") this._shouldRetryFn = cb;
3294
+ },
3295
+ enumerable: false,
3296
+ configurable: true
3297
+ });
3298
+ Object.defineProperty(ReconnectingWebSocket.prototype, "connectionTimeout", {
3299
+ get: function() {
3300
+ return this._connectionTimeout;
3301
+ },
3302
+ set: function(timeout) {
3303
+ if (typeof timeout === "number") this._connectionTimeout = timeout;
3304
+ },
3305
+ enumerable: false,
3306
+ configurable: true
3307
+ });
3308
+ Object.defineProperty(ReconnectingWebSocket.prototype, "maxReconnectAttempts", {
3309
+ get: function() {
3310
+ return this._allowedReconnectAttempts;
3311
+ },
3312
+ set: function(attempts) {
3313
+ if (attempts > 0 && attempts < $158ad1a38fb85e0e$var$MAX_RECONNECT_ATTEMPTS) {
3314
+ this.log("Setting maximum connection retry attempts to ".concat(attempts));
3315
+ this._allowedReconnectAttempts = attempts;
3316
+ } else this._allowedReconnectAttempts = $158ad1a38fb85e0e$var$DEFAULT_RECONNECT_ATTEMPTS;
3317
+ },
3318
+ enumerable: false,
3319
+ configurable: true
3320
+ });
3321
+ Object.defineProperty(ReconnectingWebSocket.prototype, "reconnectInterval", {
3322
+ get: function() {
3323
+ return this._reconnectInterval;
3324
+ },
3325
+ set: function(interval) {
3326
+ if (typeof interval === "number") this._reconnectInterval = interval < this._maxReconnectInterval ? interval : this._maxReconnectInterval;
3327
+ },
3328
+ enumerable: false,
3329
+ configurable: true
3330
+ });
3331
+ ReconnectingWebSocket.prototype._handleMessage = function(event) {
3332
+ return $158ad1a38fb85e0e$var$__awaiter(this, void 0, void 0, function() {
3333
+ var data, _parsePromise, msg;
3334
+ var _this = this;
3335
+ return $158ad1a38fb85e0e$var$__generator(this, function(_a) {
3336
+ switch(_a.label){
3337
+ case 0:
3338
+ this._lastMsgRecvTime = Date.now();
3339
+ data = event.data;
3340
+ _parsePromise = new Promise(function(resolve, reject) {
3341
+ if (typeof data === "string") // Handle text message
3342
+ resolve(data);
3343
+ else if (data instanceof ArrayBuffer) {
3344
+ // Handle binary message
3345
+ var arrayBuffer = data;
3346
+ // Parse the ArrayBuffer as needed
3347
+ // Example: Convert ArrayBuffer to Uint8Array
3348
+ resolve(new Uint8Array(arrayBuffer));
3349
+ // Process the Uint8Array as needed
3350
+ } else if (data instanceof Blob) {
3351
+ if (!_this._parseBlobToJson) {
3352
+ resolve(data);
3353
+ return;
3354
+ }
3355
+ // Handle Blob message
3356
+ var blob = data;
3357
+ // Convert Blob to ArrayBuffer
3358
+ var reader_1 = new FileReader();
3359
+ reader_1.onload = function() {
3360
+ var text = reader_1.result;
3361
+ try {
3362
+ var json = JSON.parse(text);
3363
+ resolve(json);
3364
+ } catch (e) {
3365
+ console.error("Failed to parse JSON from Blob:", e);
3366
+ }
3367
+ };
3368
+ reader_1.readAsText(blob);
3369
+ }
3370
+ });
3371
+ return [
3372
+ 4 /*yield*/ ,
3373
+ _parsePromise
3374
+ ];
3375
+ case 1:
3376
+ msg = _a.sent();
3377
+ this.emit("message", msg);
3378
+ return [
3379
+ 2 /*return*/
3380
+ ];
3381
+ }
3382
+ });
3383
+ });
3384
+ };
3385
+ return ReconnectingWebSocket;
3386
+ }((0, $fkNis$events.EventEmitter));
3387
+ [
3388
+ "binaryType",
3389
+ "bufferedAmount",
3390
+ "extensions",
3391
+ "protocol",
3392
+ "readyState",
3393
+ "url",
3394
+ "keepAliveTimeout",
3395
+ "keepAliveInterval",
3396
+ "shouldRetryFn",
3397
+ "connectionTimeout",
3398
+ "maxReconnectAttempts",
3399
+ "reconnectInterval"
3400
+ ].forEach(function(property) {
3401
+ Object.defineProperty($158ad1a38fb85e0e$export$4f3d0ffd941ebefb.prototype, property, {
3402
+ enumerable: true
3403
+ });
3404
+ });
3405
+ [
3406
+ "CONNECTING",
3407
+ "OPEN",
3408
+ "CLOSING",
3409
+ "CLOSED"
3410
+ ].forEach(function(property) {
3411
+ Object.defineProperty($158ad1a38fb85e0e$export$4f3d0ffd941ebefb.prototype, property, {
3412
+ enumerable: true,
3413
+ value: $158ad1a38fb85e0e$var$readyStates.indexOf(property)
3414
+ });
3415
+ });
3416
+ [
3417
+ "CONNECTING",
3418
+ "OPEN",
3419
+ "CLOSING",
3420
+ "CLOSED"
3421
+ ].forEach(function(property) {
3422
+ Object.defineProperty($158ad1a38fb85e0e$export$4f3d0ffd941ebefb, property, {
3423
+ enumerable: true,
3424
+ value: $158ad1a38fb85e0e$var$readyStates.indexOf(property)
3425
+ });
3426
+ });
3427
+
3428
+
3429
+
3430
+
3431
+ // @generated message type with reflection information, may provide speed optimized methods
3432
+ class $6abb1f384118d238$var$TextFrame$Type extends (0, $fkNis$protobuftsruntime.MessageType) {
3433
+ constructor(){
3434
+ super("pipecat.TextFrame", [
3435
+ {
3436
+ no: 1,
3437
+ name: "id",
3438
+ kind: "scalar",
3439
+ T: 4 /*ScalarType.UINT64*/ ,
3440
+ L: 0 /*LongType.BIGINT*/
3441
+ },
3442
+ {
3443
+ no: 2,
3444
+ name: "name",
3445
+ kind: "scalar",
3446
+ T: 9 /*ScalarType.STRING*/
3447
+ },
3448
+ {
3449
+ no: 3,
3450
+ name: "text",
3451
+ kind: "scalar",
3452
+ T: 9 /*ScalarType.STRING*/
3453
+ }
3454
+ ]);
3455
+ }
3456
+ create(value) {
3457
+ const message = globalThis.Object.create(this.messagePrototype);
3458
+ message.id = 0n;
3459
+ message.name = "";
3460
+ message.text = "";
3461
+ if (value !== undefined) (0, $fkNis$protobuftsruntime.reflectionMergePartial)(this, message, value);
3462
+ return message;
3463
+ }
3464
+ internalBinaryRead(reader, length, options, target) {
3465
+ let message = target ?? this.create(), end = reader.pos + length;
3466
+ while(reader.pos < end){
3467
+ let [fieldNo, wireType] = reader.tag();
3468
+ switch(fieldNo){
3469
+ case /* uint64 id */ 1:
3470
+ message.id = reader.uint64().toBigInt();
3471
+ break;
3472
+ case /* string name */ 2:
3473
+ message.name = reader.string();
3474
+ break;
3475
+ case /* string text */ 3:
3476
+ message.text = reader.string();
3477
+ break;
3478
+ default:
3479
+ let u = options.readUnknownField;
3480
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3481
+ let d = reader.skip(wireType);
3482
+ if (u !== false) (u === true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3483
+ }
3484
+ }
3485
+ return message;
3486
+ }
3487
+ internalBinaryWrite(message, writer, options) {
3488
+ /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $fkNis$protobuftsruntime.WireType).Varint).uint64(message.id);
3489
+ /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.name);
3490
+ /* string text = 3; */ if (message.text !== "") writer.tag(3, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.text);
3491
+ let u = options.writeUnknownFields;
3492
+ if (u !== false) (u == true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3493
+ return writer;
3494
+ }
3495
+ }
3496
+ const $6abb1f384118d238$export$78410ada03f6931b = new $6abb1f384118d238$var$TextFrame$Type();
3497
+ // @generated message type with reflection information, may provide speed optimized methods
3498
+ class $6abb1f384118d238$var$AudioRawFrame$Type extends (0, $fkNis$protobuftsruntime.MessageType) {
3499
+ constructor(){
3500
+ super("pipecat.AudioRawFrame", [
3501
+ {
3502
+ no: 1,
3503
+ name: "id",
3504
+ kind: "scalar",
3505
+ T: 4 /*ScalarType.UINT64*/ ,
3506
+ L: 0 /*LongType.BIGINT*/
3507
+ },
3508
+ {
3509
+ no: 2,
3510
+ name: "name",
3511
+ kind: "scalar",
3512
+ T: 9 /*ScalarType.STRING*/
3513
+ },
3514
+ {
3515
+ no: 3,
3516
+ name: "audio",
3517
+ kind: "scalar",
3518
+ T: 12 /*ScalarType.BYTES*/
3519
+ },
3520
+ {
3521
+ no: 4,
3522
+ name: "sample_rate",
3523
+ kind: "scalar",
3524
+ T: 13 /*ScalarType.UINT32*/
3525
+ },
3526
+ {
3527
+ no: 5,
3528
+ name: "num_channels",
3529
+ kind: "scalar",
3530
+ T: 13 /*ScalarType.UINT32*/
3531
+ },
3532
+ {
3533
+ no: 6,
3534
+ name: "pts",
3535
+ kind: "scalar",
3536
+ opt: true,
3537
+ T: 4 /*ScalarType.UINT64*/ ,
3538
+ L: 0 /*LongType.BIGINT*/
3539
+ }
3540
+ ]);
3541
+ }
3542
+ create(value) {
3543
+ const message = globalThis.Object.create(this.messagePrototype);
3544
+ message.id = 0n;
3545
+ message.name = "";
3546
+ message.audio = new Uint8Array(0);
3547
+ message.sampleRate = 0;
3548
+ message.numChannels = 0;
3549
+ if (value !== undefined) (0, $fkNis$protobuftsruntime.reflectionMergePartial)(this, message, value);
3550
+ return message;
3551
+ }
3552
+ internalBinaryRead(reader, length, options, target) {
3553
+ let message = target ?? this.create(), end = reader.pos + length;
3554
+ while(reader.pos < end){
3555
+ let [fieldNo, wireType] = reader.tag();
3556
+ switch(fieldNo){
3557
+ case /* uint64 id */ 1:
3558
+ message.id = reader.uint64().toBigInt();
3559
+ break;
3560
+ case /* string name */ 2:
3561
+ message.name = reader.string();
3562
+ break;
3563
+ case /* bytes audio */ 3:
3564
+ message.audio = reader.bytes();
3565
+ break;
3566
+ case /* uint32 sample_rate */ 4:
3567
+ message.sampleRate = reader.uint32();
3568
+ break;
3569
+ case /* uint32 num_channels */ 5:
3570
+ message.numChannels = reader.uint32();
3571
+ break;
3572
+ case /* optional uint64 pts */ 6:
3573
+ message.pts = reader.uint64().toBigInt();
3574
+ break;
3575
+ default:
3576
+ let u = options.readUnknownField;
3577
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3578
+ let d = reader.skip(wireType);
3579
+ if (u !== false) (u === true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3580
+ }
3581
+ }
3582
+ return message;
3583
+ }
3584
+ internalBinaryWrite(message, writer, options) {
3585
+ /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $fkNis$protobuftsruntime.WireType).Varint).uint64(message.id);
3586
+ /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.name);
3587
+ /* bytes audio = 3; */ if (message.audio.length) writer.tag(3, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).bytes(message.audio);
3588
+ /* uint32 sample_rate = 4; */ if (message.sampleRate !== 0) writer.tag(4, (0, $fkNis$protobuftsruntime.WireType).Varint).uint32(message.sampleRate);
3589
+ /* uint32 num_channels = 5; */ if (message.numChannels !== 0) writer.tag(5, (0, $fkNis$protobuftsruntime.WireType).Varint).uint32(message.numChannels);
3590
+ /* optional uint64 pts = 6; */ if (message.pts !== undefined) writer.tag(6, (0, $fkNis$protobuftsruntime.WireType).Varint).uint64(message.pts);
3591
+ let u = options.writeUnknownFields;
3592
+ if (u !== false) (u == true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3593
+ return writer;
3594
+ }
3595
+ }
3596
+ const $6abb1f384118d238$export$51d8721de3cbff8f = new $6abb1f384118d238$var$AudioRawFrame$Type();
3597
+ // @generated message type with reflection information, may provide speed optimized methods
3598
+ class $6abb1f384118d238$var$TranscriptionFrame$Type extends (0, $fkNis$protobuftsruntime.MessageType) {
3599
+ constructor(){
3600
+ super("pipecat.TranscriptionFrame", [
3601
+ {
3602
+ no: 1,
3603
+ name: "id",
3604
+ kind: "scalar",
3605
+ T: 4 /*ScalarType.UINT64*/ ,
3606
+ L: 0 /*LongType.BIGINT*/
3607
+ },
3608
+ {
3609
+ no: 2,
3610
+ name: "name",
3611
+ kind: "scalar",
3612
+ T: 9 /*ScalarType.STRING*/
3613
+ },
3614
+ {
3615
+ no: 3,
3616
+ name: "text",
3617
+ kind: "scalar",
3618
+ T: 9 /*ScalarType.STRING*/
3619
+ },
3620
+ {
3621
+ no: 4,
3622
+ name: "user_id",
3623
+ kind: "scalar",
3624
+ T: 9 /*ScalarType.STRING*/
3625
+ },
3626
+ {
3627
+ no: 5,
3628
+ name: "timestamp",
3629
+ kind: "scalar",
3630
+ T: 9 /*ScalarType.STRING*/
3631
+ }
3632
+ ]);
3633
+ }
3634
+ create(value) {
3635
+ const message = globalThis.Object.create(this.messagePrototype);
3636
+ message.id = 0n;
3637
+ message.name = "";
3638
+ message.text = "";
3639
+ message.userId = "";
3640
+ message.timestamp = "";
3641
+ if (value !== undefined) (0, $fkNis$protobuftsruntime.reflectionMergePartial)(this, message, value);
3642
+ return message;
3643
+ }
3644
+ internalBinaryRead(reader, length, options, target) {
3645
+ let message = target ?? this.create(), end = reader.pos + length;
3646
+ while(reader.pos < end){
3647
+ let [fieldNo, wireType] = reader.tag();
3648
+ switch(fieldNo){
3649
+ case /* uint64 id */ 1:
3650
+ message.id = reader.uint64().toBigInt();
3651
+ break;
3652
+ case /* string name */ 2:
3653
+ message.name = reader.string();
3654
+ break;
3655
+ case /* string text */ 3:
3656
+ message.text = reader.string();
3657
+ break;
3658
+ case /* string user_id */ 4:
3659
+ message.userId = reader.string();
3660
+ break;
3661
+ case /* string timestamp */ 5:
3662
+ message.timestamp = reader.string();
3663
+ break;
3664
+ default:
3665
+ let u = options.readUnknownField;
3666
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3667
+ let d = reader.skip(wireType);
3668
+ if (u !== false) (u === true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3669
+ }
3670
+ }
3671
+ return message;
3672
+ }
3673
+ internalBinaryWrite(message, writer, options) {
3674
+ /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $fkNis$protobuftsruntime.WireType).Varint).uint64(message.id);
3675
+ /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.name);
3676
+ /* string text = 3; */ if (message.text !== "") writer.tag(3, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.text);
3677
+ /* string user_id = 4; */ if (message.userId !== "") writer.tag(4, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.userId);
3678
+ /* string timestamp = 5; */ if (message.timestamp !== "") writer.tag(5, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.timestamp);
3679
+ let u = options.writeUnknownFields;
3680
+ if (u !== false) (u == true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3681
+ return writer;
3682
+ }
3683
+ }
3684
+ const $6abb1f384118d238$export$10b388c15a5cdc8a = new $6abb1f384118d238$var$TranscriptionFrame$Type();
3685
+ // @generated message type with reflection information, may provide speed optimized methods
3686
+ class $6abb1f384118d238$var$MessageFrame$Type extends (0, $fkNis$protobuftsruntime.MessageType) {
3687
+ constructor(){
3688
+ super("pipecat.MessageFrame", [
3689
+ {
3690
+ no: 1,
3691
+ name: "data",
3692
+ kind: "scalar",
3693
+ T: 9 /*ScalarType.STRING*/
3694
+ }
3695
+ ]);
3696
+ }
3697
+ create(value) {
3698
+ const message = globalThis.Object.create(this.messagePrototype);
3699
+ message.data = "";
3700
+ if (value !== undefined) (0, $fkNis$protobuftsruntime.reflectionMergePartial)(this, message, value);
3701
+ return message;
3702
+ }
3703
+ internalBinaryRead(reader, length, options, target) {
3704
+ let message = target ?? this.create(), end = reader.pos + length;
3705
+ while(reader.pos < end){
3706
+ let [fieldNo, wireType] = reader.tag();
3707
+ switch(fieldNo){
3708
+ case /* string data */ 1:
3709
+ message.data = reader.string();
3710
+ break;
3711
+ default:
3712
+ let u = options.readUnknownField;
3713
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3714
+ let d = reader.skip(wireType);
3715
+ if (u !== false) (u === true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3716
+ }
3717
+ }
3718
+ return message;
3719
+ }
3720
+ internalBinaryWrite(message, writer, options) {
3721
+ /* string data = 1; */ if (message.data !== "") writer.tag(1, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).string(message.data);
3722
+ let u = options.writeUnknownFields;
3723
+ if (u !== false) (u == true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3724
+ return writer;
3725
+ }
3726
+ }
3727
+ const $6abb1f384118d238$export$bc3f45a6d434f14a = new $6abb1f384118d238$var$MessageFrame$Type();
3728
+ // @generated message type with reflection information, may provide speed optimized methods
3729
+ class $6abb1f384118d238$var$Frame$Type extends (0, $fkNis$protobuftsruntime.MessageType) {
3730
+ constructor(){
3731
+ super("pipecat.Frame", [
3732
+ {
3733
+ no: 1,
3734
+ name: "text",
3735
+ kind: "message",
3736
+ oneof: "frame",
3737
+ T: ()=>$6abb1f384118d238$export$78410ada03f6931b
3738
+ },
3739
+ {
3740
+ no: 2,
3741
+ name: "audio",
3742
+ kind: "message",
3743
+ oneof: "frame",
3744
+ T: ()=>$6abb1f384118d238$export$51d8721de3cbff8f
3745
+ },
3746
+ {
3747
+ no: 3,
3748
+ name: "transcription",
3749
+ kind: "message",
3750
+ oneof: "frame",
3751
+ T: ()=>$6abb1f384118d238$export$10b388c15a5cdc8a
3752
+ },
3753
+ {
3754
+ no: 4,
3755
+ name: "message",
3756
+ kind: "message",
3757
+ oneof: "frame",
3758
+ T: ()=>$6abb1f384118d238$export$bc3f45a6d434f14a
3759
+ }
3760
+ ]);
3761
+ }
3762
+ create(value) {
3763
+ const message = globalThis.Object.create(this.messagePrototype);
3764
+ message.frame = {
3765
+ oneofKind: undefined
3766
+ };
3767
+ if (value !== undefined) (0, $fkNis$protobuftsruntime.reflectionMergePartial)(this, message, value);
3768
+ return message;
3769
+ }
3770
+ internalBinaryRead(reader, length, options, target) {
3771
+ let message = target ?? this.create(), end = reader.pos + length;
3772
+ while(reader.pos < end){
3773
+ let [fieldNo, wireType] = reader.tag();
3774
+ switch(fieldNo){
3775
+ case /* pipecat.TextFrame text */ 1:
3776
+ message.frame = {
3777
+ oneofKind: "text",
3778
+ text: $6abb1f384118d238$export$78410ada03f6931b.internalBinaryRead(reader, reader.uint32(), options, message.frame.text)
3779
+ };
3780
+ break;
3781
+ case /* pipecat.AudioRawFrame audio */ 2:
3782
+ message.frame = {
3783
+ oneofKind: "audio",
3784
+ audio: $6abb1f384118d238$export$51d8721de3cbff8f.internalBinaryRead(reader, reader.uint32(), options, message.frame.audio)
3785
+ };
3786
+ break;
3787
+ case /* pipecat.TranscriptionFrame transcription */ 3:
3788
+ message.frame = {
3789
+ oneofKind: "transcription",
3790
+ transcription: $6abb1f384118d238$export$10b388c15a5cdc8a.internalBinaryRead(reader, reader.uint32(), options, message.frame.transcription)
3791
+ };
3792
+ break;
3793
+ case /* pipecat.MessageFrame message */ 4:
3794
+ message.frame = {
3795
+ oneofKind: "message",
3796
+ message: $6abb1f384118d238$export$bc3f45a6d434f14a.internalBinaryRead(reader, reader.uint32(), options, message.frame.message)
3797
+ };
3798
+ break;
3799
+ default:
3800
+ let u = options.readUnknownField;
3801
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3802
+ let d = reader.skip(wireType);
3803
+ if (u !== false) (u === true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3804
+ }
3805
+ }
3806
+ return message;
3807
+ }
3808
+ internalBinaryWrite(message, writer, options) {
3809
+ /* pipecat.TextFrame text = 1; */ if (message.frame.oneofKind === "text") $6abb1f384118d238$export$78410ada03f6931b.internalBinaryWrite(message.frame.text, writer.tag(1, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).fork(), options).join();
3810
+ /* pipecat.AudioRawFrame audio = 2; */ if (message.frame.oneofKind === "audio") $6abb1f384118d238$export$51d8721de3cbff8f.internalBinaryWrite(message.frame.audio, writer.tag(2, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).fork(), options).join();
3811
+ /* pipecat.TranscriptionFrame transcription = 3; */ if (message.frame.oneofKind === "transcription") $6abb1f384118d238$export$10b388c15a5cdc8a.internalBinaryWrite(message.frame.transcription, writer.tag(3, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).fork(), options).join();
3812
+ /* pipecat.MessageFrame message = 4; */ if (message.frame.oneofKind === "message") $6abb1f384118d238$export$bc3f45a6d434f14a.internalBinaryWrite(message.frame.message, writer.tag(4, (0, $fkNis$protobuftsruntime.WireType).LengthDelimited).fork(), options).join();
3813
+ let u = options.writeUnknownFields;
3814
+ if (u !== false) (u == true ? (0, $fkNis$protobuftsruntime.UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3815
+ return writer;
3816
+ }
3817
+ }
3818
+ const $6abb1f384118d238$export$b89a827e9254211a = new $6abb1f384118d238$var$Frame$Type();
3819
+
3820
+
3821
+ class $bee70417e8ead9ed$export$de21836fc42c6f9c extends (0, $fkNis$pipecataiclientjs.Transport) {
3822
+ constructor(){
3823
+ super();
3824
+ this.audioQueue = [];
3825
+ this._mediaManager = new (0, $1c088932741d88e6$export$c95c65abc5f47125)(true, true, undefined, undefined, 512, $bee70417e8ead9ed$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE);
3826
+ this._mediaManager.setUserAudioCallback(this.handleUserAudioStream.bind(this));
3827
+ this._ws = null;
3828
+ }
3829
+ initialize(options, messageHandler) {
3830
+ this._options = options;
3831
+ this._callbacks = options.callbacks ?? {};
3832
+ this._onMessage = messageHandler;
3833
+ this._mediaManager.setRTVIOptions(options);
3834
+ this.state = "disconnected";
3835
+ }
3836
+ async initDevices() {
3837
+ this.state = "initializing";
3838
+ await this._mediaManager.initialize();
3839
+ this.state = "initialized";
3840
+ }
3841
+ async connect(authBundle, abortController) {
3842
+ this.state = "connecting";
3843
+ try {
3844
+ this._ws = this.initializeWebsocket(authBundle);
3845
+ await this._ws.connect();
3846
+ await this._mediaManager.connect();
3847
+ this.state = "connected";
3848
+ this._callbacks.onConnected?.();
3849
+ } catch (error) {
3850
+ const msg = `Failed to connect to websocket: ${error}`;
3851
+ (0, $fkNis$pipecataiclientjs.logger).error(msg);
3852
+ this.state = "error";
3853
+ throw new (0, $fkNis$pipecataiclientjs.TransportStartError)(msg);
3854
+ }
3855
+ }
3856
+ async disconnect() {
3857
+ this.state = "disconnecting";
3858
+ await this._mediaManager.disconnect();
3859
+ await this._ws?.close();
3860
+ this.state = "disconnected";
3861
+ this._callbacks.onDisconnected?.();
3862
+ }
3863
+ getAllMics() {
3864
+ return this._mediaManager.getAllMics();
3865
+ }
3866
+ getAllCams() {
3867
+ return this._mediaManager.getAllCams();
3868
+ }
3869
+ getAllSpeakers() {
3870
+ return this._mediaManager.getAllSpeakers();
3871
+ }
3872
+ async updateMic(micId) {
3873
+ return this._mediaManager.updateMic(micId);
3874
+ }
3875
+ updateCam(camId) {
3876
+ return this._mediaManager.updateCam(camId);
3877
+ }
3878
+ updateSpeaker(speakerId) {
3879
+ return this._mediaManager.updateSpeaker(speakerId);
3880
+ }
3881
+ get selectedMic() {
3882
+ return this._mediaManager.selectedMic;
3883
+ }
3884
+ get selectedSpeaker() {
3885
+ return this._mediaManager.selectedSpeaker;
3886
+ }
3887
+ enableMic(enable) {
3888
+ this._mediaManager.enableMic(enable);
3889
+ }
3890
+ get isMicEnabled() {
3891
+ return this._mediaManager.isMicEnabled;
3892
+ }
3893
+ get state() {
3894
+ return this._state;
3895
+ }
3896
+ set state(state) {
3897
+ if (this._state === state) return;
3898
+ this._state = state;
3899
+ this._callbacks.onTransportStateChanged?.(state);
3900
+ }
3901
+ get expiry() {
3902
+ return this._expiry;
3903
+ }
3904
+ tracks() {
3905
+ return this._mediaManager.tracks();
3906
+ }
3907
+ initializeWebsocket(authBundle) {
3908
+ const ws = new (0, $158ad1a38fb85e0e$export$4f3d0ffd941ebefb)(`${authBundle.ws_url}`, undefined, {
3909
+ parseBlobToJson: false
3910
+ });
3911
+ // disabling the keep alive, there is no API for it inside Pipecat
3912
+ ws.keepAliveInterval = 0;
3913
+ ws.on("open", ()=>{
3914
+ (0, $fkNis$pipecataiclientjs.logger).debug("Websocket connection opened");
3915
+ });
3916
+ ws.on("message", async (data)=>{
3917
+ let arrayBuffer = await data.arrayBuffer();
3918
+ const parsedFrame = (0, $6abb1f384118d238$export$b89a827e9254211a).fromBinary(new Uint8Array(arrayBuffer)).frame;
3919
+ if (parsedFrame.oneofKind === "audio") {
3920
+ // We should be able to use parsedFrame.audio.audio.buffer but for
3921
+ // some reason that contains all the bytes from the protobuf message.
3922
+ const audioVector = Array.from(parsedFrame.audio.audio);
3923
+ const uint8Array = new Uint8Array(audioVector);
3924
+ const int16Array = new Int16Array(uint8Array.buffer);
3925
+ this._mediaManager.bufferBotAudio(int16Array);
3926
+ } else if (parsedFrame.oneofKind === "message") {
3927
+ let jsonText = parsedFrame.message.data;
3928
+ try {
3929
+ let jsonMessage = JSON.parse(jsonText);
3930
+ if (jsonMessage.label === "rtvi-ai") this._onMessage(jsonMessage);
3931
+ } catch {
3932
+ (0, $fkNis$pipecataiclientjs.logger).warn("Failed to parse message", jsonText);
3933
+ }
3934
+ }
3935
+ });
3936
+ ws.on("error", (error)=>{
3937
+ this.connectionError(`websocket error: ${error}`);
3938
+ });
3939
+ ws.on("connection-timeout", ()=>{
3940
+ this.connectionError("websocket connection timed out");
3941
+ });
3942
+ ws.on("close", (code)=>{
3943
+ this.connectionError(`websocket connection closed. Code: ${code}`);
3944
+ });
3945
+ ws.on("reconnect-failed", ()=>{
3946
+ this.connectionError(`websocket reconnect failed`);
3947
+ });
3948
+ return ws;
3949
+ }
3950
+ sendReadyMessage() {
3951
+ this.state = "ready";
3952
+ this.sendMessage((0, $fkNis$pipecataiclientjs.RTVIMessage).clientReady());
3953
+ }
3954
+ handleUserAudioStream(data) {
3955
+ if (this.state === "ready") try {
3956
+ this.flushAudioQueue();
3957
+ this._sendAudioInput(data);
3958
+ } catch (error) {
3959
+ (0, $fkNis$pipecataiclientjs.logger).error("Error sending audio stream to websocket:", error);
3960
+ this.state = "error";
3961
+ }
3962
+ else this.audioQueue.push(data);
3963
+ }
3964
+ flushAudioQueue() {
3965
+ if (this.audioQueue.length <= 0) return;
3966
+ (0, $fkNis$pipecataiclientjs.logger).info("Will flush audio queue", this.audioQueue.length);
3967
+ while(this.audioQueue.length > 0){
3968
+ const queuedData = this.audioQueue.shift();
3969
+ if (queuedData) this._sendAudioInput(queuedData);
3970
+ }
3971
+ }
3972
+ sendMessage(message) {
3973
+ (0, $fkNis$pipecataiclientjs.logger).debug("Received message to send to Web Socket", message);
3974
+ const frame = (0, $6abb1f384118d238$export$b89a827e9254211a).create({
3975
+ frame: {
3976
+ oneofKind: "message",
3977
+ message: {
3978
+ data: JSON.stringify(message)
3979
+ }
3980
+ }
3981
+ });
3982
+ this._sendMsg(frame);
3983
+ }
3984
+ async _sendAudioInput(data) {
3985
+ const pcmByteArray = new Uint8Array(data);
3986
+ const frame = (0, $6abb1f384118d238$export$b89a827e9254211a).create({
3987
+ frame: {
3988
+ oneofKind: "audio",
3989
+ audio: {
3990
+ id: 0n,
3991
+ name: "audio",
3992
+ audio: pcmByteArray,
3993
+ sampleRate: $bee70417e8ead9ed$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE,
3994
+ numChannels: 1
3995
+ }
3996
+ }
3997
+ });
3998
+ await this._sendMsg(frame);
3999
+ }
4000
+ async _sendMsg(msg) {
4001
+ if (!this._ws) {
4002
+ (0, $fkNis$pipecataiclientjs.logger).error("sendMsg called but WS is null");
4003
+ return;
4004
+ }
4005
+ if (this._ws.readyState !== WebSocket.OPEN) {
4006
+ (0, $fkNis$pipecataiclientjs.logger).error("attempt to send to closed socket");
4007
+ return;
4008
+ }
4009
+ if (!msg) {
4010
+ (0, $fkNis$pipecataiclientjs.logger).error("need a msg to send a msg");
4011
+ return;
4012
+ }
4013
+ try {
4014
+ const encodedFrame = new Uint8Array((0, $6abb1f384118d238$export$b89a827e9254211a).toBinary(msg));
4015
+ await this._ws.send(encodedFrame);
4016
+ } catch (e) {
4017
+ (0, $fkNis$pipecataiclientjs.logger).error("sendMsg error", e);
4018
+ }
4019
+ }
4020
+ connectionError(errorMsg) {
4021
+ console.error(errorMsg);
4022
+ this.state = "error";
4023
+ this.disconnect();
4024
+ }
4025
+ // Not implemented
4026
+ enableScreenShare(enable) {
4027
+ (0, $fkNis$pipecataiclientjs.logger).error("startScreenShare not implemented for WebSocketTransport");
4028
+ throw new Error("Not implemented");
4029
+ }
4030
+ get isSharingScreen() {
4031
+ (0, $fkNis$pipecataiclientjs.logger).error("isSharingScreen not implemented for WebSocketTransport");
4032
+ return false;
4033
+ }
4034
+ enableCam(enable) {
4035
+ (0, $fkNis$pipecataiclientjs.logger).error("enableCam not implemented for WebSocketTransport");
4036
+ throw new Error("Not implemented");
4037
+ }
4038
+ get isCamEnabled() {
4039
+ (0, $fkNis$pipecataiclientjs.logger).error("isCamEnabled not implemented for WebSocketTransport");
4040
+ return false;
4041
+ }
4042
+ get selectedCam() {
4043
+ (0, $fkNis$pipecataiclientjs.logger).error("selectedCam not implemented for WebSocketTransport");
4044
+ throw new Error("Not implemented");
4045
+ }
4046
+ }
4047
+ $bee70417e8ead9ed$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE = 16000;
4048
+
4049
+
4050
+
4051
+
4052
+ //# sourceMappingURL=index.js.map