@pipecat-ai/websocket-transport 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4041 @@
1
+ import $kR6tG$dailycodailyjs from "@daily-co/daily-js";
2
+ import {logger as $kR6tG$logger, TransportStartError as $kR6tG$TransportStartError, RTVIMessage as $kR6tG$RTVIMessage, Transport as $kR6tG$Transport} from "@pipecat-ai/client-js";
3
+ import {EventEmitter as $kR6tG$EventEmitter} from "events";
4
+ import {reflectionMergePartial as $kR6tG$reflectionMergePartial, UnknownFieldHandler as $kR6tG$UnknownFieldHandler, WireType as $kR6tG$WireType, MessageType as $kR6tG$MessageType} from "@protobuf-ts/runtime";
5
+
6
+ // export * from "./realTimeWebSocketTransport";
7
+ // export * from "../../../lib/wavtools/dist/index.d.ts";
8
/**
 * Raw wav audio file contents
 * @typedef {Object} WavPackerAudioType
 * @property {Blob} blob
 * @property {string} url
 * @property {number} channelCount
 * @property {number} sampleRate
 * @property {number} duration
 */
/**
 * Utility class for assembling PCM16 "audio/wav" data
 * @class
 */
class $6d4b7449a1e1544a$export$13afda237b1c9846 {
    /**
     * Converts Float32Array of amplitude data to ArrayBuffer in Int16Array format
     * @param {Float32Array} float32Array
     * @returns {ArrayBuffer}
     */
    static floatTo16BitPCM(float32Array) {
        const buffer = new ArrayBuffer(float32Array.length * 2);
        const view = new DataView(buffer);
        let offset = 0;
        for (let i = 0; i < float32Array.length; i++, offset += 2) {
            // Clamp to [-1, 1]; scale asymmetrically so -1 -> -0x8000 and +1 -> 0x7fff
            const s = Math.max(-1, Math.min(1, float32Array[i]));
            view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
        }
        return buffer;
    }
    /**
     * Concatenates two ArrayBuffers
     * @param {ArrayBuffer} leftBuffer
     * @param {ArrayBuffer} rightBuffer
     * @returns {ArrayBuffer}
     */
    static mergeBuffers(leftBuffer, rightBuffer) {
        const tmpArray = new Uint8Array(leftBuffer.byteLength + rightBuffer.byteLength);
        tmpArray.set(new Uint8Array(leftBuffer), 0);
        tmpArray.set(new Uint8Array(rightBuffer), leftBuffer.byteLength);
        return tmpArray.buffer;
    }
    /**
     * Packs data into little-endian Int16/Int32 byte arrays for the WAV header
     * @private
     * @param {number} size 0 = 1x Int16, 1 = 2x Int16 (i.e. one Int32)
     * @param {number} arg value to pack
     * @returns {Uint8Array}
     */
    _packData(size, arg) {
        return [
            new Uint8Array([arg, arg >> 8]),
            new Uint8Array([arg, arg >> 8, arg >> 16, arg >> 24])
        ][size];
    }
    /**
     * Packs audio into "audio/wav" Blob
     * @param {number} sampleRate
     * @param {{bitsPerSample: number, channels: Array<Float32Array>, data: Int16Array}} audio
     * @returns {WavPackerAudioType}
     */
    pack(sampleRate, audio) {
        if (!audio?.bitsPerSample) throw new Error(`Missing "bitsPerSample"`);
        else if (!audio?.channels) throw new Error(`Missing "channels"`);
        else if (!audio?.data) throw new Error(`Missing "data"`);
        const { bitsPerSample, channels, data } = audio;
        const output = [
            // RIFF header
            'RIFF',
            // NOTE(review): fixed header-size constant (52) excludes the data
            // payload length, as in upstream wavtools — verify if strict WAV
            // validators need 36 + dataLength here.
            this._packData(1, 52),
            'WAVE',
            // chunk 1: "fmt " (PCM format description)
            'fmt ',
            this._packData(1, 16),
            this._packData(0, 1),
            this._packData(0, channels.length),
            this._packData(1, sampleRate),
            this._packData(1, sampleRate * channels.length * bitsPerSample / 8),
            this._packData(0, channels.length * bitsPerSample / 8),
            this._packData(0, bitsPerSample),
            // chunk 2: "data" (interleaved PCM samples)
            'data',
            this._packData(1, channels[0].length * channels.length * bitsPerSample / 8),
            data
        ];
        // BUG FIX: blob was previously labeled 'audio/mpeg' even though the
        // content assembled above is RIFF/WAVE PCM (and the documented return
        // contract is "audio/wav").
        const blob = new Blob(output, {
            type: 'audio/wav'
        });
        const url = URL.createObjectURL(blob);
        return {
            blob: blob,
            url: url,
            channelCount: channels.length,
            sampleRate: sampleRate,
            // data is Int16 (2 bytes/sample), so byteLength / (channels * rate * 2) = seconds
            duration: data.byteLength / (channels.length * sampleRate * 2)
        };
    }
}
globalThis.WavPacker = $6d4b7449a1e1544a$export$13afda237b1c9846;
108
+
109
+
110
/**
 * Constants for help with visualization
 * Helps map frequency ranges from Fast Fourier Transform
 * to human-interpretable ranges, notably music ranges and
 * human vocal ranges.
 */
// Eighth octave frequencies (C8..B8); lower octaves are derived by halving
const $03f71ce85e00ada6$var$octave8Frequencies = [
    4186.01, 4434.92, 4698.63, 4978.03, 5274.04, 5587.65,
    5919.91, 6271.93, 6644.88, 7040.0, 7458.62, 7902.13
];
// Labels for each of the above frequencies
const $03f71ce85e00ada6$var$octave8FrequencyLabels = [
    'C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'
];
// All note frequencies C1..B8 (96 entries) and their matching labels,
// built octave-major (C1, C#1, ..., B1, C2, ...)
const $03f71ce85e00ada6$export$776c63898ae5b636 = [];
const $03f71ce85e00ada6$export$facd167cc27ea9b0 = [];
for (let octave = 1; octave <= 8; octave++) {
    $03f71ce85e00ada6$var$octave8Frequencies.forEach((frequency, note) => {
        // Each octave below 8 is half the frequency of the one above it
        $03f71ce85e00ada6$export$776c63898ae5b636.push(frequency / Math.pow(2, 8 - octave));
        $03f71ce85e00ada6$export$facd167cc27ea9b0.push($03f71ce85e00ada6$var$octave8FrequencyLabels[note] + octave);
    });
}
/**
 * Subset of the note frequencies between 32 and 2000 Hz
 * 6 octave range: C1 to B6
 */
const $03f71ce85e00ada6$var$voiceFrequencyRange = [32.0, 2000.0];
// Note frequencies restricted to the (exclusive) voice range
const $03f71ce85e00ada6$export$dbc1581ed2cfa183 = $03f71ce85e00ada6$export$776c63898ae5b636.filter((_, i) => {
    const frequency = $03f71ce85e00ada6$export$776c63898ae5b636[i];
    return frequency > $03f71ce85e00ada6$var$voiceFrequencyRange[0]
        && frequency < $03f71ce85e00ada6$var$voiceFrequencyRange[1];
});
// Labels aligned index-for-index with the voice-range frequencies above
const $03f71ce85e00ada6$export$30a6f2881311088f = $03f71ce85e00ada6$export$facd167cc27ea9b0.filter((_, i) => {
    const frequency = $03f71ce85e00ada6$export$776c63898ae5b636[i];
    return frequency > $03f71ce85e00ada6$var$voiceFrequencyRange[0]
        && frequency < $03f71ce85e00ada6$var$voiceFrequencyRange[1];
});
165
+
166
+
167
/**
 * Frequency-domain analysis of audio for visualization, either live from an
 * HTMLAudioElement or pre-computed from a decoded AudioBuffer.
 */
class $f32f064564ee62f6$export$2c3136da0bf130f9 {
    /**
     * Retrieves frequency domain data from an AnalyserNode adjusted to a decibel range
     * returns human-readable formatting and labels
     * @param {AnalyserNode} analyser
     * @param {number} sampleRate
     * @param {Float32Array} [fftResult] pre-captured FFT frame; read from analyser if omitted
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {AudioAnalysisOutputType}
     */
    static getFrequencies(analyser, sampleRate, fftResult, analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
        if (!fftResult) {
            fftResult = new Float32Array(analyser.frequencyBinCount);
            analyser.getFloatFrequencyData(fftResult);
        }
        // Each FFT bin spans nyquist / binCount Hz
        const nyquistFrequency = sampleRate / 2;
        const frequencyStep = 1 / fftResult.length * nyquistFrequency;
        let outputValues;
        let frequencies;
        let labels;
        if (analysisType === 'music' || analysisType === 'voice') {
            // Bucket FFT bins into musical notes: each note keeps the loudest
            // amplitude among the bins whose frequency exceeds that note's.
            const useFrequencies = analysisType === 'voice'
                ? $03f71ce85e00ada6$export$dbc1581ed2cfa183
                : $03f71ce85e00ada6$export$776c63898ae5b636;
            const aggregateOutput = Array(useFrequencies.length).fill(minDecibels);
            for (let i = 0; i < fftResult.length; i++) {
                const frequency = i * frequencyStep;
                const amplitude = fftResult[i];
                for (let n = useFrequencies.length - 1; n >= 0; n--) {
                    if (frequency > useFrequencies[n]) {
                        aggregateOutput[n] = Math.max(aggregateOutput[n], amplitude);
                        break;
                    }
                }
            }
            outputValues = aggregateOutput;
            frequencies = analysisType === 'voice'
                ? $03f71ce85e00ada6$export$dbc1581ed2cfa183
                : $03f71ce85e00ada6$export$776c63898ae5b636;
            labels = analysisType === 'voice'
                ? $03f71ce85e00ada6$export$30a6f2881311088f
                : $03f71ce85e00ada6$export$facd167cc27ea9b0;
        } else {
            // Raw bins: one output value per FFT bin, labeled by its center frequency
            outputValues = Array.from(fftResult);
            frequencies = outputValues.map((_, i) => frequencyStep * i);
            labels = frequencies.map((f) => `${f.toFixed(2)} Hz`);
        }
        // Normalize decibel values into [0, 1] relative to the min/max window
        const normalizedOutput = outputValues.map((v) => {
            return Math.max(0, Math.min((v - minDecibels) / (maxDecibels - minDecibels), 1));
        });
        const values = new Float32Array(normalizedOutput);
        return {
            values: values,
            frequencies: frequencies,
            labels: labels
        };
    }
    /**
     * Creates a new AudioAnalysis instance for an HTMLAudioElement
     * @param {HTMLAudioElement} audioElement
     * @param {AudioBuffer|null} [audioBuffer] If provided, will cache all frequency domain data from the buffer
     * @returns {AudioAnalysis}
     */
    constructor(audioElement, audioBuffer = null) {
        this.fftResults = [];
        if (audioBuffer) {
            /**
             * Modified from
             * https://stackoverflow.com/questions/75063715/using-the-web-audio-api-to-analyze-a-song-without-playing
             *
             * Pre-computes FFT frames for the whole buffer with an OfflineAudioContext.
             * The reason to do this is that Safari fails when using `createMediaElementSource`.
             * This has a non-zero RAM cost so we only opt-in when a buffer is provided.
             */
            const { length, sampleRate } = audioBuffer;
            const offlineAudioContext = new OfflineAudioContext({
                length: length,
                sampleRate: sampleRate
            });
            const source = offlineAudioContext.createBufferSource();
            source.buffer = audioBuffer;
            const analyser = offlineAudioContext.createAnalyser();
            analyser.fftSize = 8192;
            analyser.smoothingTimeConstant = 0.1;
            source.connect(analyser);
            // limit is :: 128 / sampleRate;
            // but we just want 60fps - cuts ~1s from 6MB to 1MB of RAM
            const renderQuantumInSeconds = 1 / 60;
            const durationInSeconds = length / sampleRate;
            // Repeatedly suspend the offline render at each frame boundary,
            // snapshot the FFT, then resume until the buffer is exhausted.
            const analyze = (index) => {
                const suspendTime = renderQuantumInSeconds * index;
                if (suspendTime < durationInSeconds) {
                    offlineAudioContext.suspend(suspendTime).then(() => {
                        const fftResult = new Float32Array(analyser.frequencyBinCount);
                        analyser.getFloatFrequencyData(fftResult);
                        this.fftResults.push(fftResult);
                        analyze(index + 1);
                    });
                }
                if (index === 1) offlineAudioContext.startRendering();
                else offlineAudioContext.resume();
            };
            source.start(0);
            analyze(1);
            this.audio = audioElement;
            this.context = offlineAudioContext;
            this.analyser = analyser;
            this.sampleRate = sampleRate;
            this.audioBuffer = audioBuffer;
        } else {
            // Live path: analyse the element's output via a media element source
            const audioContext = new AudioContext();
            const track = audioContext.createMediaElementSource(audioElement);
            const analyser = audioContext.createAnalyser();
            analyser.fftSize = 8192;
            analyser.smoothingTimeConstant = 0.1;
            track.connect(analyser);
            analyser.connect(audioContext.destination);
            this.audio = audioElement;
            this.context = audioContext;
            this.analyser = analyser;
            this.sampleRate = this.context.sampleRate;
            this.audioBuffer = null;
        }
    }
    /**
     * Gets the current frequency domain data from the playing audio track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {AudioAnalysisOutputType}
     */
    getFrequencies(analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
        let fftResult = null;
        if (this.audioBuffer && this.fftResults.length) {
            // Pick the cached FFT frame matching current playback position
            const pct = this.audio.currentTime / this.audio.duration;
            const index = Math.min(pct * this.fftResults.length | 0, this.fftResults.length - 1);
            fftResult = this.fftResults[index];
        }
        return $f32f064564ee62f6$export$2c3136da0bf130f9.getFrequencies(this.analyser, this.sampleRate, fftResult, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Resume the internal AudioContext if it was suspended due to the lack of
     * user interaction when the AudioAnalysis was instantiated.
     * @returns {Promise<true>}
     */
    async resumeIfSuspended() {
        if (this.context.state === 'suspended') await this.context.resume();
        return true;
    }
}
globalThis.AudioAnalysis = $f32f064564ee62f6$export$2c3136da0bf130f9;
306
+
307
+
308
// AudioWorklet processor source, kept as a string so it can be loaded as a
// module via a blob URL (AudioWorklet code must live in its own file/URL).
const $29a8a70a9466b14f$export$50b76700e2b15e9 = `
class StreamProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.hasStarted = false;
    this.hasInterrupted = false;
    this.outputBuffers = [];
    this.bufferLength = 128;
    this.write = { buffer: new Float32Array(this.bufferLength), trackId: null };
    this.writeOffset = 0;
    this.trackSampleOffsets = {};
    this.port.onmessage = (event) => {
      if (event.data) {
        const payload = event.data;
        if (payload.event === 'write') {
          const int16Array = payload.buffer;
          const float32Array = new Float32Array(int16Array.length);
          for (let i = 0; i < int16Array.length; i++) {
            float32Array[i] = int16Array[i] / 0x8000; // Convert Int16 to Float32
          }
          this.writeData(float32Array, payload.trackId);
        } else if (
          payload.event === 'offset' ||
          payload.event === 'interrupt'
        ) {
          const requestId = payload.requestId;
          const trackId = this.write.trackId;
          const offset = this.trackSampleOffsets[trackId] || 0;
          this.port.postMessage({
            event: 'offset',
            requestId,
            trackId,
            offset,
          });
          if (payload.event === 'interrupt') {
            this.hasInterrupted = true;
          }
        } else {
          throw new Error(\`Unhandled event "\${payload.event}"\`);
        }
      }
    };
  }

  writeData(float32Array, trackId = null) {
    let { buffer } = this.write;
    let offset = this.writeOffset;
    for (let i = 0; i < float32Array.length; i++) {
      buffer[offset++] = float32Array[i];
      if (offset >= buffer.length) {
        this.outputBuffers.push(this.write);
        this.write = { buffer: new Float32Array(this.bufferLength), trackId };
        buffer = this.write.buffer;
        offset = 0;
      }
    }
    this.writeOffset = offset;
    return true;
  }

  process(inputs, outputs, parameters) {
    const output = outputs[0];
    const outputChannelData = output[0];
    const outputBuffers = this.outputBuffers;
    if (this.hasInterrupted) {
      this.port.postMessage({ event: 'stop' });
      return false;
    } else if (outputBuffers.length) {
      this.hasStarted = true;
      const { buffer, trackId } = outputBuffers.shift();
      for (let i = 0; i < outputChannelData.length; i++) {
        outputChannelData[i] = buffer[i] || 0;
      }
      if (trackId) {
        this.trackSampleOffsets[trackId] =
          this.trackSampleOffsets[trackId] || 0;
        this.trackSampleOffsets[trackId] += buffer.length;
      }
      return true;
    } else if (this.hasStarted) {
      this.port.postMessage({ event: 'stop' });
      return false;
    } else {
      return true;
    }
  }
}

registerProcessor('stream_processor', StreamProcessor);
`;
// Compile the worklet source into a blob URL that
// AudioContext.audioWorklet.addModule() can load.
const $29a8a70a9466b14f$var$script = new Blob([
    $29a8a70a9466b14f$export$50b76700e2b15e9
], {
    type: 'application/javascript'
});
const $29a8a70a9466b14f$var$src = URL.createObjectURL($29a8a70a9466b14f$var$script);
const $29a8a70a9466b14f$export$bfa8c596114d74df = $29a8a70a9466b14f$var$src;
405
+
406
+
407
+
408
/**
 * Plays streamed 16-bit PCM audio through the "stream_processor" AudioWorklet,
 * tracking per-track sample offsets and supporting interruption.
 */
class $d0a969833958d9e7$export$9698d62c78b8f366 {
    /**
     * Creates a new WavStreamPlayer instance
     * @param {{sampleRate?: number}} options
     * @returns {WavStreamPlayer}
     */
    constructor({ sampleRate = 44100 } = {}) {
        this.scriptSrc = $29a8a70a9466b14f$export$bfa8c596114d74df;
        this.sampleRate = sampleRate;
        this.context = null;
        this.stream = null;
        this.analyser = null;
        // requestId -> {trackId, offset, currentTime}, filled by worklet replies
        this.trackSampleOffsets = {};
        // Tracks interrupted via getTrackSampleOffset(true); writes to them are dropped
        this.interruptedTrackIds = {};
    }
    /**
     * Connects the audio context and enables output to speakers
     * @returns {Promise<true>}
     */
    async connect() {
        this.context = new AudioContext({
            sampleRate: this.sampleRate
        });
        // _speakerID is only set via updateSpeaker(); undefined until then
        if (this._speakerID) this.context.setSinkId(this._speakerID);
        if (this.context.state === "suspended") await this.context.resume();
        try {
            await this.context.audioWorklet.addModule(this.scriptSrc);
        } catch (e) {
            console.error(e);
            throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
        }
        const analyser = this.context.createAnalyser();
        analyser.fftSize = 8192;
        analyser.smoothingTimeConstant = 0.1;
        this.analyser = analyser;
        return true;
    }
    /**
     * Gets the current frequency domain data from the playing track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
     */
    getFrequencies(analysisType = "frequency", minDecibels = -100, maxDecibels = -30) {
        if (!this.analyser) throw new Error("Not connected, please call .connect() first");
        return $f32f064564ee62f6$export$2c3136da0bf130f9.getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Routes output to a different speaker; reverts on failure
     * @param {string} speaker deviceId
     */
    async updateSpeaker(speaker) {
        const previousSpeaker = this._speakerID;
        this._speakerID = speaker;
        if (this.context) {
            try {
                if (speaker === "default") await this.context.setSinkId();
                else await this.context.setSinkId(speaker);
            } catch (e) {
                console.error(`Could not set sinkId to ${speaker}: ${e}`);
                this._speakerID = previousSpeaker;
            }
        }
    }
    /**
     * Starts audio streaming
     * @private
     * @returns {Promise<true>}
     */
    _start() {
        const streamNode = new AudioWorkletNode(this.context, "stream_processor");
        streamNode.connect(this.context.destination);
        streamNode.port.onmessage = (e) => {
            const { event } = e.data;
            if (event === "stop") {
                streamNode.disconnect();
                this.stream = null;
            } else if (event === "offset") {
                const { requestId, trackId, offset } = e.data;
                const currentTime = offset / this.sampleRate;
                this.trackSampleOffsets[requestId] = {
                    trackId: trackId,
                    offset: offset,
                    currentTime: currentTime
                };
            }
        };
        // Route the worklet through the analyser so getFrequencies() sees playback
        this.analyser.disconnect();
        streamNode.connect(this.analyser);
        this.stream = streamNode;
        return true;
    }
    /**
     * Adds 16BitPCM data to the currently playing audio stream
     * You can add chunks beyond the current play point and they will be queued for play
     * @param {ArrayBuffer|Int16Array} arrayBuffer
     * @param {string} [trackId]
     * @returns {Int16Array}
     */
    add16BitPCM(arrayBuffer, trackId = "default") {
        if (typeof trackId !== "string") throw new Error(`trackId must be a string`);
        else if (this.interruptedTrackIds[trackId]) return;
        if (!this.stream) this._start();
        let buffer;
        if (arrayBuffer instanceof Int16Array) buffer = arrayBuffer;
        else if (arrayBuffer instanceof ArrayBuffer) buffer = new Int16Array(arrayBuffer);
        else throw new Error(`argument must be Int16Array or ArrayBuffer`);
        this.stream.port.postMessage({
            event: "write",
            buffer: buffer,
            trackId: trackId
        });
        return buffer;
    }
    /**
     * Gets the offset (sample count) of the currently playing stream
     * @param {boolean} [interrupt]
     * @returns {{trackId: string|null, offset: number, currentTime: number}}
     */
    async getTrackSampleOffset(interrupt = false) {
        if (!this.stream) return null;
        const requestId = crypto.randomUUID();
        this.stream.port.postMessage({
            event: interrupt ? "interrupt" : "offset",
            requestId: requestId
        });
        // Poll until the worklet answers with this requestId
        let trackSampleOffset;
        while (!trackSampleOffset) {
            trackSampleOffset = this.trackSampleOffsets[requestId];
            await new Promise((r) => setTimeout(() => r(), 1));
        }
        const { trackId } = trackSampleOffset;
        if (interrupt && trackId) this.interruptedTrackIds[trackId] = true;
        return trackSampleOffset;
    }
    /**
     * Strips the current stream and returns the sample offset of the audio
     * @param {boolean} [interrupt]
     * @returns {{trackId: string|null, offset: number, currentTime: number}}
     */
    async interrupt() {
        return this.getTrackSampleOffset(true);
    }
}
globalThis.WavStreamPlayer = $d0a969833958d9e7$export$9698d62c78b8f366;
543
+
544
+
545
// AudioWorklet processor source for recording, kept as a string so it can be
// loaded as a module via a blob URL (AudioWorklet code must live at its own URL).
const $8e1d1e6ff08f6fb5$var$AudioProcessorWorklet = `
class AudioProcessor extends AudioWorkletProcessor {

  constructor() {
    super();
    this.port.onmessage = this.receive.bind(this);
    this.initialize();
  }

  initialize() {
    this.foundAudio = false;
    this.recording = false;
    this.chunks = [];
  }

  /**
   * Concatenates sampled chunks into channels
   * Format is chunk[Left[], Right[]]
   */
  readChannelData(chunks, channel = -1, maxChannels = 9) {
    let channelLimit;
    if (channel !== -1) {
      if (chunks[0] && chunks[0].length - 1 < channel) {
        throw new Error(
          \`Channel \${channel} out of range: max \${chunks[0].length}\`
        );
      }
      channelLimit = channel + 1;
    } else {
      channel = 0;
      channelLimit = Math.min(chunks[0] ? chunks[0].length : 1, maxChannels);
    }
    const channels = [];
    for (let n = channel; n < channelLimit; n++) {
      const length = chunks.reduce((sum, chunk) => {
        return sum + chunk[n].length;
      }, 0);
      const buffers = chunks.map((chunk) => chunk[n]);
      const result = new Float32Array(length);
      let offset = 0;
      for (let i = 0; i < buffers.length; i++) {
        result.set(buffers[i], offset);
        offset += buffers[i].length;
      }
      channels[n] = result;
    }
    return channels;
  }

  /**
   * Combines parallel audio data into correct format,
   * channels[Left[], Right[]] to float32Array[LRLRLRLR...]
   */
  formatAudioData(channels) {
    if (channels.length === 1) {
      // Simple case is only one channel
      const float32Array = channels[0].slice();
      const meanValues = channels[0].slice();
      return { float32Array, meanValues };
    } else {
      const float32Array = new Float32Array(
        channels[0].length * channels.length
      );
      const meanValues = new Float32Array(channels[0].length);
      for (let i = 0; i < channels[0].length; i++) {
        const offset = i * channels.length;
        let meanValue = 0;
        for (let n = 0; n < channels.length; n++) {
          float32Array[offset + n] = channels[n][i];
          meanValue += channels[n][i];
        }
        meanValues[i] = meanValue / channels.length;
      }
      return { float32Array, meanValues };
    }
  }

  /**
   * Converts 32-bit float data to 16-bit integers
   */
  floatTo16BitPCM(float32Array) {
    const buffer = new ArrayBuffer(float32Array.length * 2);
    const view = new DataView(buffer);
    let offset = 0;
    for (let i = 0; i < float32Array.length; i++, offset += 2) {
      let s = Math.max(-1, Math.min(1, float32Array[i]));
      view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
    }
    return buffer;
  }

  /**
   * Retrieves the most recent amplitude values from the audio stream
   * @param {number} channel
   */
  getValues(channel = -1) {
    const channels = this.readChannelData(this.chunks, channel);
    const { meanValues } = this.formatAudioData(channels);
    return { meanValues, channels };
  }

  /**
   * Exports chunks as an audio/wav file
   */
  export() {
    const channels = this.readChannelData(this.chunks);
    const { float32Array, meanValues } = this.formatAudioData(channels);
    const audioData = this.floatTo16BitPCM(float32Array);
    return {
      meanValues: meanValues,
      audio: {
        bitsPerSample: 16,
        channels: channels,
        data: audioData,
      },
    };
  }

  receive(e) {
    const { event, id } = e.data;
    let receiptData = {};
    switch (event) {
      case 'start':
        this.recording = true;
        break;
      case 'stop':
        this.recording = false;
        break;
      case 'clear':
        this.initialize();
        break;
      case 'export':
        receiptData = this.export();
        break;
      case 'read':
        receiptData = this.getValues();
        break;
      default:
        break;
    }
    // Always send back receipt
    this.port.postMessage({ event: 'receipt', id, data: receiptData });
  }

  sendChunk(chunk) {
    const channels = this.readChannelData([chunk]);
    const { float32Array, meanValues } = this.formatAudioData(channels);
    const rawAudioData = this.floatTo16BitPCM(float32Array);
    const monoAudioData = this.floatTo16BitPCM(meanValues);
    this.port.postMessage({
      event: 'chunk',
      data: {
        mono: monoAudioData,
        raw: rawAudioData,
      },
    });
  }

  process(inputList, outputList, parameters) {
    // Copy input to output (e.g. speakers)
    // Note that this creates choppy sounds with Mac products
    const sourceLimit = Math.min(inputList.length, outputList.length);
    for (let inputNum = 0; inputNum < sourceLimit; inputNum++) {
      const input = inputList[inputNum];
      const output = outputList[inputNum];
      const channelCount = Math.min(input.length, output.length);
      for (let channelNum = 0; channelNum < channelCount; channelNum++) {
        input[channelNum].forEach((sample, i) => {
          output[channelNum][i] = sample;
        });
      }
    }
    const inputs = inputList[0];
    // There's latency at the beginning of a stream before recording starts
    // Make sure we actually receive audio data before we start storing chunks
    let sliceIndex = 0;
    if (!this.foundAudio) {
      for (const channel of inputs) {
        sliceIndex = 0; // reset for each channel
        if (this.foundAudio) {
          break;
        }
        if (channel) {
          for (const value of channel) {
            if (value !== 0) {
              // find only one non-zero entry in any channel
              this.foundAudio = true;
              break;
            } else {
              sliceIndex++;
            }
          }
        }
      }
    }
    if (inputs && inputs[0] && this.foundAudio && this.recording) {
      // We need to copy the TypedArray, because the \`process\`
      // internals will reuse the same buffer to hold each input
      const chunk = inputs.map((input) => input.slice(sliceIndex));
      this.chunks.push(chunk);
      this.sendChunk(chunk);
    }
    return true;
  }
}

registerProcessor('audio_processor', AudioProcessor);
`;
// Compile the worklet source into a blob URL that
// AudioContext.audioWorklet.addModule() can load.
const $8e1d1e6ff08f6fb5$var$script = new Blob([
    $8e1d1e6ff08f6fb5$var$AudioProcessorWorklet
], {
    type: 'application/javascript'
});
const $8e1d1e6ff08f6fb5$var$src = URL.createObjectURL($8e1d1e6ff08f6fb5$var$script);
const $8e1d1e6ff08f6fb5$export$1f65f50a8cbff43c = $8e1d1e6ff08f6fb5$var$src;
760
+
761
+
762
+
763
+
764
+ class $62bc376044a05513$export$439b217ca659a877 {
765
+ /**
766
+ * Create a new WavRecorder instance
767
+ * @param {{sampleRate?: number, outputToSpeakers?: boolean, debug?: boolean}} [options]
768
+ * @returns {WavRecorder}
769
+ */ constructor({ sampleRate: sampleRate = 44100, outputToSpeakers: outputToSpeakers = false, debug: debug = false } = {}){
770
+ // Script source
771
+ this.scriptSrc = (0, $8e1d1e6ff08f6fb5$export$1f65f50a8cbff43c);
772
+ // Config
773
+ this.sampleRate = sampleRate;
774
+ this.outputToSpeakers = outputToSpeakers;
775
+ this.debug = !!debug;
776
+ this._deviceChangeCallback = null;
777
+ this._devices = [];
778
+ this.deviceSelection = null;
779
+ // State variables
780
+ this.stream = null;
781
+ this.processor = null;
782
+ this.source = null;
783
+ this.node = null;
784
+ this.recording = false;
785
+ // Event handling with AudioWorklet
786
+ this._lastEventId = 0;
787
+ this.eventReceipts = {};
788
+ this.eventTimeout = 5000;
789
+ // Process chunks of audio
790
+ this._chunkProcessor = ()=>{};
791
+ this._chunkProcessorSize = void 0;
792
+ this._chunkProcessorBuffer = {
793
+ raw: new ArrayBuffer(0),
794
+ mono: new ArrayBuffer(0)
795
+ };
796
+ }
797
+ /**
798
+ * Decodes audio data from multiple formats to a Blob, url, Float32Array and AudioBuffer
799
+ * @param {Blob|Float32Array|Int16Array|ArrayBuffer|number[]} audioData
800
+ * @param {number} sampleRate
801
+ * @param {number} fromSampleRate
802
+ * @returns {Promise<DecodedAudioType>}
803
+ */ static async decode(audioData, sampleRate = 44100, fromSampleRate = -1) {
804
+ const context = new AudioContext({
805
+ sampleRate: sampleRate
806
+ });
807
+ let arrayBuffer;
808
+ let blob;
809
+ if (audioData instanceof Blob) {
810
+ if (fromSampleRate !== -1) throw new Error(`Can not specify "fromSampleRate" when reading from Blob`);
811
+ blob = audioData;
812
+ arrayBuffer = await blob.arrayBuffer();
813
+ } else if (audioData instanceof ArrayBuffer) {
814
+ if (fromSampleRate !== -1) throw new Error(`Can not specify "fromSampleRate" when reading from ArrayBuffer`);
815
+ arrayBuffer = audioData;
816
+ blob = new Blob([
817
+ arrayBuffer
818
+ ], {
819
+ type: 'audio/wav'
820
+ });
821
+ } else {
822
+ let float32Array;
823
+ let data;
824
+ if (audioData instanceof Int16Array) {
825
+ data = audioData;
826
+ float32Array = new Float32Array(audioData.length);
827
+ for(let i = 0; i < audioData.length; i++)float32Array[i] = audioData[i] / 0x8000;
828
+ } else if (audioData instanceof Float32Array) float32Array = audioData;
829
+ else if (audioData instanceof Array) float32Array = new Float32Array(audioData);
830
+ else throw new Error(`"audioData" must be one of: Blob, Float32Arrray, Int16Array, ArrayBuffer, Array<number>`);
831
+ if (fromSampleRate === -1) throw new Error(`Must specify "fromSampleRate" when reading from Float32Array, In16Array or Array`);
832
+ else if (fromSampleRate < 3000) throw new Error(`Minimum "fromSampleRate" is 3000 (3kHz)`);
833
+ if (!data) data = (0, $6d4b7449a1e1544a$export$13afda237b1c9846).floatTo16BitPCM(float32Array);
834
+ const audio = {
835
+ bitsPerSample: 16,
836
+ channels: [
837
+ float32Array
838
+ ],
839
+ data: data
840
+ };
841
+ const packer = new (0, $6d4b7449a1e1544a$export$13afda237b1c9846)();
842
+ const result = packer.pack(fromSampleRate, audio);
843
+ blob = result.blob;
844
+ arrayBuffer = await blob.arrayBuffer();
845
+ }
846
+ const audioBuffer = await context.decodeAudioData(arrayBuffer);
847
+ const values = audioBuffer.getChannelData(0);
848
+ const url = URL.createObjectURL(blob);
849
+ return {
850
+ blob: blob,
851
+ url: url,
852
+ values: values,
853
+ audioBuffer: audioBuffer
854
+ };
855
+ }
856
+ /**
857
+ * Logs data in debug mode
858
+ * @param {...any} arguments
859
+ * @returns {true}
860
+ */ log() {
861
+ if (this.debug) this.log(...arguments);
862
+ return true;
863
+ }
864
    /**
     * Retrieves the current sampleRate for the recorder
     * (fixed at construction time; the AudioContext is created with this rate)
     * @returns {number}
     */ getSampleRate() {
        return this.sampleRate;
    }
870
+ /**
871
+ * Retrieves the current status of the recording
872
+ * @returns {"ended"|"paused"|"recording"}
873
+ */ getStatus() {
874
+ if (!this.processor) return 'ended';
875
+ else if (!this.recording) return 'paused';
876
+ else return 'recording';
877
+ }
878
+ /**
879
+ * Sends an event to the AudioWorklet
880
+ * @private
881
+ * @param {string} name
882
+ * @param {{[key: string]: any}} data
883
+ * @param {AudioWorkletNode} [_processor]
884
+ * @returns {Promise<{[key: string]: any}>}
885
+ */ async _event(name, data = {}, _processor = null) {
886
+ _processor = _processor || this.processor;
887
+ if (!_processor) throw new Error('Can not send events without recording first');
888
+ const message = {
889
+ event: name,
890
+ id: this._lastEventId++,
891
+ data: data
892
+ };
893
+ _processor.port.postMessage(message);
894
+ const t0 = new Date().valueOf();
895
+ while(!this.eventReceipts[message.id]){
896
+ if (new Date().valueOf() - t0 > this.eventTimeout) throw new Error(`Timeout waiting for "${name}" event`);
897
+ await new Promise((res)=>setTimeout(()=>res(true), 1));
898
+ }
899
+ const payload = this.eventReceipts[message.id];
900
+ delete this.eventReceipts[message.id];
901
+ return payload;
902
+ }
903
    /**
     * Sets device change callback, remove if callback provided is `null`
     * @param {(Array<MediaDeviceInfo & {default: boolean}>): void|null} callback
     * @returns {true}
     */ listenForDeviceChange(callback) {
        if (callback === null && this._deviceChangeCallback) {
            // Unregister the previously installed listener.
            navigator.mediaDevices.removeEventListener('devicechange', this._deviceChangeCallback);
            this._deviceChangeCallback = null;
        } else if (callback !== null) {
            // Basically a debounce; we only want this called once when devices change
            // And we only want the most recent callback() to be executed
            // if a few are operating at the same time
            // NOTE(review): calling this repeatedly with a non-null callback adds a
            // new 'devicechange' listener each time without removing the old one —
            // confirm callers always pass null first (as WavMediaManager does).
            let lastId = 0;
            let lastDevices = [];
            // Order-insensitive fingerprint of a device list, used to suppress
            // callbacks when the set of devices has not actually changed.
            const serializeDevices = (devices)=>devices.map((d)=>d.deviceId).sort().join(',');
            const cb = async ()=>{
                let id = ++lastId;
                const devices = await this.listDevices();
                // Only the most recent in-flight enumeration is allowed to fire.
                if (id === lastId) {
                    if (serializeDevices(lastDevices) !== serializeDevices(devices)) {
                        lastDevices = devices;
                        callback(devices.slice());
                    }
                }
            };
            navigator.mediaDevices.addEventListener('devicechange', cb);
            // Fire once immediately so the consumer gets the initial device list.
            cb();
            this._deviceChangeCallback = cb;
        }
        return true;
    }
934
+ /**
935
+ * Manually request permission to use the microphone
936
+ * @returns {Promise<true>}
937
+ */ async requestPermission() {
938
+ const permissionStatus = await navigator.permissions.query({
939
+ name: 'microphone'
940
+ });
941
+ if (permissionStatus.state === 'denied') window.alert('You must grant microphone access to use this feature.');
942
+ else if (permissionStatus.state === 'prompt') try {
943
+ const stream = await navigator.mediaDevices.getUserMedia({
944
+ audio: true
945
+ });
946
+ const tracks = stream.getTracks();
947
+ tracks.forEach((track)=>track.stop());
948
+ } catch (e) {
949
+ window.alert('You must grant microphone access to use this feature.');
950
+ }
951
+ return true;
952
+ }
953
+ /**
954
+ * List all eligible devices for recording, will request permission to use microphone
955
+ * @returns {Promise<Array<MediaDeviceInfo & {default: boolean}>>}
956
+ */ async listDevices() {
957
+ if (!navigator.mediaDevices || !('enumerateDevices' in navigator.mediaDevices)) throw new Error('Could not request user devices');
958
+ await this.requestPermission();
959
+ const devices = await navigator.mediaDevices.enumerateDevices();
960
+ const audioDevices = devices.filter((device)=>device.kind === 'audioinput');
961
+ return audioDevices;
962
+ // const defaultDeviceIndex = audioDevices.findIndex(
963
+ // (device) => device.deviceId === 'default'
964
+ // );
965
+ // const deviceList = [];
966
+ // if (defaultDeviceIndex !== -1) {
967
+ // let defaultDevice = audioDevices.splice(defaultDeviceIndex, 1)[0];
968
+ // let existingIndex = audioDevices.findIndex(
969
+ // (device) => device.groupId === defaultDevice.groupId
970
+ // );
971
+ // if (existingIndex !== -1) {
972
+ // defaultDevice = audioDevices.splice(existingIndex, 1)[0];
973
+ // }
974
+ // defaultDevice.default = true;
975
+ // deviceList.push(defaultDevice);
976
+ // }
977
+ // return deviceList.concat(audioDevices);
978
+ }
979
+ /**
980
+ * Begins a recording session and requests microphone permissions if not already granted
981
+ * Microphone recording indicator will appear on browser tab but status will be "paused"
982
+ * @param {string} [deviceId] if no device provided, default device will be used
983
+ * @returns {Promise<true>}
984
+ */ async begin(deviceId) {
985
+ if (this.processor) throw new Error(`Already connected: please call .end() to start a new session`);
986
+ if (!navigator.mediaDevices || !('getUserMedia' in navigator.mediaDevices)) throw new Error('Could not request user media');
987
+ deviceId = deviceId ?? this.deviceSelection?.deviceId;
988
+ try {
989
+ const config = {
990
+ audio: true
991
+ };
992
+ if (deviceId) config.audio = {
993
+ deviceId: {
994
+ exact: deviceId
995
+ }
996
+ };
997
+ this.stream = await navigator.mediaDevices.getUserMedia(config);
998
+ } catch (err) {
999
+ throw new Error('Could not start media stream');
1000
+ }
1001
+ this.listDevices().then((devices)=>{
1002
+ deviceId = this.stream.getAudioTracks()[0].getSettings().deviceId;
1003
+ console.log('find current device', devices, deviceId, this.stream.getAudioTracks()[0].getSettings());
1004
+ this.deviceSelection = devices.find((d)=>d.deviceId === deviceId);
1005
+ console.log('current device', this.deviceSelection);
1006
+ });
1007
+ const context = new AudioContext({
1008
+ sampleRate: this.sampleRate
1009
+ });
1010
+ const source = context.createMediaStreamSource(this.stream);
1011
+ // Load and execute the module script.
1012
+ try {
1013
+ await context.audioWorklet.addModule(this.scriptSrc);
1014
+ } catch (e) {
1015
+ console.error(e);
1016
+ throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
1017
+ }
1018
+ const processor = new AudioWorkletNode(context, 'audio_processor');
1019
+ processor.port.onmessage = (e)=>{
1020
+ const { event: event, id: id, data: data } = e.data;
1021
+ if (event === 'receipt') this.eventReceipts[id] = data;
1022
+ else if (event === 'chunk') {
1023
+ if (this._chunkProcessorSize) {
1024
+ const buffer = this._chunkProcessorBuffer;
1025
+ this._chunkProcessorBuffer = {
1026
+ raw: (0, $6d4b7449a1e1544a$export$13afda237b1c9846).mergeBuffers(buffer.raw, data.raw),
1027
+ mono: (0, $6d4b7449a1e1544a$export$13afda237b1c9846).mergeBuffers(buffer.mono, data.mono)
1028
+ };
1029
+ if (this._chunkProcessorBuffer.mono.byteLength >= this._chunkProcessorSize) {
1030
+ this._chunkProcessor(this._chunkProcessorBuffer);
1031
+ this._chunkProcessorBuffer = {
1032
+ raw: new ArrayBuffer(0),
1033
+ mono: new ArrayBuffer(0)
1034
+ };
1035
+ }
1036
+ } else this._chunkProcessor(data);
1037
+ }
1038
+ };
1039
+ const node = source.connect(processor);
1040
+ const analyser = context.createAnalyser();
1041
+ analyser.fftSize = 8192;
1042
+ analyser.smoothingTimeConstant = 0.1;
1043
+ node.connect(analyser);
1044
+ if (this.outputToSpeakers) {
1045
+ // eslint-disable-next-line no-console
1046
+ console.warn("Warning: Output to speakers may affect sound quality,\nespecially due to system audio feedback preventative measures.\nuse only for debugging");
1047
+ analyser.connect(context.destination);
1048
+ }
1049
+ this.source = source;
1050
+ this.node = node;
1051
+ this.analyser = analyser;
1052
+ this.processor = processor;
1053
+ console.log('begin completed');
1054
+ return true;
1055
+ }
1056
    /**
     * Gets the current frequency domain data from the recording track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
     */ getFrequencies(analysisType = 'frequency', minDecibels = -100, maxDecibels = -30) {
        if (!this.processor) throw new Error('Session ended: please call .begin() first');
        // Delegates to the shared AudioAnalysis helper using the live analyser
        // node wired up in begin().
        return (0, $f32f064564ee62f6$export$2c3136da0bf130f9).getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
    }
1066
+ /**
1067
+ * Pauses the recording
1068
+ * Keeps microphone stream open but halts storage of audio
1069
+ * @returns {Promise<true>}
1070
+ */ async pause() {
1071
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1072
+ else if (!this.recording) throw new Error('Already paused: please call .record() first');
1073
+ if (this._chunkProcessorBuffer.raw.byteLength) this._chunkProcessor(this._chunkProcessorBuffer);
1074
+ this.log('Pausing ...');
1075
+ await this._event('stop');
1076
+ this.recording = false;
1077
+ return true;
1078
+ }
1079
+ /**
1080
+ * Start recording stream and storing to memory from the connected audio source
1081
+ * @param {(data: { mono: Int16Array; raw: Int16Array }) => any} [chunkProcessor]
1082
+ * @param {number} [chunkSize] chunkProcessor will not be triggered until this size threshold met in mono audio
1083
+ * @returns {Promise<true>}
1084
+ */ async record(chunkProcessor = ()=>{}, chunkSize = 8192) {
1085
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1086
+ else if (this.recording) throw new Error('Already recording: please call .pause() first');
1087
+ else if (typeof chunkProcessor !== 'function') throw new Error(`chunkProcessor must be a function`);
1088
+ this._chunkProcessor = chunkProcessor;
1089
+ this._chunkProcessorSize = chunkSize;
1090
+ this._chunkProcessorBuffer = {
1091
+ raw: new ArrayBuffer(0),
1092
+ mono: new ArrayBuffer(0)
1093
+ };
1094
+ this.log('Recording ...');
1095
+ await this._event('start');
1096
+ this.recording = true;
1097
+ return true;
1098
+ }
1099
+ /**
1100
+ * Clears the audio buffer, empties stored recording
1101
+ * @returns {Promise<true>}
1102
+ */ async clear() {
1103
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1104
+ await this._event('clear');
1105
+ return true;
1106
+ }
1107
+ /**
1108
+ * Reads the current audio stream data
1109
+ * @returns {Promise<{meanValues: Float32Array, channels: Array<Float32Array>}>}
1110
+ */ async read() {
1111
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1112
+ this.log('Reading ...');
1113
+ const result = await this._event('read');
1114
+ return result;
1115
+ }
1116
+ /**
1117
+ * Saves the current audio stream to a file
1118
+ * @param {boolean} [force] Force saving while still recording
1119
+ * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
1120
+ */ async save(force = false) {
1121
+ if (!this.processor) throw new Error('Session ended: please call .begin() first');
1122
+ if (!force && this.recording) throw new Error('Currently recording: please call .pause() first, or call .save(true) to force');
1123
+ this.log('Exporting ...');
1124
+ const exportData = await this._event('export');
1125
+ const packer = new (0, $6d4b7449a1e1544a$export$13afda237b1c9846)();
1126
+ const result = packer.pack(this.sampleRate, exportData.audio);
1127
+ return result;
1128
+ }
1129
    /**
     * Ends the current recording session and saves the result
     * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
     */ async end() {
        if (!this.processor) throw new Error('Session ended: please call .begin() first');
        // Keep a reference: this.processor is nulled below but we still need to
        // send the final 'export' event to the worklet.
        const _processor = this.processor;
        this.log('Stopping ...');
        await this._event('stop');
        this.recording = false;
        // Release the microphone (removes the browser recording indicator).
        const tracks = this.stream.getTracks();
        tracks.forEach((track)=>track.stop());
        this.log('Exporting ...');
        const exportData = await this._event('export', {}, _processor);
        // Tear down the audio graph before dropping the references.
        this.processor.disconnect();
        this.source.disconnect();
        this.node.disconnect();
        this.analyser.disconnect();
        this.stream = null;
        this.processor = null;
        this.source = null;
        this.node = null;
        // Pack the exported PCM data into a WAV container.
        const packer = new (0, $6d4b7449a1e1544a$export$13afda237b1c9846)();
        const result = packer.pack(this.sampleRate, exportData.audio);
        return result;
    }
1154
    /**
     * Performs a full cleanup of WavRecorder instance
     * Stops actively listening via microphone and removes existing listeners
     * @returns {Promise<true>}
     */ async quit() {
        // Detach the devicechange listener installed by listenForDeviceChange().
        this.listenForDeviceChange(null);
        // we do not reset this on end so that selections persist across starts
        this.deviceSelection = null;
        if (this.processor) await this.end();
        return true;
    }
1165
+ }
1166
// Expose the recorder class globally under its original (pre-bundling) name.
globalThis.WavRecorder = $62bc376044a05513$export$439b217ca659a877;
1167
+
1168
+
1169
+
1170
+
1171
+
1172
/**
 * Records PCM16 audio from an externally supplied MediaStreamTrack via an
 * AudioWorklet, mirroring the WavRecorder API but without device management.
 */ class $5fc11d7bc0d20724$export$2934cf2d25c67a48 {
    /**
     * Create a new MediaStreamRecorder instance
     * @param {{sampleRate?: number, outputToSpeakers?: boolean, debug?: boolean}} [options]
     * @returns {MediaStreamRecorder}
     */ constructor({ sampleRate: sampleRate = 44100, outputToSpeakers: outputToSpeakers = false, debug: debug = false } = {}){
        // Script source for the audio_processor AudioWorklet module.
        this.scriptSrc = (0, $8e1d1e6ff08f6fb5$export$1f65f50a8cbff43c);
        // Config
        this.sampleRate = sampleRate;
        this.outputToSpeakers = outputToSpeakers;
        this.debug = !!debug;
        // State variables
        this.stream = null;
        this.processor = null;
        this.source = null;
        this.node = null;
        this.recording = false;
        // Event handling with AudioWorklet
        this._lastEventId = 0;
        this.eventReceipts = {};
        this.eventTimeout = 5000;
        // Process chunks of audio
        this._chunkProcessor = ()=>{};
        this._chunkProcessorSize = void 0;
        this._chunkProcessorBuffer = {
            raw: new ArrayBuffer(0),
            mono: new ArrayBuffer(0)
        };
    }
    /**
     * Logs data in debug mode
     * @param {...any} arguments
     * @returns {true}
     */ log() {
        // Fixed: previously called this.log(...arguments), which recursed
        // infinitely (stack overflow) whenever debug mode was enabled.
        if (this.debug) console.log(...arguments);
        return true;
    }
    /**
     * Retrieves the current sampleRate for the recorder
     * @returns {number}
     */ getSampleRate() {
        return this.sampleRate;
    }
    /**
     * Retrieves the current status of the recording
     * @returns {"ended"|"paused"|"recording"}
     */ getStatus() {
        if (!this.processor) return "ended";
        else if (!this.recording) return "paused";
        else return "recording";
    }
    /**
     * Sends an event to the AudioWorklet and waits for its receipt.
     * @private
     * @param {string} name
     * @param {{[key: string]: any}} data
     * @param {AudioWorkletNode} [_processor]
     * @returns {Promise<{[key: string]: any}>}
     */ async _event(name, data = {}, _processor = null) {
        _processor = _processor || this.processor;
        if (!_processor) throw new Error("Can not send events without recording first");
        const message = {
            event: name,
            id: this._lastEventId++,
            data: data
        };
        _processor.port.postMessage(message);
        const t0 = new Date().valueOf();
        // Poll for the worklet's receipt; bail out after `eventTimeout` ms.
        while(!this.eventReceipts[message.id]){
            if (new Date().valueOf() - t0 > this.eventTimeout) throw new Error(`Timeout waiting for "${name}" event`);
            await new Promise((res)=>setTimeout(()=>res(true), 1));
        }
        const payload = this.eventReceipts[message.id];
        delete this.eventReceipts[message.id];
        return payload;
    }
    /**
     * Begins a recording session for the given audioTrack
     * Microphone recording indicator will appear on browser tab but status will be "paused"
     * @param {MediaStreamTrack} [audioTrack] if no device provided, default device will be used
     * @returns {Promise<true>}
     */ async begin(audioTrack) {
        if (this.processor) throw new Error(`Already connected: please call .end() to start a new session`);
        if (!audioTrack || audioTrack.kind !== "audio") throw new Error("No audio track provided");
        // Wrap the caller-owned track in a stream we can source from.
        this.stream = new MediaStream([
            audioTrack
        ]);
        const context = new AudioContext({
            sampleRate: this.sampleRate
        });
        const source = context.createMediaStreamSource(this.stream);
        // Load and execute the module script.
        try {
            await context.audioWorklet.addModule(this.scriptSrc);
        } catch (e) {
            console.error(e);
            throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
        }
        const processor = new AudioWorkletNode(context, "audio_processor");
        processor.port.onmessage = (e)=>{
            const { event: event, id: id, data: data } = e.data;
            if (event === "receipt") this.eventReceipts[id] = data;
            else if (event === "chunk") {
                if (this._chunkProcessorSize) {
                    // Accumulate worklet chunks until the configured threshold
                    // is reached, then flush to the consumer's chunk processor.
                    const buffer = this._chunkProcessorBuffer;
                    this._chunkProcessorBuffer = {
                        raw: (0, $6d4b7449a1e1544a$export$13afda237b1c9846).mergeBuffers(buffer.raw, data.raw),
                        mono: (0, $6d4b7449a1e1544a$export$13afda237b1c9846).mergeBuffers(buffer.mono, data.mono)
                    };
                    if (this._chunkProcessorBuffer.mono.byteLength >= this._chunkProcessorSize) {
                        this._chunkProcessor(this._chunkProcessorBuffer);
                        this._chunkProcessorBuffer = {
                            raw: new ArrayBuffer(0),
                            mono: new ArrayBuffer(0)
                        };
                    }
                } else this._chunkProcessor(data);
            }
        };
        const node = source.connect(processor);
        const analyser = context.createAnalyser();
        analyser.fftSize = 8192;
        analyser.smoothingTimeConstant = 0.1;
        node.connect(analyser);
        if (this.outputToSpeakers) {
            // eslint-disable-next-line no-console
            console.warn("Warning: Output to speakers may affect sound quality,\nespecially due to system audio feedback preventative measures.\nuse only for debugging");
            analyser.connect(context.destination);
        }
        this.source = source;
        this.node = node;
        this.analyser = analyser;
        this.processor = processor;
        return true;
    }
    /**
     * Gets the current frequency domain data from the recording track
     * @param {"frequency"|"music"|"voice"} [analysisType]
     * @param {number} [minDecibels] default -100
     * @param {number} [maxDecibels] default -30
     * @returns {import('./analysis/audio_analysis.js').AudioAnalysisOutputType}
     */ getFrequencies(analysisType = "frequency", minDecibels = -100, maxDecibels = -30) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        return (0, $f32f064564ee62f6$export$2c3136da0bf130f9).getFrequencies(this.analyser, this.sampleRate, null, analysisType, minDecibels, maxDecibels);
    }
    /**
     * Pauses the recording
     * Keeps microphone stream open but halts storage of audio
     * @returns {Promise<true>}
     */ async pause() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        else if (!this.recording) throw new Error("Already paused: please call .record() first");
        // Flush any audio still buffered for the chunk processor before stopping.
        if (this._chunkProcessorBuffer.raw.byteLength) this._chunkProcessor(this._chunkProcessorBuffer);
        this.log("Pausing ...");
        await this._event("stop");
        this.recording = false;
        return true;
    }
    /**
     * Start recording stream and storing to memory from the connected audio source
     * @param {(data: { mono: Int16Array; raw: Int16Array }) => any} [chunkProcessor]
     * @param {number} [chunkSize] chunkProcessor will not be triggered until this size threshold met in mono audio
     * @returns {Promise<true>}
     */ async record(chunkProcessor = ()=>{}, chunkSize = 8192) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        // Fixed: removed stray debug text ("HELLO") from this error message.
        else if (this.recording) throw new Error("Already recording: please call .pause() first");
        else if (typeof chunkProcessor !== "function") throw new Error(`chunkProcessor must be a function`);
        this._chunkProcessor = chunkProcessor;
        this._chunkProcessorSize = chunkSize;
        this._chunkProcessorBuffer = {
            raw: new ArrayBuffer(0),
            mono: new ArrayBuffer(0)
        };
        this.log("Recording ...");
        await this._event("start");
        this.recording = true;
        return true;
    }
    /**
     * Clears the audio buffer, empties stored recording
     * @returns {Promise<true>}
     */ async clear() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        await this._event("clear");
        return true;
    }
    /**
     * Reads the current audio stream data
     * @returns {Promise<{meanValues: Float32Array, channels: Array<Float32Array>}>}
     */ async read() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        this.log("Reading ...");
        const result = await this._event("read");
        return result;
    }
    /**
     * Saves the current audio stream to a file
     * @param {boolean} [force] Force saving while still recording
     * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
     */ async save(force = false) {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        if (!force && this.recording) throw new Error("Currently recording: please call .pause() first, or call .save(true) to force");
        this.log("Exporting ...");
        const exportData = await this._event("export");
        const packer = new (0, $6d4b7449a1e1544a$export$13afda237b1c9846)();
        const result = packer.pack(this.sampleRate, exportData.audio);
        return result;
    }
    /**
     * Ends the current recording session and saves the result
     * @returns {Promise<import('./wav_packer.js').WavPackerAudioType>}
     */ async end() {
        if (!this.processor) throw new Error("Session ended: please call .begin() first");
        // Keep a reference: this.processor is nulled below but we still need to
        // send the final 'export' event to the worklet.
        const _processor = this.processor;
        this.log("Stopping ...");
        await this._event("stop");
        this.recording = false;
        this.log("Exporting ...");
        const exportData = await this._event("export", {}, _processor);
        // Tear down the audio graph. NOTE(review): unlike WavRecorder.end(), the
        // MediaStreamTrack itself is not stopped here — it is owned by the caller
        // who passed it to begin().
        this.processor.disconnect();
        this.source.disconnect();
        this.node.disconnect();
        this.analyser.disconnect();
        this.stream = null;
        this.processor = null;
        this.source = null;
        this.node = null;
        const packer = new (0, $6d4b7449a1e1544a$export$13afda237b1c9846)();
        const result = packer.pack(this.sampleRate, exportData.audio);
        return result;
    }
    /**
     * Performs a full cleanup of the recorder instance
     * Stops recording and releases audio-graph resources
     * @returns {Promise<true>}
     */ async quit() {
        // Fixed: previously called this.listenForDeviceChange(null), which does
        // not exist on this class and made quit() always throw a TypeError.
        if (this.processor) await this.end();
        return true;
    }
}
1414
// NOTE(review): this is a self-assignment — the bare `WavRecorder` identifier
// resolves to the global installed earlier, so this line is a no-op and does
// NOT export the MediaStreamRecorder class defined above. Confirm intent.
globalThis.WavRecorder = WavRecorder;
1415
+
1416
+
1417
+
1418
+
1419
// TypeScript `__extends` helper (tslib): implements prototypal inheritance for
// the ES5 class-pattern output below. Reuses an ambient __extends if present.
var $fc49a56cd8739127$var$__extends = undefined && undefined.__extends || function() {
    var extendStatics = function(d, b) {
        // Pick the best available static-side inheritance strategy once:
        // Object.setPrototypeOf, then __proto__ assignment, then own-property copy.
        extendStatics = Object.setPrototypeOf || ({
            __proto__: []
        }) instanceof Array && function(d, b) {
            d.__proto__ = b;
        } || function(d, b) {
            for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
        };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        // Link the instance prototype chain without invoking the base constructor.
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
}();
1439
// TypeScript `__awaiter` helper (tslib): runs a generator-based coroutine,
// awaiting each yielded value and resolving the returned promise with the
// generator's final result.
var $fc49a56cd8739127$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
    // Wrap non-promise values in the target promise implementation P.
    function adopt(value) {
        return value instanceof P ? value : new P(function(resolve) {
            resolve(value);
        });
    }
    return new (P || (P = Promise))(function(resolve, reject) {
        // Resume the generator with the awaited value ...
        function fulfilled(value) {
            try {
                step(generator.next(value));
            } catch (e) {
                reject(e);
            }
        }
        // ... or throw the awaited rejection back into it.
        function rejected(value) {
            try {
                step(generator["throw"](value));
            } catch (e) {
                reject(e);
            }
        }
        // Advance until the generator reports completion.
        function step(result) {
            result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
        }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
1466
// TypeScript `__generator` helper (tslib): a state machine that emulates
// generator semantics in ES5. `body` receives the state object `_` and is
// re-entered at `_.label`; ops are [opcode, value] pairs where 0/1 = next/throw
// into current value, 2 = return, 4 = yield, 5 = delegate (yield*), 6 = error,
// 7 = end-of-finally. Do not modify — order of the bitwise checks is load-bearing.
var $fc49a56cd8739127$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
    var _ = {
        label: 0,
        sent: function() {
            if (t[0] & 1) throw t[1];
            return t[1];
        },
        trys: [],
        ops: []
    }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
    return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
        return this;
    }), g;
    function verb(n) {
        return function(v) {
            return step([
                n,
                v
            ]);
        };
    }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while(g && (g = 0, op[0] && (_ = 0)), _)try {
            // If delegating (y set), forward the op into the inner iterator first.
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [
                op[0] & 2,
                t.value
            ];
            switch(op[0]){
                case 0:
                case 1:
                    t = op;
                    break;
                case 4:
                    // yield: hand the value out and park at the next label.
                    _.label++;
                    return {
                        value: op[1],
                        done: false
                    };
                case 5:
                    // yield*: start delegating to the inner iterator.
                    _.label++;
                    y = op[1];
                    op = [
                        0
                    ];
                    continue;
                case 7:
                    // leave finally: resume with the op saved when finally was entered.
                    op = _.ops.pop();
                    _.trys.pop();
                    continue;
                default:
                    // try/catch/finally region dispatch based on the trys stack.
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
                        _ = 0;
                        continue;
                    }
                    if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
                        _.label = op[1];
                        break;
                    }
                    if (op[0] === 6 && _.label < t[1]) {
                        _.label = t[1];
                        t = op;
                        break;
                    }
                    if (t && _.label < t[2]) {
                        _.label = t[2];
                        _.ops.push(op);
                        break;
                    }
                    if (t[2]) _.ops.pop();
                    _.trys.pop();
                    continue;
            }
            op = body.call(thisArg, _);
        } catch (e) {
            op = [
                6,
                e
            ];
            y = 0;
        } finally{
            f = t = 0;
        }
        if (op[0] & 5) throw op[1];
        return {
            value: op[0] ? op[1] : void 0,
            done: true
        };
    }
};
1557
var $fc49a56cd8739127$export$4a0c46dbbe2ddb67 = /** @class */ function() {
    /**
     * Base media manager: holds RTVI options, event callbacks, and the
     * mic/cam enable flags shared by concrete manager implementations.
     */ function MediaManager() {
        this._callbacks = {};
        this._micEnabled = true;
        this._camEnabled = false;
    }
    /** Registers the sink that receives captured user audio. */
    MediaManager.prototype.setUserAudioCallback = function(userAudioCallback) {
        this._userAudioCallback = userAudioCallback;
    };
    /**
     * Stores RTVI options and derives callbacks / enable flags from them.
     * Subsequent calls are ignored unless `override` is true.
     */
    MediaManager.prototype.setRTVIOptions = function(options, override) {
        if (override === void 0) override = false;
        // First writer wins unless explicitly overridden.
        if (this._options && !override) return;
        this._options = options;
        var callbacks = options.callbacks;
        this._callbacks = callbacks !== null && callbacks !== void 0 ? callbacks : {};
        var enableMic = options.enableMic;
        this._micEnabled = enableMic !== null && enableMic !== void 0 ? enableMic : true;
        var enableCam = options.enableCam;
        this._camEnabled = enableCam !== null && enableCam !== void 0 ? enableCam : false;
    };
    return MediaManager;
}();
1577
+ var $fc49a56cd8739127$export$45c5b9bfba2f6304 = /** @class */ function(_super) {
1578
+ $fc49a56cd8739127$var$__extends(WavMediaManager, _super);
1579
+ function WavMediaManager(recorderChunkSize, recorderSampleRate) {
1580
+ if (recorderChunkSize === void 0) recorderChunkSize = undefined;
1581
+ if (recorderSampleRate === void 0) recorderSampleRate = 24000;
1582
+ var _this = _super.call(this) || this;
1583
+ _this._initialized = false;
1584
+ _this._recorderChunkSize = undefined;
1585
+ _this._recorderChunkSize = recorderChunkSize;
1586
+ _this._wavRecorder = new (0, $62bc376044a05513$export$439b217ca659a877)({
1587
+ sampleRate: recorderSampleRate
1588
+ });
1589
+ _this._wavStreamPlayer = new (0, $d0a969833958d9e7$export$9698d62c78b8f366)({
1590
+ sampleRate: 24000
1591
+ });
1592
+ return _this;
1593
+ }
1594
+ WavMediaManager.prototype.initialize = function() {
1595
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1596
+ return $fc49a56cd8739127$var$__generator(this, function(_a) {
1597
+ switch(_a.label){
1598
+ case 0:
1599
+ return [
1600
+ 4 /*yield*/ ,
1601
+ this._wavRecorder.begin()
1602
+ ];
1603
+ case 1:
1604
+ _a.sent();
1605
+ this._wavRecorder.listenForDeviceChange(null);
1606
+ this._wavRecorder.listenForDeviceChange(this._handleAvailableDevicesUpdated.bind(this));
1607
+ return [
1608
+ 4 /*yield*/ ,
1609
+ this._wavStreamPlayer.connect()
1610
+ ];
1611
+ case 2:
1612
+ _a.sent();
1613
+ this._initialized = true;
1614
+ return [
1615
+ 2 /*return*/
1616
+ ];
1617
+ }
1618
+ });
1619
+ });
1620
+ };
1621
+ WavMediaManager.prototype.connect = function() {
1622
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1623
+ var isAlreadyRecording;
1624
+ return $fc49a56cd8739127$var$__generator(this, function(_a) {
1625
+ switch(_a.label){
1626
+ case 0:
1627
+ if (!!this._initialized) return [
1628
+ 3 /*break*/ ,
1629
+ 2
1630
+ ];
1631
+ return [
1632
+ 4 /*yield*/ ,
1633
+ this.initialize()
1634
+ ];
1635
+ case 1:
1636
+ _a.sent();
1637
+ _a.label = 2;
1638
+ case 2:
1639
+ isAlreadyRecording = this._wavRecorder.getStatus() == "recording";
1640
+ if (!(this._micEnabled && !isAlreadyRecording)) return [
1641
+ 3 /*break*/ ,
1642
+ 4
1643
+ ];
1644
+ return [
1645
+ 4 /*yield*/ ,
1646
+ this._startRecording()
1647
+ ];
1648
+ case 3:
1649
+ _a.sent();
1650
+ _a.label = 4;
1651
+ case 4:
1652
+ return [
1653
+ 2 /*return*/
1654
+ ];
1655
+ }
1656
+ });
1657
+ });
1658
+ };
1659
+ WavMediaManager.prototype.disconnect = function() {
1660
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1661
+ return $fc49a56cd8739127$var$__generator(this, function(_a) {
1662
+ switch(_a.label){
1663
+ case 0:
1664
+ if (!this._initialized) return [
1665
+ 2 /*return*/
1666
+ ];
1667
+ return [
1668
+ 4 /*yield*/ ,
1669
+ this._wavRecorder.end()
1670
+ ];
1671
+ case 1:
1672
+ _a.sent();
1673
+ return [
1674
+ 4 /*yield*/ ,
1675
+ this._wavStreamPlayer.interrupt()
1676
+ ];
1677
+ case 2:
1678
+ _a.sent();
1679
+ this._initialized = false;
1680
+ return [
1681
+ 2 /*return*/
1682
+ ];
1683
+ }
1684
+ });
1685
+ });
1686
+ };
1687
+ WavMediaManager.prototype.userStartedSpeaking = function() {
1688
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1689
+ return $fc49a56cd8739127$var$__generator(this, function(_a) {
1690
+ return [
1691
+ 2 /*return*/ ,
1692
+ this._wavStreamPlayer.interrupt()
1693
+ ];
1694
+ });
1695
+ });
1696
+ };
1697
+ WavMediaManager.prototype.bufferBotAudio = function(data, id) {
1698
+ return this._wavStreamPlayer.add16BitPCM(data, id);
1699
+ };
1700
+ WavMediaManager.prototype.getAllMics = function() {
1701
+ return this._wavRecorder.listDevices();
1702
+ };
1703
+ WavMediaManager.prototype.getAllCams = function() {
1704
+ // TODO: Video not supported yet
1705
+ return Promise.resolve([]);
1706
+ };
1707
+ WavMediaManager.prototype.getAllSpeakers = function() {
1708
+ // TODO: Implement speaker support
1709
+ return Promise.resolve([]);
1710
+ };
1711
+ WavMediaManager.prototype.updateMic = function(micId) {
1712
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1713
+ var prevMic, curMic;
1714
+ var _a, _b;
1715
+ return $fc49a56cd8739127$var$__generator(this, function(_c) {
1716
+ switch(_c.label){
1717
+ case 0:
1718
+ prevMic = this._wavRecorder.deviceSelection;
1719
+ return [
1720
+ 4 /*yield*/ ,
1721
+ this._wavRecorder.end()
1722
+ ];
1723
+ case 1:
1724
+ _c.sent();
1725
+ return [
1726
+ 4 /*yield*/ ,
1727
+ this._wavRecorder.begin(micId)
1728
+ ];
1729
+ case 2:
1730
+ _c.sent();
1731
+ if (!this._micEnabled) return [
1732
+ 3 /*break*/ ,
1733
+ 4
1734
+ ];
1735
+ return [
1736
+ 4 /*yield*/ ,
1737
+ this._startRecording()
1738
+ ];
1739
+ case 3:
1740
+ _c.sent();
1741
+ _c.label = 4;
1742
+ case 4:
1743
+ curMic = this._wavRecorder.deviceSelection;
1744
+ if (curMic && prevMic && prevMic.label !== curMic.label) (_b = (_a = this._callbacks).onMicUpdated) === null || _b === void 0 || _b.call(_a, curMic);
1745
+ return [
1746
+ 2 /*return*/
1747
+ ];
1748
+ }
1749
+ });
1750
+ });
1751
+ };
1752
+ WavMediaManager.prototype.updateCam = function(camId) {
1753
+ // TODO: Video not supported yet
1754
+ };
1755
+ WavMediaManager.prototype.updateSpeaker = function(speakerId) {
1756
+ // TODO: Implement speaker support
1757
+ };
1758
+ Object.defineProperty(WavMediaManager.prototype, "selectedMic", {
1759
+ get: function() {
1760
+ var _a;
1761
+ return (_a = this._wavRecorder.deviceSelection) !== null && _a !== void 0 ? _a : {};
1762
+ },
1763
+ enumerable: false,
1764
+ configurable: true
1765
+ });
1766
+ Object.defineProperty(WavMediaManager.prototype, "selectedCam", {
1767
+ get: function() {
1768
+ // TODO: Video not supported yet
1769
+ return {};
1770
+ },
1771
+ enumerable: false,
1772
+ configurable: true
1773
+ });
1774
+ Object.defineProperty(WavMediaManager.prototype, "selectedSpeaker", {
1775
+ get: function() {
1776
+ // TODO: Implement speaker support
1777
+ return {};
1778
+ },
1779
+ enumerable: false,
1780
+ configurable: true
1781
+ });
1782
+ WavMediaManager.prototype.enableMic = function(enable) {
1783
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, Promise, function() {
1784
+ var _this = this;
1785
+ return $fc49a56cd8739127$var$__generator(this, function(_a) {
1786
+ switch(_a.label){
1787
+ case 0:
1788
+ this._micEnabled = enable;
1789
+ if (!this._wavRecorder.stream) return [
1790
+ 2 /*return*/
1791
+ ];
1792
+ this._wavRecorder.stream.getAudioTracks().forEach(function(track) {
1793
+ var _a, _b;
1794
+ track.enabled = enable;
1795
+ if (!enable) (_b = (_a = _this._callbacks).onTrackStopped) === null || _b === void 0 || _b.call(_a, track, $fc49a56cd8739127$var$localParticipant());
1796
+ });
1797
+ if (!enable) return [
1798
+ 3 /*break*/ ,
1799
+ 2
1800
+ ];
1801
+ return [
1802
+ 4 /*yield*/ ,
1803
+ this._startRecording()
1804
+ ];
1805
+ case 1:
1806
+ _a.sent();
1807
+ return [
1808
+ 3 /*break*/ ,
1809
+ 4
1810
+ ];
1811
+ case 2:
1812
+ return [
1813
+ 4 /*yield*/ ,
1814
+ this._wavRecorder.pause()
1815
+ ];
1816
+ case 3:
1817
+ _a.sent();
1818
+ _a.label = 4;
1819
+ case 4:
1820
+ return [
1821
+ 2 /*return*/
1822
+ ];
1823
+ }
1824
+ });
1825
+ });
1826
+ };
1827
+ WavMediaManager.prototype.enableCam = function(enable) {
1828
+ // TODO: Video not supported yet
1829
+ };
1830
+ Object.defineProperty(WavMediaManager.prototype, "isCamEnabled", {
1831
+ get: function() {
1832
+ // TODO: Video not supported yet
1833
+ return false;
1834
+ },
1835
+ enumerable: false,
1836
+ configurable: true
1837
+ });
1838
+ Object.defineProperty(WavMediaManager.prototype, "isMicEnabled", {
1839
+ get: function() {
1840
+ return this._micEnabled;
1841
+ },
1842
+ enumerable: false,
1843
+ configurable: true
1844
+ });
1845
+ WavMediaManager.prototype.tracks = function() {
1846
+ var _a;
1847
+ var tracks = (_a = this._wavRecorder.stream) === null || _a === void 0 ? void 0 : _a.getTracks()[0];
1848
+ return {
1849
+ local: tracks ? {
1850
+ audio: tracks
1851
+ } : {}
1852
+ };
1853
+ };
1854
+ WavMediaManager.prototype._startRecording = function() {
1855
+ return $fc49a56cd8739127$var$__awaiter(this, void 0, void 0, function() {
1856
+ var track;
1857
+ var _this = this;
1858
+ var _a, _b, _c;
1859
+ return $fc49a56cd8739127$var$__generator(this, function(_d) {
1860
+ switch(_d.label){
1861
+ case 0:
1862
+ return [
1863
+ 4 /*yield*/ ,
1864
+ this._wavRecorder.record(function(data) {
1865
+ _this._userAudioCallback(data.mono);
1866
+ }, this._recorderChunkSize)
1867
+ ];
1868
+ case 1:
1869
+ _d.sent();
1870
+ track = (_a = this._wavRecorder.stream) === null || _a === void 0 ? void 0 : _a.getAudioTracks()[0];
1871
+ if (track) (_c = (_b = this._callbacks).onTrackStarted) === null || _c === void 0 || _c.call(_b, track, $fc49a56cd8739127$var$localParticipant());
1872
+ return [
1873
+ 2 /*return*/
1874
+ ];
1875
+ }
1876
+ });
1877
+ });
1878
+ };
1879
+ WavMediaManager.prototype._handleAvailableDevicesUpdated = function(devices) {
1880
+ var _a, _b, _c, _d;
1881
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, devices.filter(function(d) {
1882
+ return d.kind === "videoinput";
1883
+ }));
1884
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, devices.filter(function(d) {
1885
+ return d.kind === "audioinput";
1886
+ }));
1887
+ // if the current device went away or we're using the default and
1888
+ // the default changed, reset the mic.
1889
+ var defaultDevice = devices.find(function(d) {
1890
+ return d.deviceId === "default";
1891
+ });
1892
+ var currentDevice = this._wavRecorder.deviceSelection;
1893
+ if (currentDevice && (!devices.some(function(d) {
1894
+ return d.deviceId === currentDevice.deviceId;
1895
+ }) || currentDevice.deviceId === "default" && currentDevice.label !== (defaultDevice === null || defaultDevice === void 0 ? void 0 : defaultDevice.label))) this.updateMic("");
1896
+ };
1897
+ return WavMediaManager;
1898
+ }($fc49a56cd8739127$export$4a0c46dbbe2ddb67);
1899
+ var $fc49a56cd8739127$var$localParticipant = function() {
1900
+ return {
1901
+ id: "local",
1902
+ name: "",
1903
+ local: true
1904
+ };
1905
+ };
1906
+
1907
+
1908
+
1909
+
1910
+
1911
+ var $22ece045290c996a$var$__extends = undefined && undefined.__extends || function() {
1912
+ var extendStatics = function(d, b) {
1913
+ extendStatics = Object.setPrototypeOf || ({
1914
+ __proto__: []
1915
+ }) instanceof Array && function(d, b) {
1916
+ d.__proto__ = b;
1917
+ } || function(d, b) {
1918
+ for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
1919
+ };
1920
+ return extendStatics(d, b);
1921
+ };
1922
+ return function(d, b) {
1923
+ if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
1924
+ extendStatics(d, b);
1925
+ function __() {
1926
+ this.constructor = d;
1927
+ }
1928
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
1929
+ };
1930
+ }();
1931
+ var $22ece045290c996a$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
1932
+ function adopt(value) {
1933
+ return value instanceof P ? value : new P(function(resolve) {
1934
+ resolve(value);
1935
+ });
1936
+ }
1937
+ return new (P || (P = Promise))(function(resolve, reject) {
1938
+ function fulfilled(value) {
1939
+ try {
1940
+ step(generator.next(value));
1941
+ } catch (e) {
1942
+ reject(e);
1943
+ }
1944
+ }
1945
+ function rejected(value) {
1946
+ try {
1947
+ step(generator["throw"](value));
1948
+ } catch (e) {
1949
+ reject(e);
1950
+ }
1951
+ }
1952
+ function step(result) {
1953
+ result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
1954
+ }
1955
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
1956
+ });
1957
+ };
1958
+ var $22ece045290c996a$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
1959
+ var _ = {
1960
+ label: 0,
1961
+ sent: function() {
1962
+ if (t[0] & 1) throw t[1];
1963
+ return t[1];
1964
+ },
1965
+ trys: [],
1966
+ ops: []
1967
+ }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
1968
+ return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
1969
+ return this;
1970
+ }), g;
1971
+ function verb(n) {
1972
+ return function(v) {
1973
+ return step([
1974
+ n,
1975
+ v
1976
+ ]);
1977
+ };
1978
+ }
1979
+ function step(op) {
1980
+ if (f) throw new TypeError("Generator is already executing.");
1981
+ while(g && (g = 0, op[0] && (_ = 0)), _)try {
1982
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
1983
+ if (y = 0, t) op = [
1984
+ op[0] & 2,
1985
+ t.value
1986
+ ];
1987
+ switch(op[0]){
1988
+ case 0:
1989
+ case 1:
1990
+ t = op;
1991
+ break;
1992
+ case 4:
1993
+ _.label++;
1994
+ return {
1995
+ value: op[1],
1996
+ done: false
1997
+ };
1998
+ case 5:
1999
+ _.label++;
2000
+ y = op[1];
2001
+ op = [
2002
+ 0
2003
+ ];
2004
+ continue;
2005
+ case 7:
2006
+ op = _.ops.pop();
2007
+ _.trys.pop();
2008
+ continue;
2009
+ default:
2010
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
2011
+ _ = 0;
2012
+ continue;
2013
+ }
2014
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
2015
+ _.label = op[1];
2016
+ break;
2017
+ }
2018
+ if (op[0] === 6 && _.label < t[1]) {
2019
+ _.label = t[1];
2020
+ t = op;
2021
+ break;
2022
+ }
2023
+ if (t && _.label < t[2]) {
2024
+ _.label = t[2];
2025
+ _.ops.push(op);
2026
+ break;
2027
+ }
2028
+ if (t[2]) _.ops.pop();
2029
+ _.trys.pop();
2030
+ continue;
2031
+ }
2032
+ op = body.call(thisArg, _);
2033
+ } catch (e) {
2034
+ op = [
2035
+ 6,
2036
+ e
2037
+ ];
2038
+ y = 0;
2039
+ } finally{
2040
+ f = t = 0;
2041
+ }
2042
+ if (op[0] & 5) throw op[1];
2043
+ return {
2044
+ value: op[0] ? op[1] : void 0,
2045
+ done: true
2046
+ };
2047
+ }
2048
+ };
2049
+ var $22ece045290c996a$export$c95c65abc5f47125 = /** @class */ function(_super) {
2050
+ $22ece045290c996a$var$__extends(DailyMediaManager, _super);
2051
+ function DailyMediaManager(enablePlayer, enableRecording, onTrackStartedCallback, onTrackStoppedCallback, recorderChunkSize, recorderSampleRate) {
2052
+ if (enablePlayer === void 0) enablePlayer = true;
2053
+ if (enableRecording === void 0) enableRecording = true;
2054
+ if (recorderChunkSize === void 0) recorderChunkSize = undefined;
2055
+ if (recorderSampleRate === void 0) recorderSampleRate = 24000;
2056
+ var _a;
2057
+ var _this = _super.call(this) || this;
2058
+ _this._selectedCam = {};
2059
+ _this._selectedMic = {};
2060
+ _this._selectedSpeaker = {};
2061
+ _this._remoteAudioLevelInterval = null;
2062
+ _this._recorderChunkSize = undefined;
2063
+ _this._initialized = false;
2064
+ _this._connected = false;
2065
+ _this._currentAudioTrack = null;
2066
+ _this._connectResolve = null;
2067
+ _this.onTrackStartedCallback = onTrackStartedCallback;
2068
+ _this.onTrackStoppedCallback = onTrackStoppedCallback;
2069
+ _this._recorderChunkSize = recorderChunkSize;
2070
+ _this._daily = (_a = (0, $kR6tG$dailycodailyjs).getCallInstance()) !== null && _a !== void 0 ? _a : (0, $kR6tG$dailycodailyjs).createCallObject();
2071
+ if (enableRecording) _this._mediaStreamRecorder = new (0, $5fc11d7bc0d20724$export$2934cf2d25c67a48)({
2072
+ sampleRate: recorderSampleRate
2073
+ });
2074
+ if (enablePlayer) _this._wavStreamPlayer = new (0, $d0a969833958d9e7$export$9698d62c78b8f366)({
2075
+ sampleRate: 24000
2076
+ });
2077
+ _this._daily.on("track-started", _this.handleTrackStarted.bind(_this));
2078
+ _this._daily.on("track-stopped", _this.handleTrackStopped.bind(_this));
2079
+ _this._daily.on("available-devices-updated", _this._handleAvailableDevicesUpdated.bind(_this));
2080
+ _this._daily.on("selected-devices-updated", _this._handleSelectedDevicesUpdated.bind(_this));
2081
+ _this._daily.on("local-audio-level", _this._handleLocalAudioLevel.bind(_this));
2082
+ return _this;
2083
+ }
2084
+ DailyMediaManager.prototype.initialize = function() {
2085
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2086
+ var infos, devices, cams, mics, speakers;
2087
+ var _this = this;
2088
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
2089
+ return $22ece045290c996a$var$__generator(this, function(_o) {
2090
+ switch(_o.label){
2091
+ case 0:
2092
+ if (this._initialized) {
2093
+ console.warn("DailyMediaManager already initialized");
2094
+ return [
2095
+ 2 /*return*/
2096
+ ];
2097
+ }
2098
+ return [
2099
+ 4 /*yield*/ ,
2100
+ this._daily.startCamera({
2101
+ startVideoOff: !this._camEnabled,
2102
+ startAudioOff: !this._micEnabled
2103
+ })
2104
+ ];
2105
+ case 1:
2106
+ infos = _o.sent();
2107
+ return [
2108
+ 4 /*yield*/ ,
2109
+ this._daily.enumerateDevices()
2110
+ ];
2111
+ case 2:
2112
+ devices = _o.sent().devices;
2113
+ cams = devices.filter(function(d) {
2114
+ return d.kind === "videoinput";
2115
+ });
2116
+ mics = devices.filter(function(d) {
2117
+ return d.kind === "audioinput";
2118
+ });
2119
+ speakers = devices.filter(function(d) {
2120
+ return d.kind === "audiooutput";
2121
+ });
2122
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, cams);
2123
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, mics);
2124
+ (_f = (_e = this._callbacks).onAvailableSpeakersUpdated) === null || _f === void 0 || _f.call(_e, speakers);
2125
+ this._selectedCam = infos.camera;
2126
+ (_h = (_g = this._callbacks).onCamUpdated) === null || _h === void 0 || _h.call(_g, infos.camera);
2127
+ this._selectedMic = infos.mic;
2128
+ (_k = (_j = this._callbacks).onMicUpdated) === null || _k === void 0 || _k.call(_j, infos.mic);
2129
+ this._selectedSpeaker = infos.speaker;
2130
+ (_m = (_l = this._callbacks).onSpeakerUpdated) === null || _m === void 0 || _m.call(_l, infos.speaker);
2131
+ if (!!this._daily.isLocalAudioLevelObserverRunning()) return [
2132
+ 3 /*break*/ ,
2133
+ 4
2134
+ ];
2135
+ return [
2136
+ 4 /*yield*/ ,
2137
+ this._daily.startLocalAudioLevelObserver(100)
2138
+ ];
2139
+ case 3:
2140
+ _o.sent();
2141
+ _o.label = 4;
2142
+ case 4:
2143
+ if (!this._wavStreamPlayer) return [
2144
+ 3 /*break*/ ,
2145
+ 6
2146
+ ];
2147
+ return [
2148
+ 4 /*yield*/ ,
2149
+ this._wavStreamPlayer.connect()
2150
+ ];
2151
+ case 5:
2152
+ _o.sent();
2153
+ if (!this._remoteAudioLevelInterval) this._remoteAudioLevelInterval = setInterval(function() {
2154
+ var _a;
2155
+ var frequencies = _this._wavStreamPlayer.getFrequencies();
2156
+ var aveVal = 0;
2157
+ if ((_a = frequencies.values) === null || _a === void 0 ? void 0 : _a.length) aveVal = frequencies.values.reduce(function(a, c) {
2158
+ return a + c;
2159
+ }, 0) / frequencies.values.length;
2160
+ _this._handleRemoteAudioLevel(aveVal);
2161
+ }, 100);
2162
+ _o.label = 6;
2163
+ case 6:
2164
+ this._initialized = true;
2165
+ return [
2166
+ 2 /*return*/
2167
+ ];
2168
+ }
2169
+ });
2170
+ });
2171
+ };
2172
+ DailyMediaManager.prototype.connect = function() {
2173
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2174
+ var _this = this;
2175
+ return $22ece045290c996a$var$__generator(this, function(_a) {
2176
+ if (this._connected) {
2177
+ console.warn("DailyMediaManager already connected");
2178
+ return [
2179
+ 2 /*return*/
2180
+ ];
2181
+ }
2182
+ this._connected = true;
2183
+ if (!this._initialized) return [
2184
+ 2 /*return*/ ,
2185
+ new Promise(function(resolve) {
2186
+ (function() {
2187
+ return $22ece045290c996a$var$__awaiter(_this, void 0, void 0, function() {
2188
+ return $22ece045290c996a$var$__generator(this, function(_a) {
2189
+ switch(_a.label){
2190
+ case 0:
2191
+ this._connectResolve = resolve;
2192
+ return [
2193
+ 4 /*yield*/ ,
2194
+ this.initialize()
2195
+ ];
2196
+ case 1:
2197
+ _a.sent();
2198
+ return [
2199
+ 2 /*return*/
2200
+ ];
2201
+ }
2202
+ });
2203
+ });
2204
+ })();
2205
+ })
2206
+ ];
2207
+ if (this._micEnabled) this._startRecording();
2208
+ return [
2209
+ 2 /*return*/
2210
+ ];
2211
+ });
2212
+ });
2213
+ };
2214
+ DailyMediaManager.prototype.disconnect = function() {
2215
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2216
+ var _a, _b;
2217
+ return $22ece045290c996a$var$__generator(this, function(_c) {
2218
+ switch(_c.label){
2219
+ case 0:
2220
+ if (this._remoteAudioLevelInterval) clearInterval(this._remoteAudioLevelInterval);
2221
+ this._remoteAudioLevelInterval = null;
2222
+ this._daily.leave();
2223
+ this._currentAudioTrack = null;
2224
+ return [
2225
+ 4 /*yield*/ ,
2226
+ (_a = this._mediaStreamRecorder) === null || _a === void 0 ? void 0 : _a.end()
2227
+ ];
2228
+ case 1:
2229
+ _c.sent();
2230
+ (_b = this._wavStreamPlayer) === null || _b === void 0 || _b.interrupt();
2231
+ this._initialized = false;
2232
+ this._connected = false;
2233
+ return [
2234
+ 2 /*return*/
2235
+ ];
2236
+ }
2237
+ });
2238
+ });
2239
+ };
2240
+ DailyMediaManager.prototype.userStartedSpeaking = function() {
2241
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2242
+ var _a;
2243
+ return $22ece045290c996a$var$__generator(this, function(_b) {
2244
+ return [
2245
+ 2 /*return*/ ,
2246
+ (_a = this._wavStreamPlayer) === null || _a === void 0 ? void 0 : _a.interrupt()
2247
+ ];
2248
+ });
2249
+ });
2250
+ };
2251
+ DailyMediaManager.prototype.bufferBotAudio = function(data, id) {
2252
+ var _a;
2253
+ return (_a = this._wavStreamPlayer) === null || _a === void 0 ? void 0 : _a.add16BitPCM(data, id);
2254
+ };
2255
+ DailyMediaManager.prototype.getAllMics = function() {
2256
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2257
+ var devices;
2258
+ return $22ece045290c996a$var$__generator(this, function(_a) {
2259
+ switch(_a.label){
2260
+ case 0:
2261
+ return [
2262
+ 4 /*yield*/ ,
2263
+ this._daily.enumerateDevices()
2264
+ ];
2265
+ case 1:
2266
+ devices = _a.sent().devices;
2267
+ return [
2268
+ 2 /*return*/ ,
2269
+ devices.filter(function(device) {
2270
+ return device.kind === "audioinput";
2271
+ })
2272
+ ];
2273
+ }
2274
+ });
2275
+ });
2276
+ };
2277
+ DailyMediaManager.prototype.getAllCams = function() {
2278
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2279
+ var devices;
2280
+ return $22ece045290c996a$var$__generator(this, function(_a) {
2281
+ switch(_a.label){
2282
+ case 0:
2283
+ return [
2284
+ 4 /*yield*/ ,
2285
+ this._daily.enumerateDevices()
2286
+ ];
2287
+ case 1:
2288
+ devices = _a.sent().devices;
2289
+ return [
2290
+ 2 /*return*/ ,
2291
+ devices.filter(function(device) {
2292
+ return device.kind === "videoinput";
2293
+ })
2294
+ ];
2295
+ }
2296
+ });
2297
+ });
2298
+ };
2299
+ DailyMediaManager.prototype.getAllSpeakers = function() {
2300
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2301
+ var devices;
2302
+ return $22ece045290c996a$var$__generator(this, function(_a) {
2303
+ switch(_a.label){
2304
+ case 0:
2305
+ return [
2306
+ 4 /*yield*/ ,
2307
+ this._daily.enumerateDevices()
2308
+ ];
2309
+ case 1:
2310
+ devices = _a.sent().devices;
2311
+ return [
2312
+ 2 /*return*/ ,
2313
+ devices.filter(function(device) {
2314
+ return device.kind === "audiooutput";
2315
+ })
2316
+ ];
2317
+ }
2318
+ });
2319
+ });
2320
+ };
2321
+ DailyMediaManager.prototype.updateMic = function(micId) {
2322
+ var _this = this;
2323
+ this._daily.setInputDevicesAsync({
2324
+ audioDeviceId: micId
2325
+ }).then(function(deviceInfo) {
2326
+ _this._selectedMic = deviceInfo.mic;
2327
+ });
2328
+ };
2329
+ DailyMediaManager.prototype.updateCam = function(camId) {
2330
+ var _this = this;
2331
+ this._daily.setInputDevicesAsync({
2332
+ videoDeviceId: camId
2333
+ }).then(function(deviceInfo) {
2334
+ _this._selectedCam = deviceInfo.camera;
2335
+ });
2336
+ };
2337
+ DailyMediaManager.prototype.updateSpeaker = function(speakerId) {
2338
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2339
+ var sID, speakers, defaultSpeaker_1, defaultSpeakerCp;
2340
+ var _this = this;
2341
+ var _a, _b;
2342
+ return $22ece045290c996a$var$__generator(this, function(_c) {
2343
+ switch(_c.label){
2344
+ case 0:
2345
+ if (speakerId !== "default" && this._selectedSpeaker.deviceId === speakerId) return [
2346
+ 2 /*return*/
2347
+ ];
2348
+ sID = speakerId;
2349
+ if (!(sID === "default")) return [
2350
+ 3 /*break*/ ,
2351
+ 2
2352
+ ];
2353
+ return [
2354
+ 4 /*yield*/ ,
2355
+ this.getAllSpeakers()
2356
+ ];
2357
+ case 1:
2358
+ speakers = _c.sent();
2359
+ defaultSpeaker_1 = speakers.find(function(s) {
2360
+ return s.deviceId === "default";
2361
+ });
2362
+ if (!defaultSpeaker_1) {
2363
+ console.warn("No default speaker found");
2364
+ return [
2365
+ 2 /*return*/
2366
+ ];
2367
+ }
2368
+ speakers.splice(speakers.indexOf(defaultSpeaker_1), 1);
2369
+ defaultSpeakerCp = speakers.find(function(s) {
2370
+ return defaultSpeaker_1.label.includes(s.label);
2371
+ });
2372
+ sID = (_a = defaultSpeakerCp === null || defaultSpeakerCp === void 0 ? void 0 : defaultSpeakerCp.deviceId) !== null && _a !== void 0 ? _a : speakerId;
2373
+ _c.label = 2;
2374
+ case 2:
2375
+ (_b = this._wavStreamPlayer) === null || _b === void 0 || _b.updateSpeaker(sID).then(function() {
2376
+ var _a, _b;
2377
+ _this._selectedSpeaker = {
2378
+ deviceId: speakerId
2379
+ };
2380
+ (_b = (_a = _this._callbacks).onSpeakerUpdated) === null || _b === void 0 || _b.call(_a, _this._selectedSpeaker);
2381
+ });
2382
+ return [
2383
+ 2 /*return*/
2384
+ ];
2385
+ }
2386
+ });
2387
+ });
2388
+ };
2389
+ Object.defineProperty(DailyMediaManager.prototype, "selectedMic", {
2390
+ get: function() {
2391
+ return this._selectedMic;
2392
+ },
2393
+ enumerable: false,
2394
+ configurable: true
2395
+ });
2396
+ Object.defineProperty(DailyMediaManager.prototype, "selectedCam", {
2397
+ get: function() {
2398
+ return this._selectedCam;
2399
+ },
2400
+ enumerable: false,
2401
+ configurable: true
2402
+ });
2403
+ Object.defineProperty(DailyMediaManager.prototype, "selectedSpeaker", {
2404
+ get: function() {
2405
+ return this._selectedSpeaker;
2406
+ },
2407
+ enumerable: false,
2408
+ configurable: true
2409
+ });
2410
+ DailyMediaManager.prototype.enableMic = function(enable) {
2411
+ return $22ece045290c996a$var$__awaiter(this, void 0, Promise, function() {
2412
+ var _a;
2413
+ return $22ece045290c996a$var$__generator(this, function(_b) {
2414
+ this._micEnabled = enable;
2415
+ if (!((_a = this._daily.participants()) === null || _a === void 0 ? void 0 : _a.local)) return [
2416
+ 2 /*return*/
2417
+ ];
2418
+ this._daily.setLocalAudio(enable);
2419
+ if (this._mediaStreamRecorder) {
2420
+ if (enable) {
2421
+ if (this._mediaStreamRecorder.getStatus() === "paused") this._startRecording();
2422
+ // else, we'll record on the track-started event
2423
+ } else if (this._mediaStreamRecorder.getStatus() === "recording") this._mediaStreamRecorder.pause();
2424
+ }
2425
+ return [
2426
+ 2 /*return*/
2427
+ ];
2428
+ });
2429
+ });
2430
+ };
2431
+ DailyMediaManager.prototype.enableCam = function(enable) {
2432
+ this._camEnabled = enable;
2433
+ this._daily.setLocalVideo(enable);
2434
+ };
2435
+ Object.defineProperty(DailyMediaManager.prototype, "isCamEnabled", {
2436
+ get: function() {
2437
+ return this._daily.localVideo();
2438
+ },
2439
+ enumerable: false,
2440
+ configurable: true
2441
+ });
2442
+ Object.defineProperty(DailyMediaManager.prototype, "isMicEnabled", {
2443
+ get: function() {
2444
+ return this._daily.localAudio();
2445
+ },
2446
+ enumerable: false,
2447
+ configurable: true
2448
+ });
2449
+ DailyMediaManager.prototype.tracks = function() {
2450
+ var _a, _b, _c, _d, _e, _f;
2451
+ var participants = this._daily.participants();
2452
+ return {
2453
+ local: {
2454
+ audio: (_c = (_b = (_a = participants === null || participants === void 0 ? void 0 : participants.local) === null || _a === void 0 ? void 0 : _a.tracks) === null || _b === void 0 ? void 0 : _b.audio) === null || _c === void 0 ? void 0 : _c.persistentTrack,
2455
+ video: (_f = (_e = (_d = participants === null || participants === void 0 ? void 0 : participants.local) === null || _d === void 0 ? void 0 : _d.tracks) === null || _e === void 0 ? void 0 : _e.video) === null || _f === void 0 ? void 0 : _f.persistentTrack
2456
+ }
2457
+ };
2458
+ };
2459
+ DailyMediaManager.prototype._startRecording = function() {
2460
+ var _this = this;
2461
+ if (!this._connected || !this._mediaStreamRecorder) return;
2462
+ try {
2463
+ this._mediaStreamRecorder.record(function(data) {
2464
+ _this._userAudioCallback(data.mono);
2465
+ }, this._recorderChunkSize);
2466
+ } catch (e) {
2467
+ var err = e;
2468
+ if (!err.message.includes("Already recording")) console.error("Error starting recording", e);
2469
+ }
2470
+ };
2471
+ DailyMediaManager.prototype._handleAvailableDevicesUpdated = function(event) {
2472
+ var _a, _b, _c, _d, _e, _f;
2473
+ (_b = (_a = this._callbacks).onAvailableCamsUpdated) === null || _b === void 0 || _b.call(_a, event.availableDevices.filter(function(d) {
2474
+ return d.kind === "videoinput";
2475
+ }));
2476
+ (_d = (_c = this._callbacks).onAvailableMicsUpdated) === null || _d === void 0 || _d.call(_c, event.availableDevices.filter(function(d) {
2477
+ return d.kind === "audioinput";
2478
+ }));
2479
+ (_f = (_e = this._callbacks).onAvailableSpeakersUpdated) === null || _f === void 0 || _f.call(_e, event.availableDevices.filter(function(d) {
2480
+ return d.kind === "audiooutput";
2481
+ }));
2482
+ if (this._selectedSpeaker.deviceId === "default") this.updateSpeaker("default");
2483
+ };
2484
+ DailyMediaManager.prototype._handleSelectedDevicesUpdated = function(event) {
2485
+ var _a, _b, _c, _d, _e, _f;
2486
+ if (((_a = this._selectedCam) === null || _a === void 0 ? void 0 : _a.deviceId) !== event.devices.camera) {
2487
+ this._selectedCam = event.devices.camera;
2488
+ (_c = (_b = this._callbacks).onCamUpdated) === null || _c === void 0 || _c.call(_b, event.devices.camera);
2489
+ }
2490
+ if (((_d = this._selectedMic) === null || _d === void 0 ? void 0 : _d.deviceId) !== event.devices.mic) {
2491
+ this._selectedMic = event.devices.mic;
2492
+ (_f = (_e = this._callbacks).onMicUpdated) === null || _f === void 0 || _f.call(_e, event.devices.mic);
2493
+ }
2494
+ };
2495
+ DailyMediaManager.prototype._handleLocalAudioLevel = function(ev) {
2496
+ var _a, _b;
2497
+ (_b = (_a = this._callbacks).onLocalAudioLevel) === null || _b === void 0 || _b.call(_a, ev.audioLevel);
2498
+ };
2499
+ DailyMediaManager.prototype._handleRemoteAudioLevel = function(audioLevel) {
2500
+ var _a, _b;
2501
+ (_b = (_a = this._callbacks).onRemoteAudioLevel) === null || _b === void 0 || _b.call(_a, audioLevel, $22ece045290c996a$var$botParticipant());
2502
+ };
2503
+ DailyMediaManager.prototype.handleTrackStarted = function(event) {
2504
+ return $22ece045290c996a$var$__awaiter(this, void 0, void 0, function() {
2505
+ var status, _a;
2506
+ var _b, _c, _d, _e;
2507
+ return $22ece045290c996a$var$__generator(this, function(_f) {
2508
+ switch(_f.label){
2509
+ case 0:
2510
+ if (!((_b = event.participant) === null || _b === void 0 ? void 0 : _b.local)) return [
2511
+ 2 /*return*/
2512
+ ];
2513
+ if (!(event.track.kind === "audio")) return [
2514
+ 3 /*break*/ ,
2515
+ 10
2516
+ ];
2517
+ if (!this._mediaStreamRecorder) return [
2518
+ 3 /*break*/ ,
2519
+ 9
2520
+ ];
2521
+ status = this._mediaStreamRecorder.getStatus();
2522
+ _a = status;
2523
+ switch(_a){
2524
+ case "ended":
2525
+ return [
2526
+ 3 /*break*/ ,
2527
+ 1
2528
+ ];
2529
+ case "paused":
2530
+ return [
2531
+ 3 /*break*/ ,
2532
+ 3
2533
+ ];
2534
+ case "recording":
2535
+ return [
2536
+ 3 /*break*/ ,
2537
+ 4
2538
+ ];
2539
+ }
2540
+ return [
2541
+ 3 /*break*/ ,
2542
+ 4
2543
+ ];
2544
+ case 1:
2545
+ return [
2546
+ 4 /*yield*/ ,
2547
+ this._mediaStreamRecorder.begin(event.track)
2548
+ ];
2549
+ case 2:
2550
+ _f.sent();
2551
+ if (this._connected) {
2552
+ this._startRecording();
2553
+ if (this._connectResolve) {
2554
+ this._connectResolve();
2555
+ this._connectResolve = null;
2556
+ }
2557
+ }
2558
+ return [
2559
+ 3 /*break*/ ,
2560
+ 9
2561
+ ];
2562
+ case 3:
2563
+ this._startRecording();
2564
+ return [
2565
+ 3 /*break*/ ,
2566
+ 9
2567
+ ];
2568
+ case 4:
2569
+ if (!(this._currentAudioTrack !== event.track)) return [
2570
+ 3 /*break*/ ,
2571
+ 7
2572
+ ];
2573
+ return [
2574
+ 4 /*yield*/ ,
2575
+ this._mediaStreamRecorder.end()
2576
+ ];
2577
+ case 5:
2578
+ _f.sent();
2579
+ return [
2580
+ 4 /*yield*/ ,
2581
+ this._mediaStreamRecorder.begin(event.track)
2582
+ ];
2583
+ case 6:
2584
+ _f.sent();
2585
+ this._startRecording();
2586
+ return [
2587
+ 3 /*break*/ ,
2588
+ 8
2589
+ ];
2590
+ case 7:
2591
+ console.warn("track-started event received for current track and already recording");
2592
+ _f.label = 8;
2593
+ case 8:
2594
+ return [
2595
+ 3 /*break*/ ,
2596
+ 9
2597
+ ];
2598
+ case 9:
2599
+ this._currentAudioTrack = event.track;
2600
+ _f.label = 10;
2601
+ case 10:
2602
+ (_d = (_c = this._callbacks).onTrackStarted) === null || _d === void 0 || _d.call(_c, event.track, event.participant ? $22ece045290c996a$var$dailyParticipantToParticipant(event.participant) : undefined);
2603
+ (_e = this.onTrackStartedCallback) === null || _e === void 0 || _e.call(this, event);
2604
+ return [
2605
+ 2 /*return*/
2606
+ ];
2607
+ }
2608
+ });
2609
+ });
2610
+ };
2611
+ DailyMediaManager.prototype.handleTrackStopped = function(event) {
2612
+ var _a, _b, _c, _d;
2613
+ if (!((_a = event.participant) === null || _a === void 0 ? void 0 : _a.local)) return;
2614
+ if (event.track.kind === "audio") {
2615
+ if (this._mediaStreamRecorder && this._mediaStreamRecorder.getStatus() === "recording") this._mediaStreamRecorder.pause();
2616
+ }
2617
+ (_c = (_b = this._callbacks).onTrackStopped) === null || _c === void 0 || _c.call(_b, event.track, event.participant ? $22ece045290c996a$var$dailyParticipantToParticipant(event.participant) : undefined);
2618
+ (_d = this.onTrackStoppedCallback) === null || _d === void 0 || _d.call(this, event);
2619
+ };
2620
+ return DailyMediaManager;
2621
+ }((0, $fc49a56cd8739127$export$4a0c46dbbe2ddb67));
2622
+ var $22ece045290c996a$var$dailyParticipantToParticipant = function(p) {
2623
+ return {
2624
+ id: p.user_id,
2625
+ local: p.local,
2626
+ name: p.user_name
2627
+ };
2628
+ };
2629
+ var $22ece045290c996a$var$botParticipant = function() {
2630
+ return {
2631
+ id: "bot",
2632
+ local: false,
2633
+ name: "Bot"
2634
+ };
2635
+ };
2636
+
2637
+
2638
+
2639
+
2640
+ var $032380dbcf3f4e13$var$__extends = undefined && undefined.__extends || function() {
2641
+ var extendStatics = function(d, b) {
2642
+ extendStatics = Object.setPrototypeOf || ({
2643
+ __proto__: []
2644
+ }) instanceof Array && function(d, b) {
2645
+ d.__proto__ = b;
2646
+ } || function(d, b) {
2647
+ for(var p in b)if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
2648
+ };
2649
+ return extendStatics(d, b);
2650
+ };
2651
+ return function(d, b) {
2652
+ if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
2653
+ extendStatics(d, b);
2654
+ function __() {
2655
+ this.constructor = d;
2656
+ }
2657
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
2658
+ };
2659
+ }();
2660
+ var $032380dbcf3f4e13$var$__awaiter = undefined && undefined.__awaiter || function(thisArg, _arguments, P, generator) {
2661
+ function adopt(value) {
2662
+ return value instanceof P ? value : new P(function(resolve) {
2663
+ resolve(value);
2664
+ });
2665
+ }
2666
+ return new (P || (P = Promise))(function(resolve, reject) {
2667
+ function fulfilled(value) {
2668
+ try {
2669
+ step(generator.next(value));
2670
+ } catch (e) {
2671
+ reject(e);
2672
+ }
2673
+ }
2674
+ function rejected(value) {
2675
+ try {
2676
+ step(generator["throw"](value));
2677
+ } catch (e) {
2678
+ reject(e);
2679
+ }
2680
+ }
2681
+ function step(result) {
2682
+ result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
2683
+ }
2684
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
2685
+ });
2686
+ };
2687
+ var $032380dbcf3f4e13$var$__generator = undefined && undefined.__generator || function(thisArg, body) {
2688
+ var _ = {
2689
+ label: 0,
2690
+ sent: function() {
2691
+ if (t[0] & 1) throw t[1];
2692
+ return t[1];
2693
+ },
2694
+ trys: [],
2695
+ ops: []
2696
+ }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
2697
+ return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() {
2698
+ return this;
2699
+ }), g;
2700
+ function verb(n) {
2701
+ return function(v) {
2702
+ return step([
2703
+ n,
2704
+ v
2705
+ ]);
2706
+ };
2707
+ }
2708
+ function step(op) {
2709
+ if (f) throw new TypeError("Generator is already executing.");
2710
+ while(g && (g = 0, op[0] && (_ = 0)), _)try {
2711
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
2712
+ if (y = 0, t) op = [
2713
+ op[0] & 2,
2714
+ t.value
2715
+ ];
2716
+ switch(op[0]){
2717
+ case 0:
2718
+ case 1:
2719
+ t = op;
2720
+ break;
2721
+ case 4:
2722
+ _.label++;
2723
+ return {
2724
+ value: op[1],
2725
+ done: false
2726
+ };
2727
+ case 5:
2728
+ _.label++;
2729
+ y = op[1];
2730
+ op = [
2731
+ 0
2732
+ ];
2733
+ continue;
2734
+ case 7:
2735
+ op = _.ops.pop();
2736
+ _.trys.pop();
2737
+ continue;
2738
+ default:
2739
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
2740
+ _ = 0;
2741
+ continue;
2742
+ }
2743
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
2744
+ _.label = op[1];
2745
+ break;
2746
+ }
2747
+ if (op[0] === 6 && _.label < t[1]) {
2748
+ _.label = t[1];
2749
+ t = op;
2750
+ break;
2751
+ }
2752
+ if (t && _.label < t[2]) {
2753
+ _.label = t[2];
2754
+ _.ops.push(op);
2755
+ break;
2756
+ }
2757
+ if (t[2]) _.ops.pop();
2758
+ _.trys.pop();
2759
+ continue;
2760
+ }
2761
+ op = body.call(thisArg, _);
2762
+ } catch (e) {
2763
+ op = [
2764
+ 6,
2765
+ e
2766
+ ];
2767
+ y = 0;
2768
+ } finally{
2769
+ f = t = 0;
2770
+ }
2771
+ if (op[0] & 5) throw op[1];
2772
+ return {
2773
+ value: op[0] ? op[1] : void 0,
2774
+ done: true
2775
+ };
2776
+ }
2777
+ };
2778
+ var $032380dbcf3f4e13$var$__spreadArray = undefined && undefined.__spreadArray || function(to, from, pack) {
2779
+ if (pack || arguments.length === 2) {
2780
+ for(var i = 0, l = from.length, ar; i < l; i++)if (ar || !(i in from)) {
2781
+ if (!ar) ar = Array.prototype.slice.call(from, 0, i);
2782
+ ar[i] = from[i];
2783
+ }
2784
+ }
2785
+ return to.concat(ar || Array.prototype.slice.call(from));
2786
+ };
2787
+ var $032380dbcf3f4e13$var$readyStates = [
2788
+ "CONNECTING",
2789
+ "OPEN",
2790
+ "CLOSING",
2791
+ "CLOSED"
2792
+ ];
2793
+ var $032380dbcf3f4e13$var$KEEP_ALIVE_INTERVAL = 5000;
2794
+ var $032380dbcf3f4e13$var$KEEP_ALIVE_TIMEOUT = 15000;
2795
+ // client side code in soupSFU has a timeout of 15 seconds for command response
2796
+ // 5 seconds seems reasonable that it provides roughly 3 retry attempts
2797
+ var $032380dbcf3f4e13$var$WEBSOCKET_CONNECTION_TIMEOUT = 150000;
2798
+ var $032380dbcf3f4e13$var$DEFAULT_RECONNECT_ATTEMPTS = 2;
2799
+ var $032380dbcf3f4e13$var$MAX_RECONNECT_ATTEMPTS = 10;
2800
+ var $032380dbcf3f4e13$var$DEFAULT_RECONNECT_INTERVAL = 1000;
2801
+ var $032380dbcf3f4e13$var$MAX_RECONNECT_INTERVAL = 30000;
2802
+ var $032380dbcf3f4e13$var$DEFAULT_RECONNECT_DECAY = 1.5;
2803
+ var $032380dbcf3f4e13$var$WEBSOCKET_TIMEOUT_CODE = 4100;
2804
+ var $032380dbcf3f4e13$var$SIG_CONNECTION_CANCELED = "SIG_CONNECTION_CANCELED";
2805
+ var $032380dbcf3f4e13$var$WEBSOCKET_ERROR = "WEBSOCKET_ERROR";
2806
+ var $032380dbcf3f4e13$var$LOG_LEVEL;
2807
+ (function(LOG_LEVEL) {
2808
+ LOG_LEVEL[LOG_LEVEL["DEBUG"] = 0] = "DEBUG";
2809
+ LOG_LEVEL[LOG_LEVEL["ERROR"] = 1] = "ERROR";
2810
+ LOG_LEVEL[LOG_LEVEL["INFO"] = 2] = "INFO";
2811
+ LOG_LEVEL[LOG_LEVEL["WARN"] = 3] = "WARN";
2812
+ })($032380dbcf3f4e13$var$LOG_LEVEL || ($032380dbcf3f4e13$var$LOG_LEVEL = {}));
2813
+ var $032380dbcf3f4e13$var$rWebSocket = /** @class */ function() {
2814
+ function rWebSocket(url, protocols) {
2815
+ this._closedManually = false;
2816
+ this._errored = false;
2817
+ this._rejected = false;
2818
+ this._timed_out = false;
2819
+ this._initialConnectionOk = false;
2820
+ this._ws = new WebSocket(url, protocols);
2821
+ }
2822
+ rWebSocket.prototype.addEventListener = function(type, listener) {
2823
+ this._ws.addEventListener(type, listener);
2824
+ };
2825
+ // Add other WebSocket methods as needed
2826
+ rWebSocket.prototype.close = function(code, reason) {
2827
+ this._ws.close(code, reason);
2828
+ };
2829
+ rWebSocket.prototype.send = function(data) {
2830
+ this._ws.send(data);
2831
+ };
2832
+ Object.defineProperty(rWebSocket.prototype, "url", {
2833
+ // Add getters for WebSocket properties
2834
+ get: function() {
2835
+ return this._ws.url;
2836
+ },
2837
+ enumerable: false,
2838
+ configurable: true
2839
+ });
2840
+ Object.defineProperty(rWebSocket.prototype, "readyState", {
2841
+ get: function() {
2842
+ return this._ws.readyState;
2843
+ },
2844
+ enumerable: false,
2845
+ configurable: true
2846
+ });
2847
+ return rWebSocket;
2848
+ }();
2849
+ /**
2850
+ * Builds on top of Javascript Websockets
2851
+ *
2852
+ * This behaves like the Websocket library in every way, except if it fails to
2853
+ * connect or if it gets disconnected, it will try to reconnect depending on
2854
+ * the maximum number of reconnect attempts set. retry is not enabled for initial
2855
+ * connection. When initial connection fails it is best to check yourself before
2856
+ * you keep wreckin' yourself.
2857
+ *
2858
+ * It is API compatible, so when you have:
2859
+ * ws = new WebSocket('ws://....');
2860
+ * you can replace with:
2861
+ * ws = new ReconnectingWebSocket('ws://....');
2862
+ *
2863
+ * While it is API compatible with the NodeJS ws library, we provide the
2864
+ * following additional properties and events on the ReconnectingWebSocket.
2865
+ *
2866
+ * Events:
2867
+ *
2868
+ * connection-timeout
2869
+ * - Emitted when the web socket connection times out.
2870
+ *
2871
+ * reconnecting
2872
+ * - Emitted after a manual close of the web socket is done and before retrying
2873
+ * the connection.
2874
+ *
2875
+ * reconnect-failed
2876
+ * - Emitted when the number of connection attempts exceeds the set number of
2877
+ * reconnection attempts.
2878
+ *
2879
+ * keep-alive
2880
+ * - Emitted when the set keep alive interval elapses. This event may be used
2881
+ * to have ping pong keep-alive mechanism for web socket health.
2882
+ *
2883
+ * Properties:
2884
+ *
2885
+ * keepAliveTimeout
2886
+ * - The timeout for keep-alive. Default: 15000
2887
+ *
2888
+ * keepAliveInterval
2889
+ * - The interval at which to emit keep-alive event. Default: 5000
2890
+ *
2891
+ * shouldRetryFn
2892
+ * - A callback function which should return boolean to determine if a web
2893
+ * socket reconnection attempt should be made. When not set, connection is
2894
+ * always retried.
2895
+ *
2896
+ * connectionTimeout
2897
+ * - The timeout interval for considering whether the connection timed out.
2898
+ * Default: 20000 ms
2899
+ *
2900
+ * maxReconnectAttempts
2901
+ * - The maximum number of attempts to be made for reconnection. Default: 2
2902
+ *
2903
+ * reconnectInterval
2904
+ * - The interval to wait before attempting a reconnection. Default: 1000 ms
2905
+ */ var $032380dbcf3f4e13$export$4f3d0ffd941ebefb = /** @class */ function(_super) {
2906
+ $032380dbcf3f4e13$var$__extends(ReconnectingWebSocket, _super);
2907
+ function ReconnectingWebSocket(address, protocols, options) {
2908
+ if (options === void 0) options = {};
2909
+ var _a;
2910
+ var _this = _super.call(this) || this;
2911
+ if (!address) throw new Error("Need a valid WebSocket URL");
2912
+ _this._ws = null;
2913
+ _this._url = address;
2914
+ _this._protocols = protocols;
2915
+ _this._parseBlobToJson = (_a = options === null || options === void 0 ? void 0 : options.parseBlobToJson) !== null && _a !== void 0 ? _a : true;
2916
+ _this.init();
2917
+ return _this;
2918
+ }
2919
+ ReconnectingWebSocket.prototype.init = function() {
2920
+ this._keepAliveTimeout = $032380dbcf3f4e13$var$KEEP_ALIVE_TIMEOUT;
2921
+ this._keepAliveInterval = $032380dbcf3f4e13$var$KEEP_ALIVE_INTERVAL;
2922
+ this._disconnected = false;
2923
+ this._keepIntervalID = null;
2924
+ this._shouldRetryFn = null;
2925
+ this._connectionTimeout = $032380dbcf3f4e13$var$WEBSOCKET_CONNECTION_TIMEOUT;
2926
+ this._reconnectAttempts = 0;
2927
+ this._allowedReconnectAttempts = $032380dbcf3f4e13$var$DEFAULT_RECONNECT_ATTEMPTS;
2928
+ this._reconnectInterval = $032380dbcf3f4e13$var$DEFAULT_RECONNECT_INTERVAL;
2929
+ this._maxReconnectInterval = $032380dbcf3f4e13$var$MAX_RECONNECT_INTERVAL;
2930
+ this._reconnectDecay = $032380dbcf3f4e13$var$DEFAULT_RECONNECT_DECAY;
2931
+ };
2932
+ ReconnectingWebSocket.prototype.connect = function() {
2933
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
2934
+ var _this = this;
2935
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
2936
+ return [
2937
+ 2 /*return*/ ,
2938
+ new Promise(function(resolve, reject) {
2939
+ _this._disconnected = false;
2940
+ _this.clearReconnectTimeout();
2941
+ var ws = new $032380dbcf3f4e13$var$rWebSocket(_this._url, _this._protocols);
2942
+ _this.setConnectionTimeout();
2943
+ ws.addEventListener("close", function(evt) {
2944
+ var closeEvent = evt;
2945
+ var code = ws._timed_out ? $032380dbcf3f4e13$var$WEBSOCKET_TIMEOUT_CODE : closeEvent.code;
2946
+ var reason = ws._timed_out ? "websocket connection timed out" : closeEvent.reason;
2947
+ ws._timed_out = false;
2948
+ if (!ws._closedManually && ws._initialConnectionOk) {
2949
+ console.warn("signaling socket closed unexpectedly: ".concat(code).concat(reason ? " " + reason : ""));
2950
+ _this._closeSocket();
2951
+ _this.emit("close", code, reason);
2952
+ } else _this.log("signaling socket closed");
2953
+ if (!ws._closedManually && (ws._errored || ws._timed_out)) {
2954
+ console.warn("signaling socket closed on error: ".concat(code).concat(reason ? " " + reason : ""));
2955
+ if (!ws._rejected) {
2956
+ ws._rejected = true;
2957
+ var err = new Error("WebSocket connection error (".concat(code, "): ").concat(reason));
2958
+ err.name = $032380dbcf3f4e13$var$WEBSOCKET_ERROR;
2959
+ reject(err);
2960
+ }
2961
+ }
2962
+ });
2963
+ ws.addEventListener("open", function(evt) {
2964
+ _this.log("wss connection opened to", $032380dbcf3f4e13$var$LOG_LEVEL.DEBUG, _this._url);
2965
+ _this.clearConnectionTimeout();
2966
+ // now that the timeout closes the socket, in theory this onopen
2967
+ // callback should never happen in the first place, but seems
2968
+ // harmless to leave these safeguards in
2969
+ if (ws._rejected || ws._timed_out) return;
2970
+ if (ws._closedManually || _this._ws && _this._ws !== ws) {
2971
+ ws._rejected = true;
2972
+ ws.close();
2973
+ var err = Error("wss connection interrupted by disconnect or newer connection");
2974
+ err.name = $032380dbcf3f4e13$var$SIG_CONNECTION_CANCELED;
2975
+ reject(err);
2976
+ return;
2977
+ }
2978
+ ws._initialConnectionOk = _this._url;
2979
+ _this._lastMsgRecvTime = Date.now();
2980
+ if (_this._keepAliveInterval) _this._keepIntervalID = setInterval(function() {
2981
+ return _this.checkSocketHealthAndSendKeepAlive();
2982
+ }, _this._keepAliveInterval);
2983
+ _this._ws = ws;
2984
+ _this.emit("open");
2985
+ resolve(ws);
2986
+ });
2987
+ ws.addEventListener("error", function(evt) {
2988
+ // fyi: evt is an Event here, with 0 amount of helpful info. If there
2989
+ // happens to be info about the error, it's included in the
2990
+ // accompanying close event (because that make sense. shakes head)
2991
+ // SO. We do not reject here. Instead, we just set the _errored
2992
+ // flag on the socket so when the close event occurs, it knows to
2993
+ // reject the promise
2994
+ if (!ws._closedManually) {
2995
+ var wsTarget = evt.currentTarget;
2996
+ _this.log("websocket error event: ".concat(wsTarget === null || wsTarget === void 0 ? void 0 : wsTarget.url));
2997
+ }
2998
+ ws._errored = true;
2999
+ });
3000
+ ws.addEventListener("message", function(msg) {
3001
+ _this._handleMessage(msg);
3002
+ });
3003
+ })
3004
+ ];
3005
+ });
3006
+ });
3007
+ };
3008
+ ReconnectingWebSocket.prototype.setConnectionTimeout = function() {
3009
+ var _this = this;
3010
+ this._connectionTimeoutID = setTimeout(function() {
3011
+ return $032380dbcf3f4e13$var$__awaiter(_this, void 0, void 0, function() {
3012
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3013
+ switch(_a.label){
3014
+ case 0:
3015
+ this.log("Connection reconnect attempt timed out.");
3016
+ this.emit("connection-timeout");
3017
+ this.clearConnectionTimeout();
3018
+ return [
3019
+ 4 /*yield*/ ,
3020
+ this._closeSocket()
3021
+ ];
3022
+ case 1:
3023
+ _a.sent();
3024
+ return [
3025
+ 2 /*return*/
3026
+ ];
3027
+ }
3028
+ });
3029
+ });
3030
+ }, this._connectionTimeout);
3031
+ };
3032
+ ReconnectingWebSocket.prototype.clearConnectionTimeout = function() {
3033
+ clearTimeout(this._connectionTimeoutID);
3034
+ this._connectionTimeoutID = undefined;
3035
+ };
3036
+ ReconnectingWebSocket.prototype.clearReconnectTimeout = function() {
3037
+ clearTimeout(this._reconnectTimeoutID);
3038
+ this._reconnectTimeoutID = undefined;
3039
+ };
3040
+ ReconnectingWebSocket.prototype.clearKeepAliveInterval = function() {
3041
+ if (this._keepIntervalID) {
3042
+ clearInterval(this._keepIntervalID);
3043
+ this._keepIntervalID = null;
3044
+ }
3045
+ };
3046
+ ReconnectingWebSocket.prototype.checkSocketHealthAndSendKeepAlive = function() {
3047
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3048
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3049
+ switch(_a.label){
3050
+ case 0:
3051
+ if (!(this._ws && this._ws.readyState === WebSocket.OPEN)) return [
3052
+ 2 /*return*/
3053
+ ];
3054
+ if (!this._keepAliveTimeout || !this._keepAliveInterval) return [
3055
+ 2 /*return*/
3056
+ ];
3057
+ if (!(Date.now() - this._lastMsgRecvTime > this._keepAliveTimeout)) return [
3058
+ 3 /*break*/ ,
3059
+ 2
3060
+ ];
3061
+ this.log("Connection is stale, need to reconnect", $032380dbcf3f4e13$var$LOG_LEVEL.WARN);
3062
+ return [
3063
+ 4 /*yield*/ ,
3064
+ this._closeSocket()
3065
+ ];
3066
+ case 1:
3067
+ _a.sent();
3068
+ return [
3069
+ 2 /*return*/
3070
+ ];
3071
+ case 2:
3072
+ // Only emit the keep-alive event if we haven't sent anything else recently
3073
+ if (Date.now() - this._lastMsgSendTime < this._keepAliveInterval) return [
3074
+ 2 /*return*/
3075
+ ];
3076
+ this.log("Emitting keep-alive", $032380dbcf3f4e13$var$LOG_LEVEL.DEBUG);
3077
+ this.emit("keep-alive");
3078
+ return [
3079
+ 2 /*return*/
3080
+ ];
3081
+ }
3082
+ });
3083
+ });
3084
+ };
3085
+ // We use the word manually here to imply the application using this code
3086
+ // or this code itself will decide to close the socket.
3087
+ ReconnectingWebSocket.prototype._closeSocket = function() {
3088
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3089
+ var shouldRetry, error_1;
3090
+ var _a;
3091
+ return $032380dbcf3f4e13$var$__generator(this, function(_b) {
3092
+ switch(_b.label){
3093
+ case 0:
3094
+ this.log("Closing");
3095
+ _b.label = 1;
3096
+ case 1:
3097
+ _b.trys.push([
3098
+ 1,
3099
+ 4,
3100
+ ,
3101
+ 5
3102
+ ]);
3103
+ this.clearKeepAliveInterval();
3104
+ this._lastMsgRecvTime = 0;
3105
+ if (this._ws) {
3106
+ this._ws._closedManually = true;
3107
+ this._ws.close();
3108
+ }
3109
+ shouldRetry = ((_a = this._ws) === null || _a === void 0 ? void 0 : _a._initialConnectionOk) && this._shouldRetryFn && this._shouldRetryFn();
3110
+ this._ws = null;
3111
+ if (!shouldRetry) return [
3112
+ 3 /*break*/ ,
3113
+ 3
3114
+ ];
3115
+ this.log("Emitting reconnect", $032380dbcf3f4e13$var$LOG_LEVEL.DEBUG);
3116
+ this.emit("reconnecting");
3117
+ return [
3118
+ 4 /*yield*/ ,
3119
+ this.retryFailedConnection()
3120
+ ];
3121
+ case 2:
3122
+ _b.sent();
3123
+ _b.label = 3;
3124
+ case 3:
3125
+ return [
3126
+ 3 /*break*/ ,
3127
+ 5
3128
+ ];
3129
+ case 4:
3130
+ error_1 = _b.sent();
3131
+ this.log("Error while closing and retrying: ".concat(error_1), $032380dbcf3f4e13$var$LOG_LEVEL.ERROR);
3132
+ return [
3133
+ 3 /*break*/ ,
3134
+ 5
3135
+ ];
3136
+ case 5:
3137
+ return [
3138
+ 2 /*return*/
3139
+ ];
3140
+ }
3141
+ });
3142
+ });
3143
+ };
3144
+ ReconnectingWebSocket.prototype.retryFailedConnection = function() {
3145
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3146
+ var timeout;
3147
+ var _this = this;
3148
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3149
+ if (this._reconnectAttempts < this._allowedReconnectAttempts) {
3150
+ if (this._reconnectTimeoutID) {
3151
+ this.log("Retry already scheduled");
3152
+ return [
3153
+ 2 /*return*/
3154
+ ];
3155
+ }
3156
+ this.log("Retrying failed connection");
3157
+ timeout = // The timeout logic is taken from
3158
+ // https://github.com/joewalnes/reconnecting-websocket
3159
+ this._reconnectInterval * Math.pow(this._reconnectDecay, this._reconnectAttempts);
3160
+ timeout = timeout > this._maxReconnectInterval ? this._maxReconnectInterval : timeout;
3161
+ this.log("Reconnecting in ".concat(timeout / 1000, " seconds"));
3162
+ this._reconnectAttempts += 1;
3163
+ this._reconnectTimeoutID = setTimeout(function() {
3164
+ return _this.connect();
3165
+ }, timeout);
3166
+ } else {
3167
+ this.log("Maximum connection retry attempts exceeded", $032380dbcf3f4e13$var$LOG_LEVEL.ERROR);
3168
+ this.emit("reconnect-failed");
3169
+ }
3170
+ return [
3171
+ 2 /*return*/
3172
+ ];
3173
+ });
3174
+ });
3175
+ };
3176
+ ReconnectingWebSocket.prototype.log = function(msg, log_level) {
3177
+ if (log_level === void 0) log_level = $032380dbcf3f4e13$var$LOG_LEVEL.DEBUG;
3178
+ var args = [];
3179
+ for(var _i = 2; _i < arguments.length; _i++)args[_i - 2] = arguments[_i];
3180
+ switch(log_level){
3181
+ case $032380dbcf3f4e13$var$LOG_LEVEL.DEBUG:
3182
+ console.debug.apply(console, $032380dbcf3f4e13$var$__spreadArray([
3183
+ "websocket: ".concat(msg)
3184
+ ], args, false));
3185
+ break;
3186
+ case $032380dbcf3f4e13$var$LOG_LEVEL.ERROR:
3187
+ console.error.apply(console, $032380dbcf3f4e13$var$__spreadArray([
3188
+ "websocket: ".concat(msg)
3189
+ ], args, false));
3190
+ break;
3191
+ case $032380dbcf3f4e13$var$LOG_LEVEL.WARN:
3192
+ console.warn.apply(console, $032380dbcf3f4e13$var$__spreadArray([
3193
+ "websocket: ".concat(msg)
3194
+ ], args, false));
3195
+ break;
3196
+ case $032380dbcf3f4e13$var$LOG_LEVEL.INFO:
3197
+ default:
3198
+ console.log.apply(console, $032380dbcf3f4e13$var$__spreadArray([
3199
+ "websocket: ".concat(msg)
3200
+ ], args, false));
3201
+ break;
3202
+ }
3203
+ };
3204
+ ReconnectingWebSocket.prototype.send = function(data) {
3205
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3206
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3207
+ try {
3208
+ if (this._ws && this._ws.readyState === WebSocket.OPEN) {
3209
+ this._lastMsgSendTime = Date.now();
3210
+ this._ws.send(data);
3211
+ } else this.log("Failed to send data, web socket not open.", $032380dbcf3f4e13$var$LOG_LEVEL.ERROR);
3212
+ } catch (error) {
3213
+ this.log("Failed to send data. ".concat(error), $032380dbcf3f4e13$var$LOG_LEVEL.ERROR);
3214
+ }
3215
+ return [
3216
+ 2 /*return*/
3217
+ ];
3218
+ });
3219
+ });
3220
+ };
3221
+ ReconnectingWebSocket.prototype.close = function() {
3222
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3223
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3224
+ try {
3225
+ this.log("Closing websocket");
3226
+ this._disconnected = true;
3227
+ this.clearReconnectTimeout();
3228
+ this._closeSocket();
3229
+ } catch (error) {
3230
+ this.log("Failed to close websocket. ".concat(error));
3231
+ }
3232
+ return [
3233
+ 2 /*return*/
3234
+ ];
3235
+ });
3236
+ });
3237
+ };
3238
+ Object.defineProperty(ReconnectingWebSocket.prototype, "readyState", {
3239
+ get: function() {
3240
+ var _a, _b;
3241
+ return (_b = (_a = this._ws) === null || _a === void 0 ? void 0 : _a.readyState) !== null && _b !== void 0 ? _b : WebSocket.CLOSED;
3242
+ },
3243
+ enumerable: false,
3244
+ configurable: true
3245
+ });
3246
+ Object.defineProperty(ReconnectingWebSocket.prototype, "url", {
3247
+ get: function() {
3248
+ return this._url;
3249
+ },
3250
+ enumerable: false,
3251
+ configurable: true
3252
+ });
3253
+ Object.defineProperty(ReconnectingWebSocket.prototype, "keepAliveTimeout", {
3254
+ get: function() {
3255
+ return this._keepAliveTimeout;
3256
+ },
3257
+ set: function(keepAliveTimeout) {
3258
+ if (typeof keepAliveTimeout === "number") {
3259
+ this.log("Setting ACK freshness timeout to ".concat(keepAliveTimeout));
3260
+ this._keepAliveTimeout = keepAliveTimeout;
3261
+ }
3262
+ },
3263
+ enumerable: false,
3264
+ configurable: true
3265
+ });
3266
+ Object.defineProperty(ReconnectingWebSocket.prototype, "keepAliveInterval", {
3267
+ get: function() {
3268
+ return this._keepAliveInterval;
3269
+ },
3270
+ set: function(keepAliveInterval) {
3271
+ if (typeof keepAliveInterval === "number") {
3272
+ this.log("Setting keep-alive interval to ".concat(keepAliveInterval));
3273
+ this._keepAliveInterval = keepAliveInterval;
3274
+ }
3275
+ },
3276
+ enumerable: false,
3277
+ configurable: true
3278
+ });
3279
+ Object.defineProperty(ReconnectingWebSocket.prototype, "shouldRetryFn", {
3280
+ set: function(cb) {
3281
+ if (typeof cb === "function") this._shouldRetryFn = cb;
3282
+ },
3283
+ enumerable: false,
3284
+ configurable: true
3285
+ });
3286
+ Object.defineProperty(ReconnectingWebSocket.prototype, "connectionTimeout", {
3287
+ get: function() {
3288
+ return this._connectionTimeout;
3289
+ },
3290
+ set: function(timeout) {
3291
+ if (typeof timeout === "number") this._connectionTimeout = timeout;
3292
+ },
3293
+ enumerable: false,
3294
+ configurable: true
3295
+ });
3296
+ Object.defineProperty(ReconnectingWebSocket.prototype, "maxReconnectAttempts", {
3297
+ get: function() {
3298
+ return this._allowedReconnectAttempts;
3299
+ },
3300
+ set: function(attempts) {
3301
+ if (attempts > 0 && attempts < $032380dbcf3f4e13$var$MAX_RECONNECT_ATTEMPTS) {
3302
+ this.log("Setting maximum connection retry attempts to ".concat(attempts));
3303
+ this._allowedReconnectAttempts = attempts;
3304
+ } else this._allowedReconnectAttempts = $032380dbcf3f4e13$var$DEFAULT_RECONNECT_ATTEMPTS;
3305
+ },
3306
+ enumerable: false,
3307
+ configurable: true
3308
+ });
3309
+ Object.defineProperty(ReconnectingWebSocket.prototype, "reconnectInterval", {
3310
+ get: function() {
3311
+ return this._reconnectInterval;
3312
+ },
3313
+ set: function(interval) {
3314
+ if (typeof interval === "number") this._reconnectInterval = interval < this._maxReconnectInterval ? interval : this._maxReconnectInterval;
3315
+ },
3316
+ enumerable: false,
3317
+ configurable: true
3318
+ });
3319
+ ReconnectingWebSocket.prototype._handleMessage = function(event) {
3320
+ return $032380dbcf3f4e13$var$__awaiter(this, void 0, void 0, function() {
3321
+ var data, _parsePromise, msg;
3322
+ var _this = this;
3323
+ return $032380dbcf3f4e13$var$__generator(this, function(_a) {
3324
+ switch(_a.label){
3325
+ case 0:
3326
+ this._lastMsgRecvTime = Date.now();
3327
+ data = event.data;
3328
+ _parsePromise = new Promise(function(resolve, reject) {
3329
+ if (typeof data === "string") // Handle text message
3330
+ resolve(data);
3331
+ else if (data instanceof ArrayBuffer) {
3332
+ // Handle binary message
3333
+ var arrayBuffer = data;
3334
+ // Parse the ArrayBuffer as needed
3335
+ // Example: Convert ArrayBuffer to Uint8Array
3336
+ resolve(new Uint8Array(arrayBuffer));
3337
+ // Process the Uint8Array as needed
3338
+ } else if (data instanceof Blob) {
3339
+ if (!_this._parseBlobToJson) {
3340
+ resolve(data);
3341
+ return;
3342
+ }
3343
+ // Handle Blob message
3344
+ var blob = data;
3345
+ // Convert Blob to ArrayBuffer
3346
+ var reader_1 = new FileReader();
3347
+ reader_1.onload = function() {
3348
+ var text = reader_1.result;
3349
+ try {
3350
+ var json = JSON.parse(text);
3351
+ resolve(json);
3352
+ } catch (e) {
3353
+ console.error("Failed to parse JSON from Blob:", e);
3354
+ }
3355
+ };
3356
+ reader_1.readAsText(blob);
3357
+ }
3358
+ });
3359
+ return [
3360
+ 4 /*yield*/ ,
3361
+ _parsePromise
3362
+ ];
3363
+ case 1:
3364
+ msg = _a.sent();
3365
+ this.emit("message", msg);
3366
+ return [
3367
+ 2 /*return*/
3368
+ ];
3369
+ }
3370
+ });
3371
+ });
3372
+ };
3373
+ return ReconnectingWebSocket;
3374
+ }((0, $kR6tG$EventEmitter));
3375
+ [
3376
+ "binaryType",
3377
+ "bufferedAmount",
3378
+ "extensions",
3379
+ "protocol",
3380
+ "readyState",
3381
+ "url",
3382
+ "keepAliveTimeout",
3383
+ "keepAliveInterval",
3384
+ "shouldRetryFn",
3385
+ "connectionTimeout",
3386
+ "maxReconnectAttempts",
3387
+ "reconnectInterval"
3388
+ ].forEach(function(property) {
3389
+ Object.defineProperty($032380dbcf3f4e13$export$4f3d0ffd941ebefb.prototype, property, {
3390
+ enumerable: true
3391
+ });
3392
+ });
3393
+ [
3394
+ "CONNECTING",
3395
+ "OPEN",
3396
+ "CLOSING",
3397
+ "CLOSED"
3398
+ ].forEach(function(property) {
3399
+ Object.defineProperty($032380dbcf3f4e13$export$4f3d0ffd941ebefb.prototype, property, {
3400
+ enumerable: true,
3401
+ value: $032380dbcf3f4e13$var$readyStates.indexOf(property)
3402
+ });
3403
+ });
3404
+ [
3405
+ "CONNECTING",
3406
+ "OPEN",
3407
+ "CLOSING",
3408
+ "CLOSED"
3409
+ ].forEach(function(property) {
3410
+ Object.defineProperty($032380dbcf3f4e13$export$4f3d0ffd941ebefb, property, {
3411
+ enumerable: true,
3412
+ value: $032380dbcf3f4e13$var$readyStates.indexOf(property)
3413
+ });
3414
+ });
3415
+
3416
+
3417
+
3418
+
3419
+ // @generated message type with reflection information, may provide speed optimized methods
3420
+ class $5177d56bd0c995e0$var$TextFrame$Type extends (0, $kR6tG$MessageType) {
3421
+ constructor(){
3422
+ super("pipecat.TextFrame", [
3423
+ {
3424
+ no: 1,
3425
+ name: "id",
3426
+ kind: "scalar",
3427
+ T: 4 /*ScalarType.UINT64*/ ,
3428
+ L: 0 /*LongType.BIGINT*/
3429
+ },
3430
+ {
3431
+ no: 2,
3432
+ name: "name",
3433
+ kind: "scalar",
3434
+ T: 9 /*ScalarType.STRING*/
3435
+ },
3436
+ {
3437
+ no: 3,
3438
+ name: "text",
3439
+ kind: "scalar",
3440
+ T: 9 /*ScalarType.STRING*/
3441
+ }
3442
+ ]);
3443
+ }
3444
+ create(value) {
3445
+ const message = globalThis.Object.create(this.messagePrototype);
3446
+ message.id = 0n;
3447
+ message.name = "";
3448
+ message.text = "";
3449
+ if (value !== undefined) (0, $kR6tG$reflectionMergePartial)(this, message, value);
3450
+ return message;
3451
+ }
3452
+ internalBinaryRead(reader, length, options, target) {
3453
+ let message = target ?? this.create(), end = reader.pos + length;
3454
+ while(reader.pos < end){
3455
+ let [fieldNo, wireType] = reader.tag();
3456
+ switch(fieldNo){
3457
+ case /* uint64 id */ 1:
3458
+ message.id = reader.uint64().toBigInt();
3459
+ break;
3460
+ case /* string name */ 2:
3461
+ message.name = reader.string();
3462
+ break;
3463
+ case /* string text */ 3:
3464
+ message.text = reader.string();
3465
+ break;
3466
+ default:
3467
+ let u = options.readUnknownField;
3468
+ if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
3469
+ let d = reader.skip(wireType);
3470
+ if (u !== false) (u === true ? (0, $kR6tG$UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
3471
+ }
3472
+ }
3473
+ return message;
3474
+ }
3475
+ internalBinaryWrite(message, writer, options) {
3476
+ /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $kR6tG$WireType).Varint).uint64(message.id);
3477
+ /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $kR6tG$WireType).LengthDelimited).string(message.name);
3478
+ /* string text = 3; */ if (message.text !== "") writer.tag(3, (0, $kR6tG$WireType).LengthDelimited).string(message.text);
3479
+ let u = options.writeUnknownFields;
3480
+ if (u !== false) (u == true ? (0, $kR6tG$UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
3481
+ return writer;
3482
+ }
3483
+ }
3484
+ const $5177d56bd0c995e0$export$78410ada03f6931b = new $5177d56bd0c995e0$var$TextFrame$Type();
3485
// @generated message type with reflection information, may provide speed optimized methods
// Reflection descriptor for the `pipecat.AudioRawFrame` protobuf message:
// a chunk of raw audio bytes plus sample-rate/channel metadata and an
// optional presentation timestamp (`pts`).
class $5177d56bd0c995e0$var$AudioRawFrame$Type extends (0, $kR6tG$MessageType) {
    constructor(){
        // Field list mirrors the .proto schema; `T` values are @protobuf-ts
        // ScalarType ordinals and `L: 0` surfaces 64-bit ints as BigInt.
        super("pipecat.AudioRawFrame", [
            {
                no: 1,
                name: "id",
                kind: "scalar",
                T: 4 /*ScalarType.UINT64*/ ,
                L: 0 /*LongType.BIGINT*/
            },
            {
                no: 2,
                name: "name",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            },
            {
                no: 3,
                name: "audio",
                kind: "scalar",
                T: 12 /*ScalarType.BYTES*/
            },
            {
                no: 4,
                name: "sample_rate",
                kind: "scalar",
                T: 13 /*ScalarType.UINT32*/
            },
            {
                no: 5,
                name: "num_channels",
                kind: "scalar",
                T: 13 /*ScalarType.UINT32*/
            },
            {
                no: 6,
                name: "pts",
                kind: "scalar",
                opt: true,
                T: 4 /*ScalarType.UINT64*/ ,
                L: 0 /*LongType.BIGINT*/
            }
        ]);
    }
    // Builds a message with proto3 defaults (`pts` stays undefined because it
    // is optional); a partial `value` is merged over the defaults when given.
    create(value) {
        const message = globalThis.Object.create(this.messagePrototype);
        message.id = 0n;
        message.name = "";
        message.audio = new Uint8Array(0);
        message.sampleRate = 0;
        message.numChannels = 0;
        if (value !== undefined) (0, $kR6tG$reflectionMergePartial)(this, message, value);
        return message;
    }
    // Decodes `length` bytes of protobuf wire format into `target` (or a new
    // message). Unknown fields are skipped or recorded according to
    // `options.readUnknownField`.
    internalBinaryRead(reader, length, options, target) {
        let message = target ?? this.create(), end = reader.pos + length;
        while(reader.pos < end){
            let [fieldNo, wireType] = reader.tag();
            switch(fieldNo){
                case /* uint64 id */ 1:
                    message.id = reader.uint64().toBigInt();
                    break;
                case /* string name */ 2:
                    message.name = reader.string();
                    break;
                case /* bytes audio */ 3:
                    message.audio = reader.bytes();
                    break;
                case /* uint32 sample_rate */ 4:
                    message.sampleRate = reader.uint32();
                    break;
                case /* uint32 num_channels */ 5:
                    message.numChannels = reader.uint32();
                    break;
                case /* optional uint64 pts */ 6:
                    message.pts = reader.uint64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false) (u === true ? (0, $kR6tG$UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; only non-default fields are emitted (proto3
    // semantics), and optional `pts` is written whenever it is defined.
    internalBinaryWrite(message, writer, options) {
        /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $kR6tG$WireType).Varint).uint64(message.id);
        /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $kR6tG$WireType).LengthDelimited).string(message.name);
        /* bytes audio = 3; */ if (message.audio.length) writer.tag(3, (0, $kR6tG$WireType).LengthDelimited).bytes(message.audio);
        /* uint32 sample_rate = 4; */ if (message.sampleRate !== 0) writer.tag(4, (0, $kR6tG$WireType).Varint).uint32(message.sampleRate);
        /* uint32 num_channels = 5; */ if (message.numChannels !== 0) writer.tag(5, (0, $kR6tG$WireType).Varint).uint32(message.numChannels);
        /* optional uint64 pts = 6; */ if (message.pts !== undefined) writer.tag(6, (0, $kR6tG$WireType).Varint).uint64(message.pts);
        let u = options.writeUnknownFields;
        if (u !== false) (u == true ? (0, $kR6tG$UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
// Singleton instance used elsewhere in this module to encode/decode AudioRawFrame.
const $5177d56bd0c995e0$export$51d8721de3cbff8f = new $5177d56bd0c995e0$var$AudioRawFrame$Type();
3585
// @generated message type with reflection information, may provide speed optimized methods
// Reflection descriptor for `pipecat.TranscriptionFrame`: a transcription
// `text` attributed to `userId`, with a string `timestamp`.
class $5177d56bd0c995e0$var$TranscriptionFrame$Type extends (0, $kR6tG$MessageType) {
    constructor(){
        // Field list mirrors the .proto schema; `T` values are @protobuf-ts
        // ScalarType ordinals and `L: 0` surfaces the uint64 id as BigInt.
        super("pipecat.TranscriptionFrame", [
            {
                no: 1,
                name: "id",
                kind: "scalar",
                T: 4 /*ScalarType.UINT64*/ ,
                L: 0 /*LongType.BIGINT*/
            },
            {
                no: 2,
                name: "name",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            },
            {
                no: 3,
                name: "text",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            },
            {
                no: 4,
                name: "user_id",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            },
            {
                no: 5,
                name: "timestamp",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            }
        ]);
    }
    // Builds a message with proto3 defaults; a partial `value` is merged
    // over the defaults when given.
    create(value) {
        const message = globalThis.Object.create(this.messagePrototype);
        message.id = 0n;
        message.name = "";
        message.text = "";
        message.userId = "";
        message.timestamp = "";
        if (value !== undefined) (0, $kR6tG$reflectionMergePartial)(this, message, value);
        return message;
    }
    // Decodes `length` bytes of protobuf wire format into `target` (or a new
    // message); unknown fields are handled per `options.readUnknownField`.
    internalBinaryRead(reader, length, options, target) {
        let message = target ?? this.create(), end = reader.pos + length;
        while(reader.pos < end){
            let [fieldNo, wireType] = reader.tag();
            switch(fieldNo){
                case /* uint64 id */ 1:
                    message.id = reader.uint64().toBigInt();
                    break;
                case /* string name */ 2:
                    message.name = reader.string();
                    break;
                case /* string text */ 3:
                    message.text = reader.string();
                    break;
                case /* string user_id */ 4:
                    message.userId = reader.string();
                    break;
                case /* string timestamp */ 5:
                    message.timestamp = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false) (u === true ? (0, $kR6tG$UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; only non-default fields are emitted (proto3 semantics).
    internalBinaryWrite(message, writer, options) {
        /* uint64 id = 1; */ if (message.id !== 0n) writer.tag(1, (0, $kR6tG$WireType).Varint).uint64(message.id);
        /* string name = 2; */ if (message.name !== "") writer.tag(2, (0, $kR6tG$WireType).LengthDelimited).string(message.name);
        /* string text = 3; */ if (message.text !== "") writer.tag(3, (0, $kR6tG$WireType).LengthDelimited).string(message.text);
        /* string user_id = 4; */ if (message.userId !== "") writer.tag(4, (0, $kR6tG$WireType).LengthDelimited).string(message.userId);
        /* string timestamp = 5; */ if (message.timestamp !== "") writer.tag(5, (0, $kR6tG$WireType).LengthDelimited).string(message.timestamp);
        let u = options.writeUnknownFields;
        if (u !== false) (u == true ? (0, $kR6tG$UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
// Singleton instance used elsewhere in this module to encode/decode TranscriptionFrame.
const $5177d56bd0c995e0$export$10b388c15a5cdc8a = new $5177d56bd0c995e0$var$TranscriptionFrame$Type();
3673
// @generated message type with reflection information, may provide speed optimized methods
// Reflection descriptor for `pipecat.MessageFrame`: a single string payload
// (`data`). The transport below uses it to tunnel JSON-encoded RTVI
// messages over the websocket.
class $5177d56bd0c995e0$var$MessageFrame$Type extends (0, $kR6tG$MessageType) {
    constructor(){
        super("pipecat.MessageFrame", [
            {
                no: 1,
                name: "data",
                kind: "scalar",
                T: 9 /*ScalarType.STRING*/
            }
        ]);
    }
    // Builds a message with the proto3 default (empty string); a partial
    // `value` is merged over it when given.
    create(value) {
        const message = globalThis.Object.create(this.messagePrototype);
        message.data = "";
        if (value !== undefined) (0, $kR6tG$reflectionMergePartial)(this, message, value);
        return message;
    }
    // Decodes `length` bytes of protobuf wire format; unknown fields are
    // handled per `options.readUnknownField`.
    internalBinaryRead(reader, length, options, target) {
        let message = target ?? this.create(), end = reader.pos + length;
        while(reader.pos < end){
            let [fieldNo, wireType] = reader.tag();
            switch(fieldNo){
                case /* string data */ 1:
                    message.data = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false) (u === true ? (0, $kR6tG$UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; `data` is emitted only when non-empty (proto3 semantics).
    internalBinaryWrite(message, writer, options) {
        /* string data = 1; */ if (message.data !== "") writer.tag(1, (0, $kR6tG$WireType).LengthDelimited).string(message.data);
        let u = options.writeUnknownFields;
        if (u !== false) (u == true ? (0, $kR6tG$UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
// Singleton instance used elsewhere in this module to encode/decode MessageFrame.
const $5177d56bd0c995e0$export$bc3f45a6d434f14a = new $5177d56bd0c995e0$var$MessageFrame$Type();
3716
// @generated message type with reflection information, may provide speed optimized methods
// Reflection descriptor for the top-level `pipecat.Frame` envelope: a oneof
// over text / audio / transcription / message sub-frames. The transport
// below serializes every websocket payload as one of these envelopes.
class $5177d56bd0c995e0$var$Frame$Type extends (0, $kR6tG$MessageType) {
    constructor(){
        // `T` is a thunk so the referenced message types can be declared in
        // any order; the `text` type ($...78410ada03f6931b) is defined
        // earlier in this bundle, outside this excerpt.
        super("pipecat.Frame", [
            {
                no: 1,
                name: "text",
                kind: "message",
                oneof: "frame",
                T: ()=>$5177d56bd0c995e0$export$78410ada03f6931b
            },
            {
                no: 2,
                name: "audio",
                kind: "message",
                oneof: "frame",
                T: ()=>$5177d56bd0c995e0$export$51d8721de3cbff8f
            },
            {
                no: 3,
                name: "transcription",
                kind: "message",
                oneof: "frame",
                T: ()=>$5177d56bd0c995e0$export$10b388c15a5cdc8a
            },
            {
                no: 4,
                name: "message",
                kind: "message",
                oneof: "frame",
                T: ()=>$5177d56bd0c995e0$export$bc3f45a6d434f14a
            }
        ]);
    }
    // Builds an envelope with the oneof unset; a partial `value` is merged
    // over it when given.
    create(value) {
        const message = globalThis.Object.create(this.messagePrototype);
        message.frame = {
            oneofKind: undefined
        };
        if (value !== undefined) (0, $kR6tG$reflectionMergePartial)(this, message, value);
        return message;
    }
    // Decoder: each recognized field replaces `message.frame` with the
    // matching oneof variant (last field on the wire wins, per proto oneof
    // semantics); unknown fields are handled per `options.readUnknownField`.
    internalBinaryRead(reader, length, options, target) {
        let message = target ?? this.create(), end = reader.pos + length;
        while(reader.pos < end){
            let [fieldNo, wireType] = reader.tag();
            switch(fieldNo){
                case /* pipecat.TextFrame text */ 1:
                    message.frame = {
                        oneofKind: "text",
                        text: $5177d56bd0c995e0$export$78410ada03f6931b.internalBinaryRead(reader, reader.uint32(), options, message.frame.text)
                    };
                    break;
                case /* pipecat.AudioRawFrame audio */ 2:
                    message.frame = {
                        oneofKind: "audio",
                        audio: $5177d56bd0c995e0$export$51d8721de3cbff8f.internalBinaryRead(reader, reader.uint32(), options, message.frame.audio)
                    };
                    break;
                case /* pipecat.TranscriptionFrame transcription */ 3:
                    message.frame = {
                        oneofKind: "transcription",
                        transcription: $5177d56bd0c995e0$export$10b388c15a5cdc8a.internalBinaryRead(reader, reader.uint32(), options, message.frame.transcription)
                    };
                    break;
                case /* pipecat.MessageFrame message */ 4:
                    message.frame = {
                        oneofKind: "message",
                        message: $5177d56bd0c995e0$export$bc3f45a6d434f14a.internalBinaryRead(reader, reader.uint32(), options, message.frame.message)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false) (u === true ? (0, $kR6tG$UnknownFieldHandler).onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encoder: writes whichever oneof variant is set as a length-delimited
    // sub-message under its field number.
    internalBinaryWrite(message, writer, options) {
        /* pipecat.TextFrame text = 1; */ if (message.frame.oneofKind === "text") $5177d56bd0c995e0$export$78410ada03f6931b.internalBinaryWrite(message.frame.text, writer.tag(1, (0, $kR6tG$WireType).LengthDelimited).fork(), options).join();
        /* pipecat.AudioRawFrame audio = 2; */ if (message.frame.oneofKind === "audio") $5177d56bd0c995e0$export$51d8721de3cbff8f.internalBinaryWrite(message.frame.audio, writer.tag(2, (0, $kR6tG$WireType).LengthDelimited).fork(), options).join();
        /* pipecat.TranscriptionFrame transcription = 3; */ if (message.frame.oneofKind === "transcription") $5177d56bd0c995e0$export$10b388c15a5cdc8a.internalBinaryWrite(message.frame.transcription, writer.tag(3, (0, $kR6tG$WireType).LengthDelimited).fork(), options).join();
        /* pipecat.MessageFrame message = 4; */ if (message.frame.oneofKind === "message") $5177d56bd0c995e0$export$bc3f45a6d434f14a.internalBinaryWrite(message.frame.message, writer.tag(4, (0, $kR6tG$WireType).LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false) (u == true ? (0, $kR6tG$UnknownFieldHandler).onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
// Singleton Frame codec used by the WebSocketTransport below.
const $5177d56bd0c995e0$export$b89a827e9254211a = new $5177d56bd0c995e0$var$Frame$Type();
3807
+
3808
+
3809
/**
 * WebSocketTransport: Pipecat RTVI transport that exchanges protobuf
 * `pipecat.Frame` envelopes over a websocket, delegating local mic/speaker
 * handling to a Daily-based media manager.
 */
class $7f42eda74f1b1632$export$de21836fc42c6f9c extends (0, $kR6tG$Transport) {
    constructor(){
        super();
        // Mic chunks that arrive before the transport reaches "ready" are
        // queued here and drained by flushAudioQueue().
        this.audioQueue = [];
        // 512-sample chunks at the fixed 16 kHz recorder rate.
        // NOTE(review): the two leading boolean ctor args are assumed to
        // toggle audio capture/playback — confirm against DailyMediaManager.
        this._mediaManager = new (0, $22ece045290c996a$export$c95c65abc5f47125)(true, true, undefined, undefined, 512, $7f42eda74f1b1632$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE);
        this._mediaManager.setUserAudioCallback(this.handleUserAudioStream.bind(this));
        this._ws = null;
    }
    // Wires up RTVI options, callbacks, and the inbound-message handler;
    // leaves the transport in the "disconnected" state. Call before connect().
    initialize(options, messageHandler) {
        this._options = options;
        this._callbacks = options.callbacks ?? {};
        this._onMessage = messageHandler;
        this._mediaManager.setRTVIOptions(options);
        this.state = "disconnected";
    }
    // Acquires local media devices ahead of connecting.
    async initDevices() {
        this.state = "initializing";
        await this._mediaManager.initialize();
        this.state = "initialized";
    }
    // Opens the websocket (URL from authBundle.ws_url) then starts media.
    // Any failure is logged and rethrown as TransportStartError.
    // NOTE(review): abortController is accepted but never consulted —
    // confirm whether connect() is meant to be cancelable.
    async connect(authBundle, abortController) {
        this.state = "connecting";
        try {
            this._ws = this.initializeWebsocket(authBundle);
            await this._ws.connect();
            await this._mediaManager.connect();
            this.state = "connected";
            this._callbacks.onConnected?.();
        } catch (error) {
            const msg = `Failed to connect to websocket: ${error}`;
            (0, $kR6tG$logger).error(msg);
            this.state = "error";
            throw new (0, $kR6tG$TransportStartError)(msg);
        }
    }
    // Stops media first, then closes the socket.
    // NOTE(review): the "close" handler installed in initializeWebsocket
    // routes every close through connectionError(), which would set state
    // back to "error" after this method sets "disconnected" — verify the ws
    // wrapper suppresses the event on explicit close.
    async disconnect() {
        this.state = "disconnecting";
        await this._mediaManager.disconnect();
        await this._ws?.close();
        this.state = "disconnected";
        this._callbacks.onDisconnected?.();
    }
    // Device enumeration/selection is delegated wholesale to the media manager.
    getAllMics() {
        return this._mediaManager.getAllMics();
    }
    getAllCams() {
        return this._mediaManager.getAllCams();
    }
    getAllSpeakers() {
        return this._mediaManager.getAllSpeakers();
    }
    async updateMic(micId) {
        return this._mediaManager.updateMic(micId);
    }
    updateCam(camId) {
        return this._mediaManager.updateCam(camId);
    }
    updateSpeaker(speakerId) {
        return this._mediaManager.updateSpeaker(speakerId);
    }
    get selectedMic() {
        return this._mediaManager.selectedMic;
    }
    get selectedSpeaker() {
        return this._mediaManager.selectedSpeaker;
    }
    enableMic(enable) {
        this._mediaManager.enableMic(enable);
    }
    get isMicEnabled() {
        return this._mediaManager.isMicEnabled;
    }
    get state() {
        return this._state;
    }
    // De-duplicated state transitions, surfaced via onTransportStateChanged.
    set state(state) {
        if (this._state === state) return;
        this._state = state;
        this._callbacks.onTransportStateChanged?.(state);
    }
    get expiry() {
        return this._expiry;
    }
    tracks() {
        return this._mediaManager.tracks();
    }
    // Builds the websocket wrapper and installs all event handlers. Inbound
    // binary payloads are decoded as `pipecat.Frame`: audio frames feed the
    // media manager's bot-audio buffer; "rtvi-ai" JSON messages are forwarded
    // to the handler given to initialize().
    initializeWebsocket(authBundle) {
        const ws = new (0, $032380dbcf3f4e13$export$4f3d0ffd941ebefb)(`${authBundle.ws_url}`, undefined, {
            parseBlobToJson: false
        });
        // disabling the keep alive, there is no API for it inside Pipecat
        ws.keepAliveInterval = 0;
        ws.on("open", ()=>{
            (0, $kR6tG$logger).debug("Websocket connection opened");
        });
        ws.on("message", async (data)=>{
            let arrayBuffer = await data.arrayBuffer();
            const parsedFrame = (0, $5177d56bd0c995e0$export$b89a827e9254211a).fromBinary(new Uint8Array(arrayBuffer)).frame;
            if (parsedFrame.oneofKind === "audio") {
                // We should be able to use parsedFrame.audio.audio.buffer but for
                // some reason that contains all the bytes from the protobuf message.
                const audioVector = Array.from(parsedFrame.audio.audio);
                const uint8Array = new Uint8Array(audioVector);
                const int16Array = new Int16Array(uint8Array.buffer);
                this._mediaManager.bufferBotAudio(int16Array);
            } else if (parsedFrame.oneofKind === "message") {
                let jsonText = parsedFrame.message.data;
                try {
                    let jsonMessage = JSON.parse(jsonText);
                    // Only RTVI-labeled messages are forwarded to the client.
                    if (jsonMessage.label === "rtvi-ai") this._onMessage(jsonMessage);
                } catch {
                    (0, $kR6tG$logger).warn("Failed to parse message", jsonText);
                }
            }
        });
        ws.on("error", (error)=>{
            this.connectionError(`websocket error: ${error}`);
        });
        ws.on("connection-timeout", ()=>{
            this.connectionError("websocket connection timed out");
        });
        ws.on("close", (code)=>{
            this.connectionError(`websocket connection closed. Code: ${code}`);
        });
        ws.on("reconnect-failed", ()=>{
            this.connectionError(`websocket reconnect failed`);
        });
        return ws;
    }
    // Marks the transport ready and notifies the server; from this point mic
    // audio is sent live instead of queued.
    sendReadyMessage() {
        this.state = "ready";
        this.sendMessage((0, $kR6tG$RTVIMessage).clientReady());
    }
    // Media-manager callback for each captured mic chunk. Before "ready" the
    // chunk is queued; afterwards the backlog is drained and the chunk sent.
    // NOTE(review): _sendAudioInput is async but not awaited, so this
    // try/catch only sees synchronous throws; async send failures are only
    // logged inside _sendMsg.
    handleUserAudioStream(data) {
        if (this.state === "ready") try {
            this.flushAudioQueue();
            this._sendAudioInput(data);
        } catch (error) {
            (0, $kR6tG$logger).error("Error sending audio stream to websocket:", error);
            this.state = "error";
        }
        else this.audioQueue.push(data);
    }
    // Drains audio that was queued while the transport was not yet "ready".
    flushAudioQueue() {
        if (this.audioQueue.length <= 0) return;
        (0, $kR6tG$logger).info("Will flush audio queue", this.audioQueue.length);
        while(this.audioQueue.length > 0){
            const queuedData = this.audioQueue.shift();
            if (queuedData) this._sendAudioInput(queuedData);
        }
    }
    // Wraps an RTVI message as a JSON string inside a MessageFrame envelope
    // and sends it (fire-and-forget).
    sendMessage(message) {
        (0, $kR6tG$logger).debug("Received message to send to Web Socket", message);
        const frame = (0, $5177d56bd0c995e0$export$b89a827e9254211a).create({
            frame: {
                oneofKind: "message",
                message: {
                    data: JSON.stringify(message)
                }
            }
        });
        this._sendMsg(frame);
    }
    // Wraps a PCM chunk in an AudioRawFrame (mono, RECORDER_SAMPLE_RATE) and
    // sends it.
    async _sendAudioInput(data) {
        const pcmByteArray = new Uint8Array(data);
        const frame = (0, $5177d56bd0c995e0$export$b89a827e9254211a).create({
            frame: {
                oneofKind: "audio",
                audio: {
                    id: 0n,
                    name: "audio",
                    audio: pcmByteArray,
                    sampleRate: $7f42eda74f1b1632$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE,
                    numChannels: 1
                }
            }
        });
        await this._sendMsg(frame);
    }
    // Serializes a Frame and sends it over the socket. Missing socket,
    // non-OPEN readyState, a falsy msg, and send failures are all logged
    // rather than thrown (best-effort by design).
    async _sendMsg(msg) {
        if (!this._ws) {
            (0, $kR6tG$logger).error("sendMsg called but WS is null");
            return;
        }
        if (this._ws.readyState !== WebSocket.OPEN) {
            (0, $kR6tG$logger).error("attempt to send to closed socket");
            return;
        }
        if (!msg) {
            (0, $kR6tG$logger).error("need a msg to send a msg");
            return;
        }
        try {
            const encodedFrame = new Uint8Array((0, $5177d56bd0c995e0$export$b89a827e9254211a).toBinary(msg));
            await this._ws.send(encodedFrame);
        } catch (e) {
            (0, $kR6tG$logger).error("sendMsg error", e);
        }
    }
    // Logs the error, flips state to "error", and tears the connection down.
    // NOTE(review): disconnect() is async and its promise is dropped here —
    // a rejection would surface as an unhandled rejection.
    connectionError(errorMsg) {
        console.error(errorMsg);
        this.state = "error";
        this.disconnect();
    }
    // Not implemented
    enableScreenShare(enable) {
        (0, $kR6tG$logger).error("startScreenShare not implemented for WebSocketTransport");
        throw new Error("Not implemented");
    }
    get isSharingScreen() {
        (0, $kR6tG$logger).error("isSharingScreen not implemented for WebSocketTransport");
        return false;
    }
    enableCam(enable) {
        (0, $kR6tG$logger).error("enableCam not implemented for WebSocketTransport");
        throw new Error("Not implemented");
    }
    get isCamEnabled() {
        (0, $kR6tG$logger).error("isCamEnabled not implemented for WebSocketTransport");
        return false;
    }
    get selectedCam() {
        (0, $kR6tG$logger).error("selectedCam not implemented for WebSocketTransport");
        throw new Error("Not implemented");
    }
}
// Fixed PCM sample rate (Hz) used for mic capture and outbound audio frames.
$7f42eda74f1b1632$export$de21836fc42c6f9c.RECORDER_SAMPLE_RATE = 16000;
4036
+
4037
+
4038
+
4039
+
4040
+ export {$fc49a56cd8739127$export$45c5b9bfba2f6304 as WavMediaManager, $22ece045290c996a$export$c95c65abc5f47125 as DailyMediaManager, $7f42eda74f1b1632$export$de21836fc42c6f9c as WebSocketTransport};
4041
+ //# sourceMappingURL=index.module.js.map