hume 0.10.4-beta.5 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -77,7 +77,10 @@ class Chat {
77
77
  queryParams[name] = value;
78
78
  }
79
79
  }
80
- const socket = new core.ReconnectingWebSocket(`wss://${((_a = core.Supplier.get(this._options.environment)) !== null && _a !== void 0 ? _a : environments.HumeEnvironment.Production).replace("https://", "")}/v0/evi/chat?${qs_1.default.stringify(queryParams)}`, [], {
80
+ const environ = ((_a = core.Supplier.get(this._options.environment)) !== null && _a !== void 0 ? _a : environments.HumeEnvironment.Production)
81
+ .replace("https://", "wss://")
82
+ .replace("http://", "ws://");
83
+ const socket = new core.ReconnectingWebSocket(`${environ}/v0/evi/chat?${qs_1.default.stringify(queryParams)}`, [], {
81
84
  debug: (_b = args.debug) !== null && _b !== void 0 ? _b : false,
82
85
  maxRetries: (_c = args.reconnectAttempts) !== null && _c !== void 0 ? _c : 30,
83
86
  });
@@ -175,7 +175,6 @@ class ReconnectingWebSocket {
175
175
  if (this._shouldReconnect)
176
176
  this._connect();
177
177
  };
178
- console.log('constructing...');
179
178
  this._url = url;
180
179
  this._protocols = protocols;
181
180
  this._options = options;
@@ -186,7 +185,6 @@ class ReconnectingWebSocket {
186
185
  if (this._options.startClosed) {
187
186
  this._shouldReconnect = false;
188
187
  }
189
- console.log('all good');
190
188
  this._connect();
191
189
  }
192
190
  static get CONNECTING() {
@@ -435,7 +433,7 @@ class ReconnectingWebSocket {
435
433
  }
436
434
  // Set lock for this attempt
437
435
  this._connectLock = true;
438
- const { maxRetries = DEFAULT.maxRetries, connectionTimeout = DEFAULT.connectionTimeout, } = this._options;
436
+ const { maxRetries = DEFAULT.maxRetries, connectionTimeout = DEFAULT.connectionTimeout } = this._options;
439
437
  // Max retries check
440
438
  if (this._retryCount >= maxRetries) {
441
439
  this._shutdown(new Error(`Max retries (${maxRetries}) reached. Giving up.`), "Max retries reached");
@@ -456,7 +454,7 @@ class ReconnectingWebSocket {
456
454
  return;
457
455
  }
458
456
  this._debug("connect", { url, protocols: this._protocols });
459
- this._ws = this._protocols ? new (this._WebSocket)(url, this._protocols) : new (this._WebSocket)(url);
457
+ this._ws = this._protocols ? new this._WebSocket(url, this._protocols) : new this._WebSocket(url);
460
458
  this._ws.binaryType = this._binaryType;
461
459
  this._addListeners();
462
460
  this._connectLock = false;
@@ -77,7 +77,10 @@ class Chat {
77
77
  queryParams[name] = value;
78
78
  }
79
79
  }
80
- const socket = new core.ReconnectingWebSocket(`wss://${((_a = core.Supplier.get(this._options.environment)) !== null && _a !== void 0 ? _a : environments.HumeEnvironment.Production).replace("https://", "")}/v0/evi/chat?${qs_1.default.stringify(queryParams)}`, [], {
80
+ const environ = ((_a = core.Supplier.get(this._options.environment)) !== null && _a !== void 0 ? _a : environments.HumeEnvironment.Production)
81
+ .replace("https://", "wss://")
82
+ .replace("http://", "ws://");
83
+ const socket = new core.ReconnectingWebSocket(`${environ}/v0/evi/chat?${qs_1.default.stringify(queryParams)}`, [], {
81
84
  debug: (_b = args.debug) !== null && _b !== void 0 ? _b : false,
82
85
  maxRetries: (_c = args.reconnectAttempts) !== null && _c !== void 0 ? _c : 30,
83
86
  });
@@ -175,7 +175,6 @@ class ReconnectingWebSocket {
175
175
  if (this._shouldReconnect)
176
176
  this._connect();
177
177
  };
178
- console.log('constructing...');
179
178
  this._url = url;
180
179
  this._protocols = protocols;
181
180
  this._options = options;
@@ -186,7 +185,6 @@ class ReconnectingWebSocket {
186
185
  if (this._options.startClosed) {
187
186
  this._shouldReconnect = false;
188
187
  }
189
- console.log('all good');
190
188
  this._connect();
191
189
  }
192
190
  static get CONNECTING() {
@@ -435,7 +433,7 @@ class ReconnectingWebSocket {
435
433
  }
436
434
  // Set lock for this attempt
437
435
  this._connectLock = true;
438
- const { maxRetries = DEFAULT.maxRetries, connectionTimeout = DEFAULT.connectionTimeout, } = this._options;
436
+ const { maxRetries = DEFAULT.maxRetries, connectionTimeout = DEFAULT.connectionTimeout } = this._options;
439
437
  // Max retries check
440
438
  if (this._retryCount >= maxRetries) {
441
439
  this._shutdown(new Error(`Max retries (${maxRetries}) reached. Giving up.`), "Max retries reached");
@@ -456,7 +454,7 @@ class ReconnectingWebSocket {
456
454
  return;
457
455
  }
458
456
  this._debug("connect", { url, protocols: this._protocols });
459
- this._ws = this._protocols ? new (this._WebSocket)(url, this._protocols) : new (this._WebSocket)(url);
457
+ this._ws = this._protocols ? new this._WebSocket(url, this._protocols) : new this._WebSocket(url);
460
458
  this._ws.binaryType = this._binaryType;
461
459
  this._addListeners();
462
460
  this._connectLock = false;
@@ -0,0 +1,174 @@
1
+ import type { AudioOutput } from "api/resources/empathicVoice";
2
+ /**
3
+ * Options for configuring an {@link EVIWebAudioPlayer}.
4
+ *
5
+ * @default `{}` for sensible defaults.
6
+ */
7
+ export interface EVIWebAudioPlayerOptions {
8
+ /**
9
+ * Initial master gain, via a `GainNode`, from `0` (_silent_) to `1` (_full volume_).
10
+ * Values outside this range are clamped.
11
+ *
12
+ * @default 1
13
+ */
14
+ volume?: number;
15
+ /**
16
+ * Real-time FFT (frequency-domain) settings **only** for visualization.
17
+ *
18
+ * - **Disable**: omit or `{ enabled: false }` – no `AnalyserNode` is created.
19
+ * - **Defaults**: `{ enabled: true }` → 2048-point FFT at 16 ms (~60 Hz), mapped to 24 Bark bands.
20
+ * - **Custom**: supply {@link EVIWebAudioPlayerFFTOptions} to override `size`, `interval`, or `transform`.
21
+ */
22
+ fft?: EVIWebAudioPlayerFFTOptions;
23
+ }
24
+ /**
25
+ * FFT (frequency-domain) options for visualization.
26
+ *
27
+ * Pass `{ enabled: true }` for defaults, or omit/disable entirely for zero overhead.
28
+ */
29
+ export type EVIWebAudioPlayerFFTOptions = FftEnabled | FftDisabled;
30
+ type FftDisabled = {
31
+ /**
32
+ * Turn visualization data OFF—skip analyser creation entirely (zero extra CPU).
33
+ */
34
+ enabled: false;
35
+ };
36
+ type FftEnabled = {
37
+ /**
38
+ * Turn visualization data ON—create an `AnalyserNode`, poll it, and emit `'fft'` events.
39
+ */
40
+ enabled: true;
41
+ /**
42
+ * FFT size (power-of-two, 32 – 32768).
43
+ * Defaults to 2048 → 1024 bins (~ 23 Hz at 48 kHz).
44
+ * @default 2048
45
+ */
46
+ size?: number;
47
+ /**
48
+ * Polling interval, in **milliseconds**.
49
+ * Default 16 ms (~ 60 Hz) to sync with `requestAnimationFrame()`.
50
+ * @default 16
51
+ */
52
+ interval?: number;
53
+ /**
54
+ * Custom post-processing for raw magnitude data. Omit for built-in 24-band Bark mapping.
55
+ *
56
+ * @param bins PCM byte magnitudes (0 – 255) from `AnalyserNode`.
57
+ * @param sampleRate `AudioContext` sample rate in Hz.
58
+ * @returns Payload emitted with each `'fft'` event.
59
+ */
60
+ transform?: (bins: Uint8Array, sampleRate: number) => number[];
61
+ };
62
+ type PlayerEventMap = {
63
+ play: CustomEvent<{
64
+ id: string;
65
+ }>;
66
+ stop: CustomEvent<{
67
+ id: string;
68
+ }>;
69
+ fft: CustomEvent<{
70
+ fft: number[];
71
+ }>;
72
+ error: CustomEvent<{
73
+ message: string;
74
+ }>;
75
+ };
76
+ /**
77
+ * A sequential, glitch-free Web-Audio player for **EVI** audio output.
78
+ *
79
+ * - **Decoding & playback**: base-64 PCM chunks feed an `AudioWorkletNode` and play in order, without gaps.
80
+ * - **One-time init**: await {@link init} in a user-gesture to build audio graph and unlock the browser’s
81
+ * `AudioContext`; later calls are no-ops.
82
+ * - **Optional FFT**: `{ fft: { enabled: true } }` adds an `AnalyserNode` and emits `'fft'` events; omit to skip.
83
+ * - **Controls**: {@link setVolume}, {@link mute}, {@link unmute}, {@link stop}, {@link dispose}.
84
+ * - **Events**: listen for `'play'`, `'stop'`, `'fft'`, `'error'`.
85
+ */
86
+ export declare class EVIWebAudioPlayer extends EventTarget {
87
+ #private;
88
+ private readonly opts;
89
+ /** `true` while any clip is currently audible. */
90
+ get playing(): boolean;
91
+ /** `true` if gain is set to 0 via {@link mute}. */
92
+ get muted(): boolean;
93
+ /** Current output gain (0‑1). */
94
+ get volume(): number;
95
+ /** Most recent FFT frame (empty when analyser disabled). */
96
+ get fft(): number[];
97
+ constructor(opts?: EVIWebAudioPlayerOptions);
98
+ /**
99
+ * Generate an empty FFT frame array.
100
+ * Useful as an initial or placeholder FFT dataset before any real analysis.
101
+ *
102
+ * @returns A number[] filled with zeros, length equal to the Bark band count (24).
103
+ */
104
+ static emptyFft(): number[];
105
+ /**
106
+ * Subscribes to a player event and returns `this` for chaining.
107
+ *
108
+ * @param type One of `'play'`, `'stop'`, `'fft'`, or `'error'`.
109
+ * @param fn Handler invoked with the event’s typed `detail` payload.
110
+ * @param opts Optional `AddEventListenerOptions` (e.g. `{ once: true }`).
111
+ *
112
+ * @example
113
+ * ```ts
114
+ * const player = new EVIWebAudioPlayer();
115
+ * player
116
+ * .on('play', e => console.log('play', e.detail.id))
117
+ * .on('stop', e => console.log('stop', e.detail.id))
118
+ * .on('fft', e => console.log('fft', e.detail.fft))
119
+ * .on('error', e => console.error('error', e.detail.message));
120
+ * ```
121
+ */
122
+ on<K extends keyof PlayerEventMap>(type: K, fn: (e: PlayerEventMap[K]) => void, opts?: AddEventListenerOptions): this;
123
+ /**
124
+ * Set up and start the player’s Web-Audio pipeline.
125
+ *
126
+ * - Creates a **suspended** `AudioContext`, loads the worklet processor, wires `AudioWorkletNode → (AnalyserNode?) → GainNode → destination`, then calls `resume()`.
127
+ * - Must be awaited inside a user-gesture (click/tap/key); later calls are no-ops.
128
+ * - If `fft.enabled` is `false` (or `fft` is omitted), no `AnalyserNode` or polling timer is created.
129
+ *
130
+ * **Safari quirk:** Safari locks an `AudioContext` to the device’s current sample rate at creation.
131
+ * If you open a Bluetooth headset mic afterward, the OS may switch to the 16 kHz HFP profile and down-sample playback, which sounds “telephone-y.”
132
+ * To avoid this, call `getUserMedia()` (or otherwise open audio input) **before** `init()`.
133
+ *
134
+ * @throws {Error} If the browser lacks `AudioWorklet` support, or if `AudioContext.resume()` is rejected (autoplay policy, device error).
135
+ */
136
+ init(): Promise<void>;
137
+ /**
138
+ * Queue one {@link AudioOutput} message for playback.
139
+ *
140
+ * Decodes the base-64 PCM data, sends it to the `AudioWorkletNode` for glitch-free, in-order playback, and emits `'play'` for the first chunk of a new stream.
141
+ *
142
+ * @param message The `AudioOutput` message received from EVI’s WebSocket.
143
+ *
144
+ * @see {@link https://dev.hume.ai/reference/empathic-voice-interface-evi/chat/chat#receive.Audio-Output.type API Reference}
145
+ */
146
+ enqueue(message: AudioOutput): Promise<void>;
147
+ /**
148
+ * Flush the worklet queue and output silence.
149
+ */
150
+ stop(): void;
151
+ /**
152
+ * Set the master gain ({@link volume}) to a value between `0` (_silent_) and `1` (_full volume_).
153
+ *
154
+ * - Clamps out-of-range values.
155
+ * - If called before {@link init}, stores volume for when `AudioContext` is created.
156
+ * - If currently {@link muted}, updates stored volume but keeps output silent until {@link unmute}.
157
+ *
158
+ * @param volume Desired gain; clamped to [0, 1].
159
+ */
160
+ setVolume(volume: number): void;
161
+ /**
162
+ * Mute output instantly by setting the gain to 0. Retains the last volume internally for later restore.
163
+ */
164
+ mute(): void;
165
+ /**
166
+ * Restore output gain to the last set volume (via setVolume).
167
+ */
168
+ unmute(): void;
169
+ /**
170
+ * Tear down all Web-Audio resources (worklet, analyser, gain, context) and reset state so {@link init} can be called again.
171
+ */
172
+ dispose(): void;
173
+ }
174
+ export {};
@@ -0,0 +1,314 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
12
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
13
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
14
+ return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
15
+ };
16
+ var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
17
+ if (kind === "m") throw new TypeError("Private method is not writable");
18
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
19
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
20
+ return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
21
+ };
22
+ var _EVIWebAudioPlayer_instances, _a, _EVIWebAudioPlayer_DEFAULT_WORKLET_URL, _EVIWebAudioPlayer_DEFAULT_FFT_SIZE, _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL, _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES, _EVIWebAudioPlayer_BYTE_MAX, _EVIWebAudioPlayer_ctx, _EVIWebAudioPlayer_workletNode, _EVIWebAudioPlayer_analyserNode, _EVIWebAudioPlayer_gainNode, _EVIWebAudioPlayer_initialized, _EVIWebAudioPlayer_playing, _EVIWebAudioPlayer_muted, _EVIWebAudioPlayer_volume, _EVIWebAudioPlayer_fft, _EVIWebAudioPlayer_fftTimer, _EVIWebAudioPlayer_fftOptions, _EVIWebAudioPlayer_linearHzToBark, _EVIWebAudioPlayer_startAnalyserPollingIfEnabled, _EVIWebAudioPlayer_emitError;
23
+ Object.defineProperty(exports, "__esModule", { value: true });
24
+ exports.EVIWebAudioPlayer = void 0;
25
+ const convertBase64ToBlob_1 = require("./convertBase64ToBlob");
26
+ /**
27
+ * A sequential, glitch-free Web-Audio player for **EVI** audio output.
28
+ *
29
+ * - **Decoding & playback**: base-64 PCM chunks feed an `AudioWorkletNode` and play in order, without gaps.
30
+ * - **One-time init**: await {@link init} in a user-gesture to build audio graph and unlock the browser’s
31
+ * `AudioContext`; later calls are no-ops.
32
+ * - **Optional FFT**: `{ fft: { enabled: true } }` adds an `AnalyserNode` and emits `'fft'` events; omit to skip.
33
+ * - **Controls**: {@link setVolume}, {@link mute}, {@link unmute}, {@link stop}, {@link dispose}.
34
+ * - **Events**: listen for `'play'`, `'stop'`, `'fft'`, `'error'`.
35
+ */
36
+ class EVIWebAudioPlayer extends EventTarget {
37
+ /** `true` while any clip is currently audible. */
38
+ get playing() {
39
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_playing, "f");
40
+ }
41
+ /** `true` if gain is set to 0 via {@link mute}. */
42
+ get muted() {
43
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_muted, "f");
44
+ }
45
+ /** Current output gain (0‑1). */
46
+ get volume() {
47
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f");
48
+ }
49
+ /** Most recent FFT frame (empty when analyser disabled). */
50
+ get fft() {
51
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_fft, "f");
52
+ }
53
+ constructor(opts = {}) {
54
+ var _b, _c;
55
+ super();
56
+ _EVIWebAudioPlayer_instances.add(this);
57
+ this.opts = opts;
58
+ _EVIWebAudioPlayer_ctx.set(this, null);
59
+ _EVIWebAudioPlayer_workletNode.set(this, null);
60
+ _EVIWebAudioPlayer_analyserNode.set(this, null);
61
+ _EVIWebAudioPlayer_gainNode.set(this, null);
62
+ _EVIWebAudioPlayer_initialized.set(this, false);
63
+ _EVIWebAudioPlayer_playing.set(this, false);
64
+ _EVIWebAudioPlayer_muted.set(this, false);
65
+ _EVIWebAudioPlayer_volume.set(this, void 0);
66
+ _EVIWebAudioPlayer_fft.set(this, _a.emptyFft());
67
+ _EVIWebAudioPlayer_fftTimer.set(this, null);
68
+ _EVIWebAudioPlayer_fftOptions.set(this, null);
69
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_volume, (_b = opts.volume) !== null && _b !== void 0 ? _b : 1.0, "f");
70
+ // Resolve FFT options if enabled
71
+ if ((_c = opts.fft) === null || _c === void 0 ? void 0 : _c.enabled) {
72
+ const { size, interval, transform } = opts.fft;
73
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftOptions, {
74
+ size: size !== null && size !== void 0 ? size : __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_FFT_SIZE),
75
+ interval: interval !== null && interval !== void 0 ? interval : __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL),
76
+ transform: transform !== null && transform !== void 0 ? transform : __classPrivateFieldGet(_a, _a, "m", _EVIWebAudioPlayer_linearHzToBark),
77
+ }, "f");
78
+ }
79
+ }
80
+ /**
81
+ * Generate an empty FFT frame array.
82
+ * Useful as an initial or placeholder FFT dataset before any real analysis.
83
+ *
84
+ * @returns A number[] filled with zeros, length equal to the Bark band count (24).
85
+ */
86
+ static emptyFft() {
87
+ return Array(__classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES).length).fill(0);
88
+ }
89
+ /**
90
+ * Subscribes to a player event and returns `this` for chaining.
91
+ *
92
+ * @param type One of `'play'`, `'stop'`, `'fft'`, or `'error'`.
93
+ * @param fn Handler invoked with the event’s typed `detail` payload.
94
+ * @param opts Optional `AddEventListenerOptions` (e.g. `{ once: true }`).
95
+ *
96
+ * @example
97
+ * ```ts
98
+ * const player = new EVIWebAudioPlayer();
99
+ * player
100
+ * .on('play', e => console.log('play', e.detail.id))
101
+ * .on('stop', e => console.log('stop', e.detail.id))
102
+ * .on('fft', e => console.log('fft', e.detail.fft))
103
+ * .on('error', e => console.error('error', e.detail.message));
104
+ * ```
105
+ */
106
+ on(type, fn, opts) {
107
+ super.addEventListener(type, fn, opts);
108
+ return this;
109
+ }
110
+ /**
111
+ * Set up and start the player’s Web-Audio pipeline.
112
+ *
113
+ * - Creates a **suspended** `AudioContext`, loads the worklet processor, wires `AudioWorkletNode → (AnalyserNode?) → GainNode → destination`, then calls `resume()`.
114
+ * - Must be awaited inside a user-gesture (click/tap/key); later calls are no-ops.
115
+ * - If `fft.enabled` is `false` (or `fft` is omitted), no `AnalyserNode` or polling timer is created.
116
+ *
117
+ * **Safari quirk:** Safari locks an `AudioContext` to the device’s current sample rate at creation.
118
+ * If you open a Bluetooth headset mic afterward, the OS may switch to the 16 kHz HFP profile and down-sample playback, which sounds “telephone-y.”
119
+ * To avoid this, call `getUserMedia()` (or otherwise open audio input) **before** `init()`.
120
+ *
121
+ * @throws {Error} If the browser lacks `AudioWorklet` support, or if `AudioContext.resume()` is rejected (autoplay policy, device error).
122
+ */
123
+ init() {
124
+ return __awaiter(this, void 0, void 0, function* () {
125
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_initialized, "f"))
126
+ return;
127
+ // Create the AudioContext
128
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_ctx, new AudioContext(), "f");
129
+ // Fail fast if AudioWorklet isn’t supported
130
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").audioWorklet) {
131
+ const msg = "AudioWorklet is not supported in this browser";
132
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, msg);
133
+ throw new Error(msg);
134
+ }
135
+ try {
136
+ // Build GainNode
137
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_gainNode, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").createGain(), "f");
138
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.value = __classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f");
139
+ // Build AnalyserNode (optional)
140
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f")) {
141
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_analyserNode, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").createAnalyser(), "f");
142
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").fftSize = __classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f").size;
143
+ }
144
+ // Loads the AudioWorklet processor module.
145
+ yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").audioWorklet.addModule(__classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_WORKLET_URL));
146
+ // Build AudioWorkletNode
147
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_workletNode, new AudioWorkletNode(__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"), "audio-processor"), "f");
148
+ // When the worklet posts { type: "ended" }, mark playback stopped and emit a `'stop'` event.
149
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f").port.onmessage = (e) => {
150
+ if (e.data.type === "ended") {
151
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
152
+ this.dispatchEvent(new CustomEvent("stop", { detail: { id: "stream" } }));
153
+ }
154
+ };
155
+ // Audio graph nodes
156
+ const workletNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f"); // AudioWorkletNode (PCM processor)
157
+ const analyserNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f"); // Optional AnalyserNode (FFT)
158
+ const gainNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f"); // GainNode (volume control)
159
+ const destination = __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").destination; // AudioDestinationNode (speakers)
160
+ // Analyser node is filtered out of audio graph if null (FFT disabled)
161
+ const audioGraph = [workletNode, analyserNode, gainNode, destination].filter(Boolean);
162
+ // Wire nodes: AudioWorkletNode → (AnalyserNode?) → GainNode → AudioDestinationNode
163
+ audioGraph.reduce((prev, next) => (prev.connect(next), next));
164
+ // If an analyser is configured, begin polling it at the resolved interval and dispatching `'fft'` events for each frame.
165
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_startAnalyserPollingIfEnabled).call(this);
166
+ // Resume the AudioContext now that the audio graph is fully wired.
167
+ // Browsers allow `resume()` only inside a user-gesture callback.
168
+ // Any rejection (autoplay policy, hardware issue, etc.) is caught by the outer catch-block below, which emits an 'error' event and re-throws.
169
+ yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").resume();
170
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_initialized, true, "f");
171
+ }
172
+ catch (err) {
173
+ const suffix = err instanceof Error ? `: ${err.message}` : String(err);
174
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, `Failed to initialize audio player${suffix}`);
175
+ throw err;
176
+ }
177
+ });
178
+ }
179
+ /**
180
+ * Queue one {@link AudioOutput} message for playback.
181
+ *
182
+ * Decodes the base-64 PCM data, sends it to the `AudioWorkletNode` for glitch-free, in-order playback, and emits `'play'` for the first chunk of a new stream.
183
+ *
184
+ * @param message The `AudioOutput` message received from EVI’s WebSocket.
185
+ *
186
+ * @see {@link https://dev.hume.ai/reference/empathic-voice-interface-evi/chat/chat#receive.Audio-Output.type API Reference}
187
+ */
188
+ enqueue(message) {
189
+ return __awaiter(this, void 0, void 0, function* () {
190
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_initialized, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f")) {
191
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, "Audio player is not initialized");
192
+ return;
193
+ }
194
+ try {
195
+ const { data, id } = message;
196
+ const blob = (0, convertBase64ToBlob_1.convertBase64ToBlob)(data);
197
+ const buffer = yield blob.arrayBuffer();
198
+ const audio = yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").decodeAudioData(buffer);
199
+ const pcmData = audio.getChannelData(0);
200
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f").port.postMessage({ type: "audio", data: pcmData });
201
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, true, "f");
202
+ this.dispatchEvent(new CustomEvent("play", { detail: { id } }));
203
+ }
204
+ catch (err) {
205
+ const msg = err instanceof Error ? err.message : "Unknown error";
206
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, `Failed to queue clip: ${msg}`);
207
+ }
208
+ });
209
+ }
210
+ /**
211
+ * Flush the worklet queue and output silence.
212
+ */
213
+ stop() {
214
+ var _b;
215
+ // Clear buffered audio from the worklet queue
216
+ (_b = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _b === void 0 ? void 0 : _b.port.postMessage({ type: "fadeAndClear" });
217
+ // Restart analyser polling so fft events continue after stopping or clearing the queue
218
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_startAnalyserPollingIfEnabled).call(this);
219
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
220
+ this.dispatchEvent(new CustomEvent("stop", { detail: { id: "manual" } }));
221
+ }
222
+ /**
223
+ * Set the master gain ({@link volume}) to a value between `0` (_silent_) and `1` (_full volume_).
224
+ *
225
+ * - Clamps out-of-range values.
226
+ * - If called before {@link init}, stores volume for when `AudioContext` is created.
227
+ * - If currently {@link muted}, updates stored volume but keeps output silent until {@link unmute}.
228
+ *
229
+ * @param volume Desired gain; clamped to [0, 1].
230
+ */
231
+ setVolume(volume) {
232
+ const clampedVolume = Math.max(0, Math.min(volume, 1));
233
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_volume, clampedVolume, "f");
234
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") && __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f") && !__classPrivateFieldGet(this, _EVIWebAudioPlayer_muted, "f")) {
235
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(clampedVolume, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
236
+ }
237
+ }
238
+ /**
239
+ * Mute output instantly by setting the gain to 0. Retains the last volume internally for later restore.
240
+ */
241
+ mute() {
242
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"))
243
+ return;
244
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(0, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
245
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_muted, true, "f");
246
+ }
247
+ /**
248
+ * Restore output gain to the last set volume (via setVolume).
249
+ */
250
+ unmute() {
251
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"))
252
+ return;
253
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(__classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f"), __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
254
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_muted, false, "f");
255
+ }
256
+ /**
257
+ * Tear down all Web-Audio resources (worklet, analyser, gain, context) and reset state so {@link init} can be called again.
258
+ */
259
+ dispose() {
260
+ var _b, _c, _d, _e, _f, _g, _h;
261
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f") != null) {
262
+ clearInterval(__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"));
263
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftTimer, null, "f");
264
+ }
265
+ (_b = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _b === void 0 ? void 0 : _b.port.postMessage({ type: "fadeAndClear" });
266
+ (_c = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _c === void 0 ? void 0 : _c.port.postMessage({ type: "end" });
267
+ (_d = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _d === void 0 ? void 0 : _d.port.close();
268
+ (_e = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _e === void 0 ? void 0 : _e.disconnect();
269
+ (_f = __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f")) === null || _f === void 0 ? void 0 : _f.disconnect();
270
+ (_g = __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f")) === null || _g === void 0 ? void 0 : _g.disconnect();
271
+ (_h = __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f")) === null || _h === void 0 ? void 0 : _h.close().catch(() => void 0);
272
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_initialized, false, "f");
273
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
274
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fft, _a.emptyFft(), "f");
275
+ }
276
+ }
277
+ exports.EVIWebAudioPlayer = EVIWebAudioPlayer;
278
+ _a = EVIWebAudioPlayer, _EVIWebAudioPlayer_ctx = new WeakMap(), _EVIWebAudioPlayer_workletNode = new WeakMap(), _EVIWebAudioPlayer_analyserNode = new WeakMap(), _EVIWebAudioPlayer_gainNode = new WeakMap(), _EVIWebAudioPlayer_initialized = new WeakMap(), _EVIWebAudioPlayer_playing = new WeakMap(), _EVIWebAudioPlayer_muted = new WeakMap(), _EVIWebAudioPlayer_volume = new WeakMap(), _EVIWebAudioPlayer_fft = new WeakMap(), _EVIWebAudioPlayer_fftTimer = new WeakMap(), _EVIWebAudioPlayer_fftOptions = new WeakMap(), _EVIWebAudioPlayer_instances = new WeakSet(), _EVIWebAudioPlayer_linearHzToBark = function _EVIWebAudioPlayer_linearHzToBark(linearData, sampleRate) {
279
+ const maxFrequency = sampleRate / 2;
280
+ const frequencyResolution = maxFrequency / linearData.length;
281
+ return __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES).map((barkFreq) => {
282
+ var _b;
283
+ const linearDataIndex = Math.round(barkFreq / frequencyResolution);
284
+ const magnitude = (_b = linearData[linearDataIndex]) !== null && _b !== void 0 ? _b : 0;
285
+ return (magnitude / __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BYTE_MAX)) * 2;
286
+ });
287
+ }, _EVIWebAudioPlayer_startAnalyserPollingIfEnabled = function _EVIWebAudioPlayer_startAnalyserPollingIfEnabled() {
288
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f"))
289
+ return;
290
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"))
291
+ clearInterval(__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"));
292
+ const { interval, transform } = __classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f");
293
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftTimer, window.setInterval(() => {
294
+ const bins = new Uint8Array(__classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").frequencyBinCount);
295
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").getByteFrequencyData(bins);
296
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fft, transform(bins, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").sampleRate), "f");
297
+ this.dispatchEvent(new CustomEvent("fft", { detail: { fft: __classPrivateFieldGet(this, _EVIWebAudioPlayer_fft, "f") } }));
298
+ }, interval), "f");
299
+ }, _EVIWebAudioPlayer_emitError = function _EVIWebAudioPlayer_emitError(message) {
300
+ this.dispatchEvent(new CustomEvent("error", { detail: { message } }));
301
+ };
302
+ /** Default URL of the `audio-worklet.js` processor module, fetched from Hume AI’s CDN. */
303
+ _EVIWebAudioPlayer_DEFAULT_WORKLET_URL = { value: "https://storage.googleapis.com/evi-react-sdk-assets/audio-worklet-20250506.js" };
304
+ /** Default FFT size (power-of-two). */
305
+ _EVIWebAudioPlayer_DEFAULT_FFT_SIZE = { value: 2048 };
306
+ /** Default analyser poll interval (16 ms). */
307
+ _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL = { value: 16 };
308
+ /** Bark‑scale center frequencies (hz) used by the default transform. https://en.wikipedia.org/wiki/Bark_scale */
309
+ _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES = { value: [
310
+ 50, 150, 250, 350, 450, 570, 700, 840, 1000, 1170, 1370, 1600, 1850, 2150, 2500, 2900, 3400, 4000, 4800, 5800,
311
+ 7000, 8500, 10500, 13500,
312
+ ] };
313
+ /** Max byte magnitude (255) returned by `AnalyserNode.getByteFrequencyData`. */
314
+ _EVIWebAudioPlayer_BYTE_MAX = { value: 255 };
@@ -2,8 +2,6 @@
2
2
  * Fetches a new access token from the Hume API using the provided API key and Secret key.
3
3
  *
4
4
  * @param args - The arguments for the request.
5
- * @returns Promise that resolves to the new access token or null.
6
- * @throws If the base64 encoding fails.
7
5
  * @example
8
6
  * ```typescript
9
7
  * async function getToken() {
@@ -20,4 +18,4 @@ export declare const fetchAccessToken: ({ apiKey, secretKey, host, }: {
20
18
  apiKey: string;
21
19
  secretKey: string;
22
20
  host?: string;
23
- }) => Promise<string | null>;
21
+ }) => Promise<string>;
@@ -16,8 +16,6 @@ const zod_1 = require("zod");
16
16
  * Fetches a new access token from the Hume API using the provided API key and Secret key.
17
17
  *
18
18
  * @param args - The arguments for the request.
19
- * @returns Promise that resolves to the new access token or null.
20
- * @throws If the base64 encoding fails.
21
19
  * @example
22
20
  * ```typescript
23
21
  * async function getToken() {
@@ -33,7 +31,7 @@ const zod_1 = require("zod");
33
31
  const fetchAccessToken = (_a) => __awaiter(void 0, [_a], void 0, function* ({ apiKey, secretKey, host = "api.hume.ai", }) {
34
32
  const authString = `${apiKey}:${secretKey}`;
35
33
  const encoded = (0, base64Encode_1.base64Encode)(authString);
36
- const response = yield fetch(`https://${host}/oauth2-cc/token`, {
34
+ const res = yield fetch(`https://${host}/oauth2-cc/token`, {
37
35
  method: "POST",
38
36
  headers: {
39
37
  "Content-Type": "application/x-www-form-urlencoded",
@@ -43,32 +41,14 @@ const fetchAccessToken = (_a) => __awaiter(void 0, [_a], void 0, function* ({ ap
43
41
  grant_type: "client_credentials",
44
42
  }).toString(),
45
43
  cache: "no-cache",
44
+ });
45
+ return zod_1.z
46
+ .object({
47
+ access_token: zod_1.z.string(),
46
48
  })
47
- .then((res) => {
48
- // if reading response as json fails, return empty object
49
- // this can happen when request returns XML due to server error
50
- return res
51
- .json()
52
- .then((d) => d)
53
- .catch(() => ({}));
54
- })
55
- .then((data) => {
56
- // extract access_token value from received object
57
- return zod_1.z
58
- .object({
59
- access_token: zod_1.z.string(),
60
- })
61
- .transform((data) => {
62
- return data.access_token;
63
- })
64
- .safeParse(data);
49
+ .transform((data) => {
50
+ return data.access_token;
65
51
  })
66
- .catch(() => ({
67
- success: false,
68
- }));
69
- if (!response.success) {
70
- return null;
71
- }
72
- return response.data;
52
+ .parse(yield res.json());
73
53
  });
74
54
  exports.fetchAccessToken = fetchAccessToken;
@@ -8,3 +8,4 @@ export { fetchAccessToken } from "./fetchAccessToken";
8
8
  export { getAudioStream } from "./getAudioStream";
9
9
  export { MimeType, getBrowserSupportedMimeType } from "./getBrowserSupportedMimeType";
10
10
  export { HumeClient } from "./HumeClient";
11
+ export { EVIWebAudioPlayer, EVIWebAudioPlayerFFTOptions, EVIWebAudioPlayerOptions } from "./EVIWebAudioPlayer";
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.HumeClient = exports.getBrowserSupportedMimeType = exports.MimeType = exports.getAudioStream = exports.fetchAccessToken = exports.checkForAudioTracks = exports.ensureSingleValidAudioTrack = exports.convertBlobToBase64 = exports.convertBase64ToBlob = exports.base64Encode = exports.base64Decode = void 0;
3
+ exports.EVIWebAudioPlayer = exports.HumeClient = exports.getBrowserSupportedMimeType = exports.MimeType = exports.getAudioStream = exports.fetchAccessToken = exports.checkForAudioTracks = exports.ensureSingleValidAudioTrack = exports.convertBlobToBase64 = exports.convertBase64ToBlob = exports.base64Encode = exports.base64Decode = void 0;
4
4
  var base64Decode_1 = require("./base64Decode");
5
5
  Object.defineProperty(exports, "base64Decode", { enumerable: true, get: function () { return base64Decode_1.base64Decode; } });
6
6
  var base64Encode_1 = require("./base64Encode");
@@ -22,3 +22,5 @@ Object.defineProperty(exports, "MimeType", { enumerable: true, get: function ()
22
22
  Object.defineProperty(exports, "getBrowserSupportedMimeType", { enumerable: true, get: function () { return getBrowserSupportedMimeType_1.getBrowserSupportedMimeType; } });
23
23
  var HumeClient_1 = require("./HumeClient");
24
24
  Object.defineProperty(exports, "HumeClient", { enumerable: true, get: function () { return HumeClient_1.HumeClient; } });
25
+ var EVIWebAudioPlayer_1 = require("./EVIWebAudioPlayer");
26
+ Object.defineProperty(exports, "EVIWebAudioPlayer", { enumerable: true, get: function () { return EVIWebAudioPlayer_1.EVIWebAudioPlayer; } });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "hume",
3
- "version": "0.10.4-beta.5",
3
+ "version": "0.11.0",
4
4
  "private": false,
5
5
  "repository": "https://github.com/HumeAI/hume-typescript-sdk",
6
6
  "main": "./index.js",
@@ -0,0 +1,174 @@
1
+ import type { AudioOutput } from "api/resources/empathicVoice";
2
+ /**
3
+ * Options for configuring an {@link EVIWebAudioPlayer}.
4
+ *
5
+ * @default `{}` for sensible defaults.
6
+ */
7
+ export interface EVIWebAudioPlayerOptions {
8
+ /**
9
+ * Initial master gain, via a `GainNode`, from `0` (_silent_) to `1` (_full volume_).
10
+ * Values outside this range are clamped.
11
+ *
12
+ * @default 1
13
+ */
14
+ volume?: number;
15
+ /**
16
+ * Real-time FFT (frequency-domain) settings **only** for visualization.
17
+ *
18
+ * - **Disable**: omit or `{ enabled: false }` – no `AnalyserNode` is created.
19
+ * - **Defaults**: `{ enabled: true }` → 2048-point FFT at 16 ms (~60 Hz), mapped to 24 Bark bands.
20
+ * - **Custom**: supply {@link EVIWebAudioPlayerFFTOptions} to override `size`, `interval`, or `transform`.
21
+ */
22
+ fft?: EVIWebAudioPlayerFFTOptions;
23
+ }
24
+ /**
25
+ * FFT (frequency-domain) options for visualization.
26
+ *
27
+ * Pass `{ enabled: true }` for defaults, or omit/disable entirely for zero overhead.
28
+ */
29
+ export type EVIWebAudioPlayerFFTOptions = FftEnabled | FftDisabled;
30
+ type FftDisabled = {
31
+ /**
32
+ * Turn visualization data OFF—skip analyser creation entirely (zero extra CPU).
33
+ */
34
+ enabled: false;
35
+ };
36
+ type FftEnabled = {
37
+ /**
38
+ * Turn visualization data ON—create an `AnalyserNode`, poll it, and emit `'fft'` events.
39
+ */
40
+ enabled: true;
41
+ /**
42
+ * FFT size (power-of-two, 32 – 32768).
43
+ * Defaults to 2048 → 1024 bins (~ 23 Hz at 48 kHz).
44
+ * @default 2048
45
+ */
46
+ size?: number;
47
+ /**
48
+ * Polling interval, in **milliseconds**.
49
+ * Default 16 ms (~ 60 Hz) to sync with `requestAnimationFrame()`.
50
+ * @default 16
51
+ */
52
+ interval?: number;
53
+ /**
54
+ * Custom post-processing for raw magnitude data. Omit for built-in 24-band Bark mapping.
55
+ *
56
+ * @param bins PCM byte magnitudes (0 – 255) from `AnalyserNode`.
57
+ * @param sampleRate `AudioContext` sample rate in Hz.
58
+ * @returns Payload emitted with each `'fft'` event.
59
+ */
60
+ transform?: (bins: Uint8Array, sampleRate: number) => number[];
61
+ };
62
+ type PlayerEventMap = {
63
+ play: CustomEvent<{
64
+ id: string;
65
+ }>;
66
+ stop: CustomEvent<{
67
+ id: string;
68
+ }>;
69
+ fft: CustomEvent<{
70
+ fft: number[];
71
+ }>;
72
+ error: CustomEvent<{
73
+ message: string;
74
+ }>;
75
+ };
76
+ /**
77
+ * A sequential, glitch-free Web-Audio player for **EVI** audio output.
78
+ *
79
+ * - **Decoding & playback**: base-64 PCM chunks feed an `AudioWorkletNode` and play in order, without gaps.
80
+ * - **One-time init**: await {@link init} in a user-gesture to build audio graph and unlock the browser’s
81
+ * `AudioContext`; later calls are no-ops.
82
+ * - **Optional FFT**: `{ fft: { enabled: true } }` adds an `AnalyserNode` and emits `'fft'` events; omit to skip.
83
+ * - **Controls**: {@link setVolume}, {@link mute}, {@link unmute}, {@link stop}, {@link dispose}.
84
+ * - **Events**: listen for `'play'`, `'stop'`, `'fft'`, `'error'`.
85
+ */
86
+ export declare class EVIWebAudioPlayer extends EventTarget {
87
+ #private;
88
+ private readonly opts;
89
+ /** `true` while any clip is currently audible. */
90
+ get playing(): boolean;
91
+ /** `true` if gain is set to 0 via {@link mute}. */
92
+ get muted(): boolean;
93
+ /** Current output gain (0‑1). */
94
+ get volume(): number;
95
+ /** Most recent FFT frame (empty when analyser disabled). */
96
+ get fft(): number[];
97
+ constructor(opts?: EVIWebAudioPlayerOptions);
98
+ /**
99
+ * Generate an empty FFT frame array.
100
+ * Useful as an initial or placeholder FFT dataset before any real analysis.
101
+ *
102
+ * @returns A number[] filled with zeros, length equal to the Bark band count (24).
103
+ */
104
+ static emptyFft(): number[];
105
+ /**
106
+ * * Subscribes to a player event and returns `this` for chaining.
107
+ *
108
+ * @param type One of `'play'`, `'stop'`, `'fft'`, or `'error'`.
109
+ * @param fn Handler invoked with the event’s typed `detail` payload.
110
+ * @param opts Optional `AddEventListenerOptions` (e.g. `{ once: true }`).
111
+ *
112
+ * @example
113
+ * ```ts
114
+ * const player = new EVIWebAudioPlayer();
115
+ * player
116
+ * .on('play', e => console.log('play', e.detail.id))
117
+ * .on('stop', e => console.log('stop', e.detail.id))
118
+ * .on('fft', e => console.log('stop', e.detail.fft))
119
+ * .on('error', e => console.error('error', e.detail.message));
120
+ * ```
121
+ */
122
+ on<K extends keyof PlayerEventMap>(type: K, fn: (e: PlayerEventMap[K]) => void, opts?: AddEventListenerOptions): this;
123
+ /**
124
+ * Set up and start the player’s Web-Audio pipeline.
125
+ *
126
+ * - Creates a **suspended** `AudioContext`, loads the worklet processor, wires `AudioWorkletNode → (AnalyserNode?) → GainNode → destination`, then calls `resume()`.
127
+ * - Must be awaited inside a user-gesture (click/tap/key); later calls are no-ops.
128
+ * - If `fft.enabled` is `false` (or `fft` is omitted), no `AnalyserNode` or polling timer is created.
129
+ *
130
+ * **Safari quirk:** Safari locks an `AudioContext` to the device’s current sample rate at creation.
131
+ * If you open a Bluetooth headset mic afterward, the OS may switch to the 16 kHz HFP profile and down-sample playback, which sounds “telephone-y.”
132
+ * To avoid this, call `getUserMedia()` (or otherwise open audio input) **before** `init()`.
133
+ *
134
+ * @throws {Error} If the browser lacks `AudioWorklet` support, or if `AudioContext.resume()` is rejected (autoplay policy, device error).
135
+ */
136
+ init(): Promise<void>;
137
+ /**
138
+ * Queue one {@link AudioOutput} message for playback.
139
+ *
140
+ * Decodes the base-64 PCM data, sends it to the `AudioWorkletNode` for glitch-free, in-order playback, and emits `'play'` for the first chunk of a new stream.
141
+ *
142
+ * @param message The `AudioOutput` message received from EVI’s WebSocket.
143
+ *
144
+ * @see {@link https://dev.hume.ai/reference/empathic-voice-interface-evi/chat/chat#receive.Audio-Output.type API Reference}
145
+ */
146
+ enqueue(message: AudioOutput): Promise<void>;
147
+ /**
148
+ * Flush the worklet queue and output silence.
149
+ */
150
+ stop(): void;
151
+ /**
152
+ * Set the master gain ({@link volume}) to a value between `0` (_silent_) and `1` (_full volume_).
153
+ *
154
+ * - Clamps out-of-range values.
155
+ * - If called before {@link init}, stores volume for when `AudioContext` is created.
156
+ * - If currently {@link muted}, updates stored volume but keeps output silent until {@link unmute}.
157
+ *
158
+ * @param volume Desired gain; clamped to [0, 1].
159
+ */
160
+ setVolume(volume: number): void;
161
+ /**
162
+ * Mute output instantly by setting the gain to 0. Retains the last volume internally for later restore.
163
+ */
164
+ mute(): void;
165
+ /**
166
+ * Restore output gain to the last set volume (via setVolume).
167
+ */
168
+ unmute(): void;
169
+ /**
170
+ * Tear down all Web-Audio resources (worklet, analyser, gain, context) and reset state so {@link init} can be called again.
171
+ */
172
+ dispose(): void;
173
+ }
174
+ export {};
@@ -0,0 +1,314 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
12
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
13
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
14
+ return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
15
+ };
16
+ var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
17
+ if (kind === "m") throw new TypeError("Private method is not writable");
18
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
19
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
20
+ return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
21
+ };
22
+ var _EVIWebAudioPlayer_instances, _a, _EVIWebAudioPlayer_DEFAULT_WORKLET_URL, _EVIWebAudioPlayer_DEFAULT_FFT_SIZE, _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL, _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES, _EVIWebAudioPlayer_BYTE_MAX, _EVIWebAudioPlayer_ctx, _EVIWebAudioPlayer_workletNode, _EVIWebAudioPlayer_analyserNode, _EVIWebAudioPlayer_gainNode, _EVIWebAudioPlayer_initialized, _EVIWebAudioPlayer_playing, _EVIWebAudioPlayer_muted, _EVIWebAudioPlayer_volume, _EVIWebAudioPlayer_fft, _EVIWebAudioPlayer_fftTimer, _EVIWebAudioPlayer_fftOptions, _EVIWebAudioPlayer_linearHzToBark, _EVIWebAudioPlayer_startAnalyserPollingIfEnabled, _EVIWebAudioPlayer_emitError;
23
+ Object.defineProperty(exports, "__esModule", { value: true });
24
+ exports.EVIWebAudioPlayer = void 0;
25
+ const convertBase64ToBlob_1 = require("./convertBase64ToBlob");
26
+ /**
27
+ * A sequential, glitch-free Web-Audio player for **EVI** audio output.
28
+ *
29
+ * - **Decoding & playback**: base-64 PCM chunks feed an `AudioWorkletNode` and play in order, without gaps.
30
+ * - **One-time init**: await {@link init} in a user-gesture to build audio graph and unlock the browser’s
31
+ * `AudioContext`; later calls are no-ops.
32
+ * - **Optional FFT**: `{ fft: { enabled: true } }` adds an `AnalyserNode` and emits `'fft'` events; omit to skip.
33
+ * - **Controls**: {@link setVolume}, {@link mute}, {@link unmute}, {@link stop}, {@link dispose}.
34
+ * - **Events**: listen for `'play'`, `'stop'`, `'fft'`, `'error'`.
35
+ */
36
+ class EVIWebAudioPlayer extends EventTarget {
37
+ /** `true` while any clip is currently audible. */
38
+ get playing() {
39
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_playing, "f");
40
+ }
41
+ /** `true` if gain is set to 0 via {@link mute}. */
42
+ get muted() {
43
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_muted, "f");
44
+ }
45
+ /** Current output gain (0‑1). */
46
+ get volume() {
47
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f");
48
+ }
49
+ /** Most recent FFT frame (empty when analyser disabled). */
50
+ get fft() {
51
+ return __classPrivateFieldGet(this, _EVIWebAudioPlayer_fft, "f");
52
+ }
53
+ constructor(opts = {}) {
54
+ var _b, _c;
55
+ super();
56
+ _EVIWebAudioPlayer_instances.add(this);
57
+ this.opts = opts;
58
+ _EVIWebAudioPlayer_ctx.set(this, null);
59
+ _EVIWebAudioPlayer_workletNode.set(this, null);
60
+ _EVIWebAudioPlayer_analyserNode.set(this, null);
61
+ _EVIWebAudioPlayer_gainNode.set(this, null);
62
+ _EVIWebAudioPlayer_initialized.set(this, false);
63
+ _EVIWebAudioPlayer_playing.set(this, false);
64
+ _EVIWebAudioPlayer_muted.set(this, false);
65
+ _EVIWebAudioPlayer_volume.set(this, void 0);
66
+ _EVIWebAudioPlayer_fft.set(this, _a.emptyFft());
67
+ _EVIWebAudioPlayer_fftTimer.set(this, null);
68
+ _EVIWebAudioPlayer_fftOptions.set(this, null);
69
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_volume, (_b = opts.volume) !== null && _b !== void 0 ? _b : 1.0, "f");
70
+ // Resolve FFT options if enabled
71
+ if ((_c = opts.fft) === null || _c === void 0 ? void 0 : _c.enabled) {
72
+ const { size, interval, transform } = opts.fft;
73
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftOptions, {
74
+ size: size !== null && size !== void 0 ? size : __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_FFT_SIZE),
75
+ interval: interval !== null && interval !== void 0 ? interval : __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL),
76
+ transform: transform !== null && transform !== void 0 ? transform : __classPrivateFieldGet(_a, _a, "m", _EVIWebAudioPlayer_linearHzToBark),
77
+ }, "f");
78
+ }
79
+ }
80
+ /**
81
+ * Generate an empty FFT frame array.
82
+ * Useful as an initial or placeholder FFT dataset before any real analysis.
83
+ *
84
+ * @returns A number[] filled with zeros, length equal to the Bark band count (24).
85
+ */
86
+ static emptyFft() {
87
+ return Array(__classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES).length).fill(0);
88
+ }
89
+ /**
90
+ * * Subscribes to a player event and returns `this` for chaining.
91
+ *
92
+ * @param type One of `'play'`, `'stop'`, `'fft'`, or `'error'`.
93
+ * @param fn Handler invoked with the event’s typed `detail` payload.
94
+ * @param opts Optional `AddEventListenerOptions` (e.g. `{ once: true }`).
95
+ *
96
+ * @example
97
+ * ```ts
98
+ * const player = new EVIWebAudioPlayer();
99
+ * player
100
+ * .on('play', e => console.log('play', e.detail.id))
101
+ * .on('stop', e => console.log('stop', e.detail.id))
102
+ * .on('fft', e => console.log('stop', e.detail.fft))
103
+ * .on('error', e => console.error('error', e.detail.message));
104
+ * ```
105
+ */
106
+ on(type, fn, opts) {
107
+ super.addEventListener(type, fn, opts);
108
+ return this;
109
+ }
110
+ /**
111
+ * Set up and start the player’s Web-Audio pipeline.
112
+ *
113
+ * - Creates a **suspended** `AudioContext`, loads the worklet processor, wires `AudioWorkletNode → (AnalyserNode?) → GainNode → destination`, then calls `resume()`.
114
+ * - Must be awaited inside a user-gesture (click/tap/key); later calls are no-ops.
115
+ * - If `fft.enabled` is `false` (or `fft` is omitted), no `AnalyserNode` or polling timer is created.
116
+ *
117
+ * **Safari quirk:** Safari locks an `AudioContext` to the device’s current sample rate at creation.
118
+ * If you open a Bluetooth headset mic afterward, the OS may switch to the 16 kHz HFP profile and down-sample playback, which sounds “telephone-y.”
119
+ * To avoid this, call `getUserMedia()` (or otherwise open audio input) **before** `init()`.
120
+ *
121
+ * @throws {Error} If the browser lacks `AudioWorklet` support, or if `AudioContext.resume()` is rejected (autoplay policy, device error).
122
+ */
123
+ init() {
124
+ return __awaiter(this, void 0, void 0, function* () {
125
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_initialized, "f"))
126
+ return;
127
+ // Create the AudioContext
128
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_ctx, new AudioContext(), "f");
129
+ // Fail fast if AudioWorklet isn’t supported
130
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").audioWorklet) {
131
+ const msg = "AudioWorklet is not supported in this browser";
132
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, msg);
133
+ throw new Error(msg);
134
+ }
135
+ try {
136
+ // Build GainNode
137
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_gainNode, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").createGain(), "f");
138
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.value = __classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f");
139
+ // Build AnalyserNode (optional)
140
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f")) {
141
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_analyserNode, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").createAnalyser(), "f");
142
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").fftSize = __classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f").size;
143
+ }
144
+ // Loads the AudioWorklet processor module.
145
+ yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").audioWorklet.addModule(__classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_DEFAULT_WORKLET_URL));
146
+ // Build AudioWorkletNode
147
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_workletNode, new AudioWorkletNode(__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"), "audio-processor"), "f");
148
+ // When the worklet posts { type: "ended" }, mark playback stopped and emit a `'stop'` event.
149
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f").port.onmessage = (e) => {
150
+ if (e.data.type === "ended") {
151
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
152
+ this.dispatchEvent(new CustomEvent("stop", { detail: { id: "stream" } }));
153
+ }
154
+ };
155
+ // Audio graph nodes
156
+ const workletNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f"); // AudioWorkletNode (PCM processor)
157
+ const analyserNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f"); // Optional AnalyserNode (FFT)
158
+ const gainNode = __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f"); // GainNode (volume control)
159
+ const destination = __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").destination; // AudioDestinationNode (speakers)
160
+ // Analyser node is filtered out of audio graph if null (FFT disabled)
161
+ const audioGraph = [workletNode, analyserNode, gainNode, destination].filter(Boolean);
162
+ // Wire nodes: AudioWorkletNode → (AnalyserNode?) → GainNode → AudioDestinationNode
163
+ audioGraph.reduce((prev, next) => (prev.connect(next), next));
164
+ // If an analyser is configured, begin polling it at the resolved interval and dispatching `'fft'` events for each frame.
165
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_startAnalyserPollingIfEnabled).call(this);
166
+ // Resume the AudioContext now that the audio graph is fully wired.
167
+ // Browsers allow `resume()` only inside a user-gesture callback.
168
+ // Any rejection (autoplay policy, hardware issue, etc.) is caught by the outer catch-block below, which emits an 'error' event and re-throws.
169
+ yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").resume();
170
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_initialized, true, "f");
171
+ }
172
+ catch (err) {
173
+ const suffix = err instanceof Error ? `: ${err.message}` : String(err);
174
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, `Failed to initialize audio player${suffix}`);
175
+ throw err;
176
+ }
177
+ });
178
+ }
179
+ /**
180
+ * Queue one {@link AudioOutput} message for playback.
181
+ *
182
+ * Decodes the base-64 PCM data, sends it to the `AudioWorkletNode` for glitch-free, in-order playback, and emits `'play'` for the first chunk of a new stream.
183
+ *
184
+ * @param message The `AudioOutput` message received from EVI’s WebSocket.
185
+ *
186
+ * @see {@link https://dev.hume.ai/reference/empathic-voice-interface-evi/chat/chat#receive.Audio-Output.type API Reference}
187
+ */
188
+ enqueue(message) {
189
+ return __awaiter(this, void 0, void 0, function* () {
190
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_initialized, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f")) {
191
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, "Audio player is not initialized");
192
+ return;
193
+ }
194
+ try {
195
+ const { data, id } = message;
196
+ const blob = (0, convertBase64ToBlob_1.convertBase64ToBlob)(data);
197
+ const buffer = yield blob.arrayBuffer();
198
+ const audio = yield __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").decodeAudioData(buffer);
199
+ const pcmData = audio.getChannelData(0);
200
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f").port.postMessage({ type: "audio", data: pcmData });
201
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, true, "f");
202
+ this.dispatchEvent(new CustomEvent("play", { detail: { id } }));
203
+ }
204
+ catch (err) {
205
+ const msg = err instanceof Error ? err.message : "Unknown error";
206
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_emitError).call(this, `Failed to queue clip: ${msg}`);
207
+ }
208
+ });
209
+ }
210
+ /**
211
+ * Flush the worklet queue and output silence.
212
+ */
213
+ stop() {
214
+ var _b;
215
+ // Clear buffered audio from the worklet queue
216
+ (_b = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _b === void 0 ? void 0 : _b.port.postMessage({ type: "fadeAndClear" });
217
+ // Restart analyser polling so fft events continue after stopping or clearing the queue
218
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_instances, "m", _EVIWebAudioPlayer_startAnalyserPollingIfEnabled).call(this);
219
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
220
+ this.dispatchEvent(new CustomEvent("stop", { detail: { id: "manual" } }));
221
+ }
222
+ /**
223
+ * Set the master gain ({@link volume}) to a value between `0` (_silent_) and `1` (_full volume_).
224
+ *
225
+ * - Clamps out-of-range values.
226
+ * - If called before {@link init}, stores volume for when `AudioContext` is created.
227
+ * - If currently {@link muted}, updates stored volume but keeps output silent until {@link unmute}.
228
+ *
229
+ * @param volume Desired gain; clamped to [0, 1].
230
+ */
231
+ setVolume(volume) {
232
+ const clampedVolume = Math.max(0, Math.min(volume, 1));
233
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_volume, clampedVolume, "f");
234
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") && __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f") && !__classPrivateFieldGet(this, _EVIWebAudioPlayer_muted, "f")) {
235
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(clampedVolume, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
236
+ }
237
+ }
238
+ /**
239
+ * Mute output instantly by setting the gain to 0. Retains the last volume internally for later restore.
240
+ */
241
+ mute() {
242
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"))
243
+ return;
244
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(0, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
245
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_muted, true, "f");
246
+ }
247
+ /**
248
+ * Restore output gain to the last set volume (via setVolume).
249
+ */
250
+ unmute() {
251
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f"))
252
+ return;
253
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f").gain.setValueAtTime(__classPrivateFieldGet(this, _EVIWebAudioPlayer_volume, "f"), __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").currentTime);
254
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_muted, false, "f");
255
+ }
256
+ /**
257
+ * Tear down all Web-Audio resources (worklet, analyser, gain, context) and reset state so {@link init} can be called again.
258
+ */
259
+ dispose() {
260
+ var _b, _c, _d, _e, _f, _g, _h;
261
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f") != null) {
262
+ clearInterval(__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"));
263
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftTimer, null, "f");
264
+ }
265
+ (_b = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _b === void 0 ? void 0 : _b.port.postMessage({ type: "fadeAndClear" });
266
+ (_c = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _c === void 0 ? void 0 : _c.port.postMessage({ type: "end" });
267
+ (_d = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _d === void 0 ? void 0 : _d.port.close();
268
+ (_e = __classPrivateFieldGet(this, _EVIWebAudioPlayer_workletNode, "f")) === null || _e === void 0 ? void 0 : _e.disconnect();
269
+ (_f = __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f")) === null || _f === void 0 ? void 0 : _f.disconnect();
270
+ (_g = __classPrivateFieldGet(this, _EVIWebAudioPlayer_gainNode, "f")) === null || _g === void 0 ? void 0 : _g.disconnect();
271
+ (_h = __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f")) === null || _h === void 0 ? void 0 : _h.close().catch(() => void 0);
272
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_initialized, false, "f");
273
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_playing, false, "f");
274
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fft, _a.emptyFft(), "f");
275
+ }
276
+ }
277
+ exports.EVIWebAudioPlayer = EVIWebAudioPlayer;
278
+ _a = EVIWebAudioPlayer, _EVIWebAudioPlayer_ctx = new WeakMap(), _EVIWebAudioPlayer_workletNode = new WeakMap(), _EVIWebAudioPlayer_analyserNode = new WeakMap(), _EVIWebAudioPlayer_gainNode = new WeakMap(), _EVIWebAudioPlayer_initialized = new WeakMap(), _EVIWebAudioPlayer_playing = new WeakMap(), _EVIWebAudioPlayer_muted = new WeakMap(), _EVIWebAudioPlayer_volume = new WeakMap(), _EVIWebAudioPlayer_fft = new WeakMap(), _EVIWebAudioPlayer_fftTimer = new WeakMap(), _EVIWebAudioPlayer_fftOptions = new WeakMap(), _EVIWebAudioPlayer_instances = new WeakSet(), _EVIWebAudioPlayer_linearHzToBark = function _EVIWebAudioPlayer_linearHzToBark(linearData, sampleRate) {
279
+ const maxFrequency = sampleRate / 2;
280
+ const frequencyResolution = maxFrequency / linearData.length;
281
+ return __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES).map((barkFreq) => {
282
+ var _b;
283
+ const linearDataIndex = Math.round(barkFreq / frequencyResolution);
284
+ const magnitude = (_b = linearData[linearDataIndex]) !== null && _b !== void 0 ? _b : 0;
285
+ return (magnitude / __classPrivateFieldGet(_a, _a, "f", _EVIWebAudioPlayer_BYTE_MAX)) * 2;
286
+ });
287
+ }, _EVIWebAudioPlayer_startAnalyserPollingIfEnabled = function _EVIWebAudioPlayer_startAnalyserPollingIfEnabled() {
288
+ if (!__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f") || !__classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f"))
289
+ return;
290
+ if (__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"))
291
+ clearInterval(__classPrivateFieldGet(this, _EVIWebAudioPlayer_fftTimer, "f"));
292
+ const { interval, transform } = __classPrivateFieldGet(this, _EVIWebAudioPlayer_fftOptions, "f");
293
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fftTimer, window.setInterval(() => {
294
+ const bins = new Uint8Array(__classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").frequencyBinCount);
295
+ __classPrivateFieldGet(this, _EVIWebAudioPlayer_analyserNode, "f").getByteFrequencyData(bins);
296
+ __classPrivateFieldSet(this, _EVIWebAudioPlayer_fft, transform(bins, __classPrivateFieldGet(this, _EVIWebAudioPlayer_ctx, "f").sampleRate), "f");
297
+ this.dispatchEvent(new CustomEvent("fft", { detail: { fft: __classPrivateFieldGet(this, _EVIWebAudioPlayer_fft, "f") } }));
298
+ }, interval), "f");
299
+ }, _EVIWebAudioPlayer_emitError = function _EVIWebAudioPlayer_emitError(message) {
300
+ this.dispatchEvent(new CustomEvent("error", { detail: { message } }));
301
+ };
302
+ /** Default URL of the `audio-worklet.js` processor module, fetched from Hume AI’s CDN. */
303
+ _EVIWebAudioPlayer_DEFAULT_WORKLET_URL = { value: "https://storage.googleapis.com/evi-react-sdk-assets/audio-worklet-20250506.js" };
304
+ /** Default FFT size (power-of-two). */
305
+ _EVIWebAudioPlayer_DEFAULT_FFT_SIZE = { value: 2048 };
306
+ /** Default analyser poll interval (16 ms). */
307
+ _EVIWebAudioPlayer_DEFAULT_FFT_INTERVAL = { value: 16 };
308
+ /** Bark‑scale center frequencies (hz) used by the default transform. https://en.wikipedia.org/wiki/Bark_scale */
309
+ _EVIWebAudioPlayer_BARK_CENTER_FREQUENCIES = { value: [
310
+ 50, 150, 250, 350, 450, 570, 700, 840, 1000, 1170, 1370, 1600, 1850, 2150, 2500, 2900, 3400, 4000, 4800, 5800,
311
+ 7000, 8500, 10500, 13500,
312
+ ] };
313
+ /** Max byte magnitude (255) returned by `AnalyserNode.getByteFrequencyData`. */
314
+ _EVIWebAudioPlayer_BYTE_MAX = { value: 255 };
@@ -2,8 +2,6 @@
2
2
  * Fetches a new access token from the Hume API using the provided API key and Secret key.
3
3
  *
4
4
  * @param args - The arguments for the request.
5
- * @returns Promise that resolves to the new access token or null.
6
- * @throws If the base64 encoding fails.
7
5
  * @example
8
6
  * ```typescript
9
7
  * async function getToken() {
@@ -20,4 +18,4 @@ export declare const fetchAccessToken: ({ apiKey, secretKey, host, }: {
20
18
  apiKey: string;
21
19
  secretKey: string;
22
20
  host?: string;
23
- }) => Promise<string | null>;
21
+ }) => Promise<string>;
@@ -16,8 +16,6 @@ const zod_1 = require("zod");
16
16
  * Fetches a new access token from the Hume API using the provided API key and Secret key.
17
17
  *
18
18
  * @param args - The arguments for the request.
19
- * @returns Promise that resolves to the new access token or null.
20
- * @throws If the base64 encoding fails.
21
19
  * @example
22
20
  * ```typescript
23
21
  * async function getToken() {
@@ -33,7 +31,7 @@ const zod_1 = require("zod");
33
31
  const fetchAccessToken = (_a) => __awaiter(void 0, [_a], void 0, function* ({ apiKey, secretKey, host = "api.hume.ai", }) {
34
32
  const authString = `${apiKey}:${secretKey}`;
35
33
  const encoded = (0, base64Encode_1.base64Encode)(authString);
36
- const response = yield fetch(`https://${host}/oauth2-cc/token`, {
34
+ const res = yield fetch(`https://${host}/oauth2-cc/token`, {
37
35
  method: "POST",
38
36
  headers: {
39
37
  "Content-Type": "application/x-www-form-urlencoded",
@@ -43,32 +41,14 @@ const fetchAccessToken = (_a) => __awaiter(void 0, [_a], void 0, function* ({ ap
43
41
  grant_type: "client_credentials",
44
42
  }).toString(),
45
43
  cache: "no-cache",
44
+ });
45
+ return zod_1.z
46
+ .object({
47
+ access_token: zod_1.z.string(),
46
48
  })
47
- .then((res) => {
48
- // if reading response as json fails, return empty object
49
- // this can happen when request returns XML due to server error
50
- return res
51
- .json()
52
- .then((d) => d)
53
- .catch(() => ({}));
54
- })
55
- .then((data) => {
56
- // extract access_token value from received object
57
- return zod_1.z
58
- .object({
59
- access_token: zod_1.z.string(),
60
- })
61
- .transform((data) => {
62
- return data.access_token;
63
- })
64
- .safeParse(data);
49
+ .transform((data) => {
50
+ return data.access_token;
65
51
  })
66
- .catch(() => ({
67
- success: false,
68
- }));
69
- if (!response.success) {
70
- return null;
71
- }
72
- return response.data;
52
+ .parse(yield res.json());
73
53
  });
74
54
  exports.fetchAccessToken = fetchAccessToken;
@@ -8,3 +8,4 @@ export { fetchAccessToken } from "./fetchAccessToken";
8
8
  export { getAudioStream } from "./getAudioStream";
9
9
  export { MimeType, getBrowserSupportedMimeType } from "./getBrowserSupportedMimeType";
10
10
  export { HumeClient } from "./HumeClient";
11
+ export { EVIWebAudioPlayer, EVIWebAudioPlayerFFTOptions, EVIWebAudioPlayerOptions } from "./EVIWebAudioPlayer";
package/wrapper/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.HumeClient = exports.getBrowserSupportedMimeType = exports.MimeType = exports.getAudioStream = exports.fetchAccessToken = exports.checkForAudioTracks = exports.ensureSingleValidAudioTrack = exports.convertBlobToBase64 = exports.convertBase64ToBlob = exports.base64Encode = exports.base64Decode = void 0;
3
+ exports.EVIWebAudioPlayer = exports.HumeClient = exports.getBrowserSupportedMimeType = exports.MimeType = exports.getAudioStream = exports.fetchAccessToken = exports.checkForAudioTracks = exports.ensureSingleValidAudioTrack = exports.convertBlobToBase64 = exports.convertBase64ToBlob = exports.base64Encode = exports.base64Decode = void 0;
4
4
  var base64Decode_1 = require("./base64Decode");
5
5
  Object.defineProperty(exports, "base64Decode", { enumerable: true, get: function () { return base64Decode_1.base64Decode; } });
6
6
  var base64Encode_1 = require("./base64Encode");
@@ -22,3 +22,5 @@ Object.defineProperty(exports, "MimeType", { enumerable: true, get: function ()
22
22
  Object.defineProperty(exports, "getBrowserSupportedMimeType", { enumerable: true, get: function () { return getBrowserSupportedMimeType_1.getBrowserSupportedMimeType; } });
23
23
  var HumeClient_1 = require("./HumeClient");
24
24
  Object.defineProperty(exports, "HumeClient", { enumerable: true, get: function () { return HumeClient_1.HumeClient; } });
25
+ var EVIWebAudioPlayer_1 = require("./EVIWebAudioPlayer");
26
+ Object.defineProperty(exports, "EVIWebAudioPlayer", { enumerable: true, get: function () { return EVIWebAudioPlayer_1.EVIWebAudioPlayer; } });