@juandinella/audio-bands 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -4,19 +4,17 @@
4
4
 
5
5
  **Demo**: [audio-bands.juandinella.com](https://audio-bands.juandinella.com)
6
6
 
7
- Headless audio frequency analysis for the browser. Get real-time `bass`, `mid`, and `high` values normalized to `0–1` from a music track, a microphone, or both at the same time. No renderer included.
7
+ Headless audio analysis for the browser. Get normalized `bass`, `mid`, `high`, custom named bands, raw FFT bins, or time-domain waveform data without shipping a renderer.
8
8
 
9
9
  ```ts
10
10
  const { bass, mid, high } = audio.getBands();
11
- // bass: 0.73, mid: 0.41, high: 0.12
12
-
11
+ const custom = audio.getCustomBands();
13
12
  const fft = audio.getFftData();
14
- // Uint8Array(128) — raw frequency bins, 0–255 each
15
13
  ```
16
14
 
17
15
  ## Why
18
16
 
19
- Every audio visualization library either handles only playback (no analysis) or draws its own canvas and hides the data. This one only gives you numbers.
17
+ Most audio libraries either only play audio or immediately draw a canvas for you. This one stays lower level: it gives you usable analysis data and lets you decide how to render it.
20
18
 
21
19
  ## Install
22
20
 
@@ -24,87 +22,76 @@ Every audio visualization library either handles only playback (no analysis) or
24
22
  npm install @juandinella/audio-bands
25
23
  ```
26
24
 
27
- The root entrypoint is framework-agnostic. If you use the React hook, install `react` and import it from `@juandinella/audio-bands/react`.
25
+ ### Entry points
26
+
27
+ - `@juandinella/audio-bands`: main framework-agnostic export
28
+ - `@juandinella/audio-bands/core`: explicit core-only entry
29
+ - `@juandinella/audio-bands/react`: React hook
30
+
31
+ If you use the React hook, install `react` as well.
28
32
 
29
33
  ## Usage
30
34
 
31
35
  ### Vanilla JS
32
36
 
33
- Works in Vue, Svelte, plain HTML — anything.
34
-
35
- ```js
37
+ ```ts
36
38
  import { AudioBands } from '@juandinella/audio-bands';
37
39
 
38
40
  const audio = new AudioBands({
39
- onPlay: () => console.log('playing'),
40
- onPause: () => console.log('paused'),
41
- onError: () => console.error('failed to load'),
42
- onMicStart: () => console.log('mic on'),
43
- onMicStop: () => console.log('mic off'),
41
+ music: {
42
+ fftSize: 512,
43
+ smoothingTimeConstant: 0.7,
44
+ },
45
+ customBands: {
46
+ presence: { from: 0.25, to: 0.5 },
47
+ air: { from: 0.5, to: 1 },
48
+ },
49
+ onLoadError: (error) => console.error('track error', error),
50
+ onMicError: (error) => console.error('mic error', error),
44
51
  });
45
52
 
46
53
  await audio.load('/track.mp3');
47
54
 
48
- // Call inside your animation loop
49
55
  function loop() {
50
56
  const { bass, mid, high, overall } = audio.getBands();
51
- // drive your canvas, SVG, CSS, WebGL — whatever
57
+ const custom = audio.getCustomBands();
58
+ const fft = audio.getFftData();
59
+ const waveform = audio.getWaveform();
52
60
 
53
- const fft = audio.getFftData(); // raw bins for spectrum visualizations
54
61
  requestAnimationFrame(loop);
55
62
  }
56
- requestAnimationFrame(loop);
57
63
 
58
- // Clean up when done
59
- audio.destroy();
64
+ requestAnimationFrame(loop);
60
65
  ```
61
66
 
62
67
  ### React hook
63
68
 
64
69
  ```tsx
65
70
  import { useAudioBands } from '@juandinella/audio-bands/react';
66
- import { useEffect, useRef } from 'react';
67
71
 
68
72
  function Visualizer() {
69
- const { loadTrack, togglePlayPause, toggleMic, getBands, isPlaying } =
70
- useAudioBands();
71
- const canvasRef = useRef<HTMLCanvasElement>(null);
72
-
73
- useEffect(() => {
74
- loadTrack('/track.mp3');
75
- }, []);
76
-
77
- useEffect(() => {
78
- const canvas = canvasRef.current!;
79
- const ctx = canvas.getContext('2d')!;
80
- let raf: number;
81
-
82
- function loop() {
83
- const { bass, mid, high } = getBands();
84
-
85
- ctx.clearRect(0, 0, canvas.width, canvas.height);
86
- ctx.beginPath();
87
- ctx.arc(
88
- canvas.width / 2,
89
- canvas.height / 2,
90
- 20 + bass * 80,
91
- 0,
92
- Math.PI * 2,
93
- );
94
- ctx.fill();
95
-
96
- raf = requestAnimationFrame(loop);
97
- }
98
-
99
- raf = requestAnimationFrame(loop);
100
- return () => cancelAnimationFrame(raf);
101
- }, [getBands]);
73
+ const {
74
+ isPlaying,
75
+ hasTrack,
76
+ loadError,
77
+ micError,
78
+ loadTrack,
79
+ togglePlayPause,
80
+ toggleMic,
81
+ getBands,
82
+ getCustomBands,
83
+ } = useAudioBands({
84
+ customBands: {
85
+ presence: { from: 0.25, to: 0.5 },
86
+ },
87
+ });
102
88
 
103
89
  return (
104
90
  <>
105
- <canvas ref={canvasRef} width={400} height={400} />
91
+ <button onClick={() => loadTrack('/track.mp3')}>load</button>
106
92
  <button onClick={togglePlayPause}>{isPlaying ? 'Pause' : 'Play'}</button>
107
93
  <button onClick={toggleMic}>Toggle mic</button>
94
+ <pre>{JSON.stringify({ hasTrack, loadError, micError, ...getBands(), ...getCustomBands() }, null, 2)}</pre>
108
95
  </>
109
96
  );
110
97
  }
@@ -113,93 +100,135 @@ function Visualizer() {
113
100
  ### Mic input
114
101
 
115
102
  ```ts
116
- // Enable mic — browser will ask for permission
117
103
  await audio.enableMic();
118
104
 
119
- // Get frequency bands from the mic
120
- const { bass } = audio.getBands('mic');
105
+ const micBands = audio.getBands('mic');
106
+ const micCustomBands = audio.getCustomBands('mic');
107
+ const waveform = audio.getWaveform('mic');
108
+ ```
121
109
 
122
- // Get raw waveform data (time-domain)
123
- const waveform = audio.getWaveform(); // Uint8Array | null
110
+ ## When To Use Bands Vs FFT
124
111
 
125
- // Disable mic and stop the stream
126
- audio.disableMic();
127
- ```
112
+ Use `getBands()` when you want stable, simple control signals:
113
+
114
+ - pulsing a blob with low-end energy
115
+ - scaling UI based on overall intensity
116
+ - animating typography or CSS variables
117
+ - driving scenes where three broad zones are enough
118
+
119
+ Use `getCustomBands()` when the default bass/mid/high split is too coarse, but you still want named, high-level buckets:
120
+
121
+ - separate `presence`, `air`, or `sub`
122
+ - tune bands to your own design system or animation logic
123
+ - keep your render code semantic instead of index-based
124
+
125
+ Use `getFftData()` when you need bin-level detail:
126
+
127
+ - bar visualizers
128
+ - line spectrums
129
+ - log interpolation
130
+ - any renderer that maps directly over bins
131
+
132
+ Rule of thumb:
133
+
134
+ - `getBands()` for product UI
135
+ - `getCustomBands()` for art direction
136
+ - `getFftData()` for visualizers
128
137
 
129
138
  ## API
130
139
 
131
- ### `AudioBands` (vanilla JS)
140
+ ### `AudioBands`
132
141
 
133
142
  ```ts
134
- new AudioBands(callbacks?: AudioBandsCallbacks)
143
+ new AudioBands(options?: AudioBandsOptions)
135
144
  ```
136
145
 
137
- | Method | Description |
138
- | --------------------- | --------------------------------------------------------------------------------------------- |
139
- | `load(url)` | Load and play an audio file. Resolves when playback starts. |
140
- | `togglePlayPause()` | Toggle playback. |
141
- | `enableMic()` | Request mic access and start analysis. |
142
- | `disableMic()` | Stop mic stream and clean up. |
143
- | `getBands(source?)` | Returns `Bands` for `'music'` (default) or `'mic'`. Call inside RAF. |
144
- | `getFftData(source?)` | Returns raw `Uint8Array` of frequency bins (0–255) for `'music'` or `'mic'`. Call inside RAF. |
145
- | `getWaveform()` | Returns raw time-domain `Uint8Array` from mic. Call inside RAF. |
146
- | `destroy()` | Stop playback, release mic, close AudioContext. |
146
+ #### Methods
147
147
 
148
- ### `useAudioBands()` (React)
148
+ | Method | Description |
149
+ | ----------------------- | ----------- |
150
+ | `load(url)` | Load and play a track. Rejects with `AudioBandsError` on failure. |
151
+ | `togglePlayPause()` | Toggle the current track. |
152
+ | `enableMic()` | Request microphone access and start mic analysis. Rejects with `AudioBandsError` on failure. |
153
+ | `disableMic()` | Stop mic input and clean up the stream. |
154
+ | `getBands(source?)` | Returns normalized `{ bass, mid, high, overall }`. |
155
+ | `getCustomBands(source?)` | Returns normalized values for configured custom bands. |
156
+ | `getFftData(source?)` | Returns raw `Uint8Array` frequency bins. |
157
+ | `getWaveform(source?)` | Returns raw time-domain data for `'music'` or `'mic'`. |
158
+ | `getState()` | Returns the current playback/mic/error state. |
159
+ | `destroy()` | Stop playback, release the mic and close the `AudioContext`. |
149
160
 
150
- Same capabilities as `AudioBands`. `destroy()` is called automatically on unmount.
161
+ ### `useAudioBands()`
151
162
 
152
163
  ```ts
153
164
  const {
154
165
  isPlaying,
155
166
  micActive,
167
+ hasTrack,
156
168
  audioError,
169
+ loadError,
170
+ micError,
171
+ state,
157
172
  loadTrack,
158
173
  togglePlayPause,
159
174
  toggleMic,
160
175
  getBands,
176
+ getCustomBands,
161
177
  getFftData,
162
178
  getWaveform,
163
- } = useAudioBands();
179
+ } = useAudioBands(options);
164
180
  ```
165
181
 
166
- Import it from:
182
+ ### `AudioBandsOptions`
167
183
 
168
184
  ```ts
169
- import { useAudioBands } from '@juandinella/audio-bands/react';
170
- ```
171
-
172
- ### `Bands`
173
-
174
- ```ts
175
- type Bands = {
176
- bass: number; // 0–1 — low frequencies (0–8% of spectrum)
177
- mid: number; // 0–1 — mid frequencies (8–40%)
178
- high: number; // 0–1 — high frequencies (40–100%)
179
- overall: number; // 0–1 — weighted mix: bass×0.5 + mid×0.3 + high×0.2
185
+ type AudioBandsOptions = {
186
+ music?: {
187
+ fftSize?: number;
188
+ smoothingTimeConstant?: number;
189
+ };
190
+ mic?: {
191
+ fftSize?: number;
192
+ smoothingTimeConstant?: number;
193
+ };
194
+ bandRanges?: {
195
+ bass?: { from: number; to: number };
196
+ mid?: { from: number; to: number };
197
+ high?: { from: number; to: number };
198
+ };
199
+ customBands?: Record<string, { from: number; to: number }>;
200
+ onError?: (error: AudioBandsError) => void;
201
+ onLoadError?: (error: AudioBandsError) => void;
202
+ onMicError?: (error: AudioBandsError) => void;
203
+ onStateChange?: (state: AudioBandsState) => void;
204
+ onPlay?: () => void;
205
+ onPause?: () => void;
206
+ onMicStart?: () => void;
207
+ onMicStop?: () => void;
180
208
  };
181
209
  ```
182
210
 
183
- ### `AudioBandsCallbacks`
211
+ ### `AudioBandsState`
184
212
 
185
213
  ```ts
186
- type AudioBandsCallbacks = {
187
- onPlay?: () => void;
188
- onPause?: () => void;
189
- onError?: (error?: unknown) => void;
190
- onMicStart?: () => void;
191
- onMicStop?: () => void;
214
+ type AudioBandsState = {
215
+ isPlaying: boolean;
216
+ micActive: boolean;
217
+ hasTrack: boolean; // a track source is assigned, even if playback later fails
218
+ loadError: AudioBandsError | null;
219
+ micError: AudioBandsError | null;
192
220
  };
193
221
  ```
194
222
 
195
223
  ## Notes
196
224
 
197
- - `AudioContext` is created lazily on the first call to `load()` or `enableMic()`. Browsers require a user gesture before audio can start.
198
- - The root package export does not depend on React. The React hook lives at `@juandinella/audio-bands/react`.
199
- - The mic analyser is **not** connected to `AudioContext.destination`, so there is no feedback loop.
200
- - `getBands()`, `getFftData()`, and `getWaveform()` read live data from the audio graph. Call them inside `requestAnimationFrame`, not in response to React state.
201
- - `getFftData()` returns the same underlying buffer on every call. Copy it if you need to compare frames: `Array.from(fft)`.
202
- - `load()` and `enableMic()` reject on browser playback/permission errors. Use `try/catch` if you need custom handling.
225
+ - `AudioContext` is created lazily on the first call to `load()` or `enableMic()`.
226
+ - `hasTrack` means a track source is currently assigned to the instance. It can still be `true` if `play()` fails due to autoplay policy or another playback error.
227
+ - The mic analyser is not connected to `AudioContext.destination`, so it will not feed back into the speakers.
228
+ - `getBands()`, `getCustomBands()`, `getFftData()`, and `getWaveform()` read live data. Call them inside `requestAnimationFrame`, not from React state updates.
229
+ - `getFftData()` returns the same underlying buffer on each call. Copy it if you need frame-to-frame comparisons.
230
+ - `fftSize` must be a power of two between `32` and `32768`.
231
+ - Band ranges are normalized from `0` to `1`, where `0` is the start of the analyser spectrum and `1` is the end.
203
232
 
204
233
  ## License
205
234
 
@@ -0,0 +1,358 @@
1
// src/errors.ts
// Typed error carrying a failure domain ("kind") and a stable,
// machine-readable code, plus the underlying cause when one exists.
var AudioBandsError = class extends Error {
  constructor(kind, code, message, cause) {
    super(message);
    this.name = "AudioBandsError";
    // Expose domain, code and cause so callers can branch on them.
    Object.assign(this, { kind, code, cause });
  }
};
11
+
12
// src/core.ts
// Analyser defaults for the music (track) branch of the audio graph.
var DEFAULT_MUSIC_ANALYSER = {
  fftSize: 256,
  smoothingTimeConstant: 0.85
};
// Analyser defaults for the microphone branch (slightly less smoothing).
var DEFAULT_MIC_ANALYSER = {
  fftSize: 256,
  smoothingTimeConstant: 0.8
};
// Classic three-way split of the spectrum as normalized 0..1 ranges.
var DEFAULT_CLASSIC_RANGES = {
  bass: { from: 0, to: 0.08 },
  mid: { from: 0.08, to: 0.4 },
  high: { from: 0.4, to: 1 }
};
// Silent band result used when no analyser data is available yet.
var ZERO = { bass: 0, mid: 0, high: 0, overall: 0 };
27
// Arithmetic mean of arr[from..to) — callers guarantee to > from.
function avg(arr, from, to) {
  let total = 0;
  for (let index = from; index < to; index += 1) {
    total += arr[index];
  }
  return total / (to - from);
}
32
// True when value is a positive integral power of two.
// The bit trick (v & (v - 1)) clears the lowest set bit, so it is 0 only
// for powers of two — but on its own it also accepts 0 and (via 32-bit
// bitwise wraparound) -2147483648, hence the explicit positivity guard.
// Call sites additionally range-check fftSize, so this stays compatible.
function isPowerOfTwo(value) {
  return value > 0 && (value & value - 1) === 0;
}
35
// Merge a user analyser config with defaults and validate the result.
// Throws AudioBandsError("config", "invalid_config", ...) on bad values.
function normalizeAnalyserConfig(config, fallback) {
  const fftSize = config?.fftSize ?? fallback.fftSize;
  const smoothingTimeConstant = config?.smoothingTimeConstant ?? fallback.smoothingTimeConstant;
  if (!Number.isInteger(fftSize) || fftSize < 32 || fftSize > 32768 || !isPowerOfTwo(fftSize)) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      "fftSize must be a power of two between 32 and 32768"
    );
  }
  // BUGFIX: the range check is written in negated positive form so that
  // NaN is rejected too (NaN < 0 and NaN > 1 are both false, so the old
  // `< 0 || > 1` form silently accepted a NaN smoothingTimeConstant).
  if (typeof smoothingTimeConstant !== "number" || !(smoothingTimeConstant >= 0 && smoothingTimeConstant <= 1)) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      "smoothingTimeConstant must be between 0 and 1"
    );
  }
  return { fftSize, smoothingTimeConstant };
}
54
// Resolve a band range, falling back to the classic default for known
// names, and validate that 0 <= from < to <= 1.
// Throws AudioBandsError("config", "invalid_config", ...) when invalid.
function normalizeRange(name, range) {
  const normalized = range ?? DEFAULT_CLASSIC_RANGES[name];
  const from = normalized?.from;
  const to = normalized?.to;
  // BUGFIX: validity is expressed in positive form so NaN endpoints are
  // rejected — with the old `from < 0 || to > 1 || from >= to` form every
  // comparison against NaN was false and a NaN range slipped through.
  const valid = typeof from === "number" && typeof to === "number" && from >= 0 && to <= 1 && from < to;
  if (!valid) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      `Band range "${name}" must satisfy 0 <= from < to <= 1`
    );
  }
  return normalized;
}
65
// Validate/resolve all three classic band ranges in one pass.
function normalizeClassicRanges(ranges) {
  const bass = normalizeRange("bass", ranges?.bass);
  const mid = normalizeRange("mid", ranges?.mid);
  const high = normalizeRange("high", ranges?.high);
  return { bass, mid, high };
}
72
// Validate every user-defined custom band; {} when none are configured.
function normalizeCustomBands(customBands) {
  if (!customBands) return {};
  const result = {};
  for (const [name, range] of Object.entries(customBands)) {
    result[name] = normalizeRange(name, range);
  }
  return result;
}
78
// Map a normalized { from, to } range onto bin indexes [from, to) of a
// buffer of length len, always spanning at least one bin (to > from).
function getIndexes(len, range) {
  let from = Math.floor(len * range.from);
  if (from > len - 1) from = len - 1;
  if (from < 0) from = 0;
  let to = Math.floor(len * range.to);
  if (to > len) to = len;
  if (to < from + 1) to = from + 1;
  return [from, to];
}
83
// Average byte magnitude over a normalized range, scaled to 0..1.
function getRangeValue(data, range) {
  const [start, end] = getIndexes(data.length, range);
  const mean = avg(data, start, end);
  return mean / 255;
}
87
// Pull the current frequency snapshot from an analyser into `data`
// and hand the same buffer back for convenient chaining.
function fillFrequencyData(analyser, data) {
  analyser.getByteFrequencyData(data);
  return data;
}
91
// Reduce a frequency snapshot to the classic bands plus a weighted
// "overall" mix (deliberately bass-heavy: 0.5 / 0.3 / 0.2).
function computeBands(data, ranges) {
  const bass = getRangeValue(data, ranges.bass);
  const mid = getRangeValue(data, ranges.mid);
  const high = getRangeValue(data, ranges.high);
  const overall = bass * 0.5 + mid * 0.3 + high * 0.2;
  return { bass, mid, high, overall };
}
102
// Compute every configured custom band from one frequency snapshot.
function computeCustomBands(data, ranges) {
  const result = {};
  for (const [name, range] of Object.entries(ranges)) {
    result[name] = getRangeValue(data, range);
  }
  return result;
}
107
// Shallow copy so callers can't mutate the instance's internal state.
function cloneState(state) {
  return Object.assign({}, state);
}
110
// Headless Web Audio analyser. Owns a lazily-created AudioContext with a
// music (HTMLAudioElement) branch and an optional microphone branch, and
// exposes normalized band energies, raw FFT bins and waveform snapshots.
var AudioBands = class {
  /**
   * @param options AudioBandsOptions — analyser configs, band ranges and
   * lifecycle/error callbacks. Configuration is validated eagerly; an
   * AudioBandsError("config", "invalid_config", ...) is thrown on bad input.
   */
  constructor(options = {}) {
    this.state = {
      isPlaying: false,
      micActive: false,
      hasTrack: false,
      loadError: null,
      micError: null
    };
    // Audio graph members stay null until ensureCtx()/enableMic() run.
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.musicWaveformData = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.audioEl = null;
    this.musicSource = null;
    this.micSource = null;
    this.micStream = null;
    this.destroyed = false;
    this.options = options;
    // Fail fast on invalid configuration before any audio work happens.
    this.musicConfig = normalizeAnalyserConfig(options.music, DEFAULT_MUSIC_ANALYSER);
    this.micConfig = normalizeAnalyserConfig(options.mic, DEFAULT_MIC_ANALYSER);
    this.classicRanges = normalizeClassicRanges(options.bandRanges);
    this.customBandRanges = normalizeCustomBands(options.customBands);
  }
  /** Shallow snapshot of playback/mic/error state (safe for callers to mutate). */
  getState() {
    return cloneState(this.state);
  }
  /**
   * Normalized values for the user-configured custom bands.
   * Returns all-zero values (same keys) when the source has no analyser yet.
   */
  getCustomBands(source = "music") {
    const data = this.readFrequencyData(source);
    if (!data) return computeCustomBands(new Uint8Array(1), this.customBandRanges);
    return computeCustomBands(data, this.customBandRanges);
  }
  /**
   * Load a track URL into a fresh <audio> element and start playback.
   * Rejects with AudioBandsError on context or playback failure; note that
   * hasTrack stays true even when play() is blocked (e.g. autoplay policy).
   */
  async load(url) {
    let ctx;
    try {
      ctx = this.ensureCtx();
    } catch (error) {
      throw this.handleError("load", error);
    }
    this.teardownMusic();
    const audio = new Audio();
    audio.crossOrigin = "anonymous";
    audio.src = url;
    audio.loop = true;
    this.audioEl = audio;
    this.setState({ hasTrack: true, loadError: null });
    const source = ctx.createMediaElementSource(audio);
    source.connect(this.musicAnalyser);
    this.musicSource = source;
    try {
      await audio.play();
      this.setState({ isPlaying: true, loadError: null });
      this.options.onPlay?.();
    } catch (error) {
      throw this.handleError("load", error, "load_error");
    }
  }
  /** Toggle the current track; no-op when no track is loaded. */
  togglePlayPause() {
    const audio = this.audioEl;
    if (!audio) return;
    if (audio.paused) {
      // play() is async and may be rejected by the browser; surface that
      // through the normal error path instead of an unhandled rejection.
      void audio.play().then(() => {
        this.setState({ isPlaying: true, loadError: null });
        this.options.onPlay?.();
      }).catch((error) => {
        this.handleError("load", error, "playback_error");
      });
      return;
    }
    audio.pause();
    this.setState({ isPlaying: false });
    this.options.onPause?.();
  }
  /**
   * Request microphone access and wire it into its own analyser.
   * The mic analyser is never connected to the destination (no feedback).
   * Rejects with AudioBandsError on permission/context failure.
   */
  async enableMic() {
    let ctx;
    try {
      ctx = this.ensureCtx();
    } catch (error) {
      throw this.handleError("mic", error);
    }
    if (this.micStream) return;
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
        video: false
      });
      this.micStream = stream;
      const analyser = this.createAnalyser(ctx, this.micConfig);
      this.micAnalyser = analyser;
      this.micData = new Uint8Array(analyser.frequencyBinCount);
      this.micWaveformData = new Uint8Array(analyser.fftSize);
      const source = ctx.createMediaStreamSource(stream);
      source.connect(analyser);
      this.micSource = source;
      this.setState({ micActive: true, micError: null });
      this.options.onMicStart?.();
    } catch (error) {
      // BUGFIX: if setup fails after getUserMedia resolved, the acquired
      // stream used to leak (tracks kept running, micStream left set so a
      // retry returned early in a half-initialized state). Release the
      // partial mic resources before reporting the error.
      this.releaseMicResources();
      throw this.handleError("mic", error, "mic_error");
    }
  }
  /** Stop the mic stream, drop its graph nodes and notify listeners. */
  disableMic() {
    const hadMic = Boolean(this.micStream || this.micSource || this.micAnalyser);
    this.releaseMicResources();
    this.setState({ micActive: false });
    if (hadMic) this.options.onMicStop?.();
  }
  /** Internal: stop tracks and null mic graph refs without firing callbacks. */
  releaseMicResources() {
    this.micStream?.getTracks().forEach((track) => track.stop());
    this.micStream = null;
    try {
      this.micSource?.disconnect();
    } catch {
      // Node may already be disconnected — ignore.
    }
    this.micSource = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
  }
  /** Classic bass/mid/high/overall values; all zeros when source inactive. */
  getBands(source = "music") {
    const data = this.readFrequencyData(source);
    if (!data) return { ...ZERO };
    return computeBands(data, this.classicRanges);
  }
  /** Raw byte frequency bins (shared buffer — copy to keep a frame), or null. */
  getFftData(source = "music") {
    return this.readFrequencyData(source);
  }
  /** Raw byte time-domain samples, or null when the source is inactive. */
  getWaveform(source = "music") {
    return this.readWaveformData(source);
  }
  /** Stop playback, release the mic and close the AudioContext. Idempotent. */
  destroy() {
    if (this.destroyed) return;
    this.teardownMusic();
    this.disableMic();
    void this.ctx?.close();
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.musicWaveformData = null;
    this.setState({ isPlaying: false, micActive: false, hasTrack: false });
    this.options = {};
    this.destroyed = true;
  }
  /** Internal: refresh and return the FFT buffer for a source, or null. */
  readFrequencyData(source) {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return null;
      return fillFrequencyData(this.micAnalyser, this.micData);
    }
    if (!this.musicAnalyser || !this.musicData) return null;
    return fillFrequencyData(this.musicAnalyser, this.musicData);
  }
  /** Internal: refresh and return the waveform buffer for a source, or null. */
  readWaveformData(source) {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micWaveformData) return null;
      this.micAnalyser.getByteTimeDomainData(this.micWaveformData);
      return this.micWaveformData;
    }
    if (!this.musicAnalyser || !this.musicWaveformData) return null;
    this.musicAnalyser.getByteTimeDomainData(this.musicWaveformData);
    return this.musicWaveformData;
  }
  /**
   * Internal: lazily create the AudioContext plus the music analyser branch.
   * Throws AudioBandsError when destroyed or when AudioContext is missing.
   */
  ensureCtx() {
    if (this.destroyed) {
      throw new AudioBandsError(
        "lifecycle",
        "destroyed",
        "This AudioBands instance was destroyed"
      );
    }
    if (this.ctx) return this.ctx;
    const Ctx = window.AudioContext || window.webkitAudioContext;
    if (!Ctx) {
      throw new AudioBandsError(
        "lifecycle",
        "unsupported_audio_context",
        "AudioContext is not supported in this environment"
      );
    }
    const ctx = new Ctx();
    const analyser = this.createAnalyser(ctx, this.musicConfig);
    // The music branch is audible, so route it to the speakers; the mic
    // branch (enableMic) is deliberately never connected to destination.
    analyser.connect(ctx.destination);
    this.ctx = ctx;
    this.musicAnalyser = analyser;
    this.musicData = new Uint8Array(analyser.frequencyBinCount);
    this.musicWaveformData = new Uint8Array(analyser.fftSize);
    return ctx;
  }
  /** Internal: build an AnalyserNode from a validated config. */
  createAnalyser(ctx, config) {
    const analyser = ctx.createAnalyser();
    analyser.fftSize = config.fftSize;
    analyser.smoothingTimeConstant = config.smoothingTimeConstant;
    return analyser;
  }
  /**
   * Internal: wrap an arbitrary error in AudioBandsError, record it in
   * state, fire the matching callbacks and return the wrapped error.
   */
  handleError(kind, error, fallbackCode = kind === "mic" ? "mic_error" : "load_error") {
    const wrapped = error instanceof AudioBandsError ? error : new AudioBandsError(
      kind,
      fallbackCode,
      kind === "mic" ? "Failed to access microphone input" : "Failed to load or play audio track",
      error
    );
    if (kind === "load") {
      this.setState({ isPlaying: false, loadError: wrapped });
      this.options.onLoadError?.(wrapped);
    } else {
      this.setState({ micActive: false, micError: wrapped });
      this.options.onMicError?.(wrapped);
    }
    this.options.onError?.(wrapped);
    return wrapped;
  }
  /** Internal: patch state; fires onStateChange only on a real change. */
  setState(patch) {
    let changed = false;
    for (const [key, value] of Object.entries(patch)) {
      if (this.state[key] !== value) {
        this.state[key] = value;
        changed = true;
      }
    }
    if (changed) this.options.onStateChange?.(this.getState());
  }
  /** Internal: stop and detach the current track, resetting music state. */
  teardownMusic() {
    this.audioEl?.pause();
    if (this.audioEl) {
      // Clear src and call load() so the element releases its resource.
      this.audioEl.src = "";
      this.audioEl.load();
    }
    this.audioEl = null;
    try {
      this.musicSource?.disconnect();
    } catch {
      // Already disconnected — ignore.
    }
    this.musicSource = null;
    // Fresh waveform buffer so stale samples don't linger across loads.
    this.musicWaveformData = this.musicAnalyser ? new Uint8Array(this.musicAnalyser.fftSize) : null;
    this.setState({ isPlaying: false, hasTrack: false });
  }
};
353
+
354
+ export {
355
+ AudioBandsError,
356
+ AudioBands
357
+ };
358
+ //# sourceMappingURL=chunk-33JHLQZJ.js.map