@juandinella/audio-bands 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -4,19 +4,17 @@
4
4
 
5
5
  **Demo**: [audio-bands.juandinella.com](https://audio-bands.juandinella.com)
6
6
 
7
- Headless audio frequency analysis for the browser. Get real-time `bass`, `mid`, and `high` values normalized to `0–1` from a music track, a microphone, or both at the same time. No renderer included.
7
+ Headless audio analysis for the browser. Get normalized `bass`, `mid`, `high`, custom named bands, raw FFT bins, or mic waveform data without shipping a renderer.
8
8
 
9
9
  ```ts
10
10
  const { bass, mid, high } = audio.getBands();
11
- // bass: 0.73, mid: 0.41, high: 0.12
12
-
11
+ const custom = audio.getCustomBands();
13
12
  const fft = audio.getFftData();
14
- // Uint8Array(128) — raw frequency bins, 0–255 each
15
13
  ```
16
14
 
17
15
  ## Why
18
16
 
19
- Every audio visualization library either handles only playback (no analysis) or draws its own canvas and hides the data. This one only gives you numbers.
17
+ Most audio libraries either only play audio or immediately draw a canvas for you. This one stays lower level: it gives you usable analysis data and lets you decide how to render it.
20
18
 
21
19
  ## Install
22
20
 
@@ -24,87 +22,75 @@ Every audio visualization library either handles only playback (no analysis) or
24
22
  npm install @juandinella/audio-bands
25
23
  ```
26
24
 
27
- The root entrypoint is framework-agnostic. If you use the React hook, install `react` and import it from `@juandinella/audio-bands/react`.
25
+ ### Entry points
26
+
27
+ - `@juandinella/audio-bands`: main framework-agnostic export
28
+ - `@juandinella/audio-bands/core`: explicit core-only entry
29
+ - `@juandinella/audio-bands/react`: React hook
30
+
31
+ If you use the React hook, install `react` as well.
28
32
 
29
33
  ## Usage
30
34
 
31
35
  ### Vanilla JS
32
36
 
33
- Works in Vue, Svelte, plain HTML — anything.
34
-
35
- ```js
37
+ ```ts
36
38
  import { AudioBands } from '@juandinella/audio-bands';
37
39
 
38
40
  const audio = new AudioBands({
39
- onPlay: () => console.log('playing'),
40
- onPause: () => console.log('paused'),
41
- onError: () => console.error('failed to load'),
42
- onMicStart: () => console.log('mic on'),
43
- onMicStop: () => console.log('mic off'),
41
+ music: {
42
+ fftSize: 512,
43
+ smoothingTimeConstant: 0.7,
44
+ },
45
+ customBands: {
46
+ presence: { from: 0.25, to: 0.5 },
47
+ air: { from: 0.5, to: 1 },
48
+ },
49
+ onLoadError: (error) => console.error('track error', error),
50
+ onMicError: (error) => console.error('mic error', error),
44
51
  });
45
52
 
46
53
  await audio.load('/track.mp3');
47
54
 
48
- // Call inside your animation loop
49
55
  function loop() {
50
56
  const { bass, mid, high, overall } = audio.getBands();
51
- // drive your canvas, SVG, CSS, WebGL — whatever
57
+ const custom = audio.getCustomBands();
58
+ const fft = audio.getFftData();
52
59
 
53
- const fft = audio.getFftData(); // raw bins for spectrum visualizations
54
60
  requestAnimationFrame(loop);
55
61
  }
56
- requestAnimationFrame(loop);
57
62
 
58
- // Clean up when done
59
- audio.destroy();
63
+ requestAnimationFrame(loop);
60
64
  ```
61
65
 
62
66
  ### React hook
63
67
 
64
68
  ```tsx
65
69
  import { useAudioBands } from '@juandinella/audio-bands/react';
66
- import { useEffect, useRef } from 'react';
67
70
 
68
71
  function Visualizer() {
69
- const { loadTrack, togglePlayPause, toggleMic, getBands, isPlaying } =
70
- useAudioBands();
71
- const canvasRef = useRef<HTMLCanvasElement>(null);
72
-
73
- useEffect(() => {
74
- loadTrack('/track.mp3');
75
- }, []);
76
-
77
- useEffect(() => {
78
- const canvas = canvasRef.current!;
79
- const ctx = canvas.getContext('2d')!;
80
- let raf: number;
81
-
82
- function loop() {
83
- const { bass, mid, high } = getBands();
84
-
85
- ctx.clearRect(0, 0, canvas.width, canvas.height);
86
- ctx.beginPath();
87
- ctx.arc(
88
- canvas.width / 2,
89
- canvas.height / 2,
90
- 20 + bass * 80,
91
- 0,
92
- Math.PI * 2,
93
- );
94
- ctx.fill();
95
-
96
- raf = requestAnimationFrame(loop);
97
- }
98
-
99
- raf = requestAnimationFrame(loop);
100
- return () => cancelAnimationFrame(raf);
101
- }, [getBands]);
72
+ const {
73
+ isPlaying,
74
+ hasTrack,
75
+ loadError,
76
+ micError,
77
+ loadTrack,
78
+ togglePlayPause,
79
+ toggleMic,
80
+ getBands,
81
+ getCustomBands,
82
+ } = useAudioBands({
83
+ customBands: {
84
+ presence: { from: 0.25, to: 0.5 },
85
+ },
86
+ });
102
87
 
103
88
  return (
104
89
  <>
105
- <canvas ref={canvasRef} width={400} height={400} />
90
+ <button onClick={() => loadTrack('/track.mp3')}>load</button>
106
91
  <button onClick={togglePlayPause}>{isPlaying ? 'Pause' : 'Play'}</button>
107
92
  <button onClick={toggleMic}>Toggle mic</button>
93
+ <pre>{JSON.stringify({ hasTrack, loadError, micError, ...getBands(), ...getCustomBands() }, null, 2)}</pre>
108
94
  </>
109
95
  );
110
96
  }
@@ -113,93 +99,135 @@ function Visualizer() {
113
99
  ### Mic input
114
100
 
115
101
  ```ts
116
- // Enable mic — browser will ask for permission
117
102
  await audio.enableMic();
118
103
 
119
- // Get frequency bands from the mic
120
- const { bass } = audio.getBands('mic');
104
+ const micBands = audio.getBands('mic');
105
+ const micCustomBands = audio.getCustomBands('mic');
106
+ const waveform = audio.getWaveform();
107
+ ```
121
108
 
122
- // Get raw waveform data (time-domain)
123
- const waveform = audio.getWaveform(); // Uint8Array | null
109
+ ## When To Use Bands Vs FFT
124
110
 
125
- // Disable mic and stop the stream
126
- audio.disableMic();
127
- ```
111
+ Use `getBands()` when you want stable, simple control signals:
112
+
113
+ - pulsing a blob with low-end energy
114
+ - scaling UI based on overall intensity
115
+ - animating typography or CSS variables
116
+ - driving scenes where three broad zones are enough
117
+
118
+ Use `getCustomBands()` when the default bass/mid/high split is too coarse, but you still want named, high-level buckets:
119
+
120
+ - separate `presence`, `air`, or `sub`
121
+ - tune bands to your own design system or animation logic
122
+ - keep your render code semantic instead of index-based
123
+
124
+ Use `getFftData()` when you need bin-level detail:
125
+
126
+ - bar visualizers
127
+ - line spectrums
128
+ - log interpolation
129
+ - any renderer that maps directly over bins
130
+
131
+ Rule of thumb:
132
+
133
+ - `getBands()` for product UI
134
+ - `getCustomBands()` for art direction
135
+ - `getFftData()` for visualizers
128
136
 
129
137
  ## API
130
138
 
131
- ### `AudioBands` (vanilla JS)
139
+ ### `AudioBands`
132
140
 
133
141
  ```ts
134
- new AudioBands(callbacks?: AudioBandsCallbacks)
142
+ new AudioBands(options?: AudioBandsOptions)
135
143
  ```
136
144
 
137
- | Method | Description |
138
- | --------------------- | --------------------------------------------------------------------------------------------- |
139
- | `load(url)` | Load and play an audio file. Resolves when playback starts. |
140
- | `togglePlayPause()` | Toggle playback. |
141
- | `enableMic()` | Request mic access and start analysis. |
142
- | `disableMic()` | Stop mic stream and clean up. |
143
- | `getBands(source?)` | Returns `Bands` for `'music'` (default) or `'mic'`. Call inside RAF. |
144
- | `getFftData(source?)` | Returns raw `Uint8Array` of frequency bins (0–255) for `'music'` or `'mic'`. Call inside RAF. |
145
- | `getWaveform()` | Returns raw time-domain `Uint8Array` from mic. Call inside RAF. |
146
- | `destroy()` | Stop playback, release mic, close AudioContext. |
145
+ #### Methods
147
146
 
148
- ### `useAudioBands()` (React)
147
+ | Method | Description |
148
+ | ----------------------- | ----------- |
149
+ | `load(url)` | Load and play a track. Rejects with `AudioBandsError` on failure. |
150
+ | `togglePlayPause()` | Toggle the current track. |
151
+ | `enableMic()` | Request microphone access and start mic analysis. Rejects with `AudioBandsError` on failure. |
152
+ | `disableMic()` | Stop mic input and clean up the stream. |
153
+ | `getBands(source?)` | Returns normalized `{ bass, mid, high, overall }`. |
154
+ | `getCustomBands(source?)` | Returns normalized values for configured custom bands. |
155
+ | `getFftData(source?)` | Returns raw `Uint8Array` frequency bins. |
156
+ | `getWaveform()` | Returns raw mic time-domain data. |
157
+ | `getState()` | Returns the current playback/mic/error state. |
158
+ | `destroy()` | Stop playback, release the mic and close the `AudioContext`. |
149
159
 
150
- Same capabilities as `AudioBands`. `destroy()` is called automatically on unmount.
160
+ ### `useAudioBands()`
151
161
 
152
162
  ```ts
153
163
  const {
154
164
  isPlaying,
155
165
  micActive,
166
+ hasTrack,
156
167
  audioError,
168
+ loadError,
169
+ micError,
170
+ state,
157
171
  loadTrack,
158
172
  togglePlayPause,
159
173
  toggleMic,
160
174
  getBands,
175
+ getCustomBands,
161
176
  getFftData,
162
177
  getWaveform,
163
- } = useAudioBands();
178
+ } = useAudioBands(options);
164
179
  ```
165
180
 
166
- Import it from:
181
+ ### `AudioBandsOptions`
167
182
 
168
183
  ```ts
169
- import { useAudioBands } from '@juandinella/audio-bands/react';
170
- ```
171
-
172
- ### `Bands`
173
-
174
- ```ts
175
- type Bands = {
176
- bass: number; // 0–1 — low frequencies (0–8% of spectrum)
177
- mid: number; // 0–1 — mid frequencies (8–40%)
178
- high: number; // 0–1 — high frequencies (40–100%)
179
- overall: number; // 0–1 — weighted mix: bass×0.5 + mid×0.3 + high×0.2
184
+ type AudioBandsOptions = {
185
+ music?: {
186
+ fftSize?: number;
187
+ smoothingTimeConstant?: number;
188
+ };
189
+ mic?: {
190
+ fftSize?: number;
191
+ smoothingTimeConstant?: number;
192
+ };
193
+ bandRanges?: {
194
+ bass?: { from: number; to: number };
195
+ mid?: { from: number; to: number };
196
+ high?: { from: number; to: number };
197
+ };
198
+ customBands?: Record<string, { from: number; to: number }>;
199
+ onError?: (error: AudioBandsError) => void;
200
+ onLoadError?: (error: AudioBandsError) => void;
201
+ onMicError?: (error: AudioBandsError) => void;
202
+ onStateChange?: (state: AudioBandsState) => void;
203
+ onPlay?: () => void;
204
+ onPause?: () => void;
205
+ onMicStart?: () => void;
206
+ onMicStop?: () => void;
180
207
  };
181
208
  ```
182
209
 
183
- ### `AudioBandsCallbacks`
210
+ ### `AudioBandsState`
184
211
 
185
212
  ```ts
186
- type AudioBandsCallbacks = {
187
- onPlay?: () => void;
188
- onPause?: () => void;
189
- onError?: (error?: unknown) => void;
190
- onMicStart?: () => void;
191
- onMicStop?: () => void;
213
+ type AudioBandsState = {
214
+ isPlaying: boolean;
215
+ micActive: boolean;
216
+ hasTrack: boolean; // a track source is assigned, even if playback later fails
217
+ loadError: AudioBandsError | null;
218
+ micError: AudioBandsError | null;
192
219
  };
193
220
  ```
194
221
 
195
222
  ## Notes
196
223
 
197
- - `AudioContext` is created lazily on the first call to `load()` or `enableMic()`. Browsers require a user gesture before audio can start.
198
- - The root package export does not depend on React. The React hook lives at `@juandinella/audio-bands/react`.
199
- - The mic analyser is **not** connected to `AudioContext.destination`, so there is no feedback loop.
200
- - `getBands()`, `getFftData()`, and `getWaveform()` read live data from the audio graph. Call them inside `requestAnimationFrame`, not in response to React state.
201
- - `getFftData()` returns the same underlying buffer on every call. Copy it if you need to compare frames: `Array.from(fft)`.
202
- - `load()` and `enableMic()` reject on browser playback/permission errors. Use `try/catch` if you need custom handling.
224
+ - `AudioContext` is created lazily on the first call to `load()` or `enableMic()`.
225
+ - `hasTrack` means a track source is currently assigned to the instance. It can still be `true` if `play()` fails due to autoplay policy or another playback error.
226
+ - The mic analyser is not connected to `AudioContext.destination`, so it will not feed back into the speakers.
227
+ - `getBands()`, `getCustomBands()`, `getFftData()`, and `getWaveform()` read live data. Call them inside `requestAnimationFrame`, not from React state updates.
228
+ - `getFftData()` returns the same underlying buffer on each call. Copy it if you need frame-to-frame comparisons.
229
+ - `fftSize` must be a power of two between `32` and `32768`.
230
+ - Band ranges are normalized from `0` to `1`, where `0` is the start of the analyser spectrum and `1` is the end.
203
231
 
204
232
  ## License
205
233
 
@@ -0,0 +1,344 @@
1
+ // src/errors.ts
2
+ var AudioBandsError = class extends Error {
3
+ constructor(kind, code, message, cause) {
4
+ super(message);
5
+ this.name = "AudioBandsError";
6
+ this.kind = kind;
7
+ this.code = code;
8
+ this.cause = cause;
9
+ }
10
+ };
11
+
12
// src/core.ts
// Default AnalyserNode settings for the music (track) source.
var DEFAULT_MUSIC_ANALYSER = {
  fftSize: 256,
  smoothingTimeConstant: 0.85
};
// Default AnalyserNode settings for the microphone source.
var DEFAULT_MIC_ANALYSER = {
  fftSize: 256,
  smoothingTimeConstant: 0.8
};
// Default bass/mid/high split, expressed as normalized positions over the
// analyser spectrum (0 = first frequency bin, 1 = last bin).
var DEFAULT_CLASSIC_RANGES = {
  bass: { from: 0, to: 0.08 },
  mid: { from: 0.08, to: 0.4 },
  high: { from: 0.4, to: 1 }
};
// Silent result spread into getBands() output when no analyser data exists yet.
var ZERO = { bass: 0, mid: 0, high: 0, overall: 0 };
27
// Arithmetic mean of arr[from..to) — callers guarantee from < to, so the
// divisor is never zero.
function avg(arr, from, to) {
  let total = 0;
  let i = from;
  while (i < to) {
    total += arr[i];
    i += 1;
  }
  return total / (to - from);
}
32
// True when `value` is a positive power of two (1, 2, 4, ...).
// The bare bit trick `(v & v - 1) === 0` alone also accepts 0, so a
// positivity guard is added. The only caller (normalizeAnalyserConfig)
// rejects values below 32 before reaching this check, so the guard is
// behavior-compatible there while making the helper correct standalone.
function isPowerOfTwo(value) {
  return value > 0 && (value & value - 1) === 0;
}
35
// Merge a (possibly partial) user analyser config with defaults, then
// validate it. Throws AudioBandsError("config", "invalid_config", ...)
// when fftSize is not a power of two in [32, 32768] or when the
// smoothing constant is outside [0, 1].
function normalizeAnalyserConfig(config, fallback) {
  const fftSize = config?.fftSize ?? fallback.fftSize;
  const smoothingTimeConstant = config?.smoothingTimeConstant ?? fallback.smoothingTimeConstant;
  const fftSizeValid =
    Number.isInteger(fftSize) &&
    fftSize >= 32 &&
    fftSize <= 32768 &&
    isPowerOfTwo(fftSize);
  if (!fftSizeValid) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      "fftSize must be a power of two between 32 and 32768"
    );
  }
  const smoothingValid =
    typeof smoothingTimeConstant === "number" &&
    smoothingTimeConstant >= 0 &&
    smoothingTimeConstant <= 1;
  if (!smoothingValid) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      "smoothingTimeConstant must be between 0 and 1"
    );
  }
  return { fftSize, smoothingTimeConstant };
}
54
// Resolve a possibly-omitted band range, falling back to the classic
// default for `name`, and validate that 0 <= from < to <= 1. Throws an
// AudioBandsError config error otherwise.
function normalizeRange(name, range) {
  const resolved = range ?? DEFAULT_CLASSIC_RANGES[name];
  const valid =
    typeof resolved?.from === "number" &&
    typeof resolved?.to === "number" &&
    resolved.from >= 0 &&
    resolved.to <= 1 &&
    resolved.from < resolved.to;
  if (!valid) {
    throw new AudioBandsError(
      "config",
      "invalid_config",
      `Band range "${name}" must satisfy 0 <= from < to <= 1`
    );
  }
  return resolved;
}
65
// Validate (and default-fill) the classic bass/mid/high ranges from the
// user-supplied bandRanges option.
function normalizeClassicRanges(ranges) {
  const bass = normalizeRange("bass", ranges?.bass);
  const mid = normalizeRange("mid", ranges?.mid);
  const high = normalizeRange("high", ranges?.high);
  return { bass, mid, high };
}
72
// Validate every user-defined custom band; returns an empty object when
// none are configured.
function normalizeCustomBands(customBands) {
  const normalized = {};
  if (!customBands) return normalized;
  for (const [name, range] of Object.entries(customBands)) {
    normalized[name] = normalizeRange(name, range);
  }
  return normalized;
}
78
// Map a normalized { from, to } range onto half-open bin indexes
// [start, end) for a buffer of length `len`. Clamped so that `start` is
// a valid index and at least one bin is always covered (end > start).
function getIndexes(len, range) {
  const lowBin = Math.floor(len * range.from);
  const highBin = Math.floor(len * range.to);
  const start = Math.max(0, Math.min(len - 1, lowBin));
  const end = Math.max(start + 1, Math.min(len, highBin));
  return [start, end];
}
83
// Average the byte bins covered by `range` and scale the 0–255 byte
// domain down to a normalized 0–1 value.
function getRangeValue(data, range) {
  const [start, end] = getIndexes(data.length, range);
  const mean = avg(data, start, end);
  return mean / 255;
}
87
// Snapshot the analyser's current frequency spectrum into `data` and
// return it. The analyser writes in place, so the SAME buffer instance
// is reused (and returned) on every call — documented in the README as
// "copy it if you need frame-to-frame comparisons".
function fillFrequencyData(analyser, data) {
  analyser.getByteFrequencyData(data);
  return data;
}
91
// Derive the classic bass/mid/high values from one FFT snapshot, plus a
// weighted "overall" energy (bass 50%, mid 30%, high 20%).
function computeBands(data, ranges) {
  const levels = {
    bass: getRangeValue(data, ranges.bass),
    mid: getRangeValue(data, ranges.mid),
    high: getRangeValue(data, ranges.high)
  };
  return {
    ...levels,
    overall: levels.bass * 0.5 + levels.mid * 0.3 + levels.high * 0.2
  };
}
102
// Compute one normalized 0–1 value per named custom band.
function computeCustomBands(data, ranges) {
  const result = {};
  for (const [name, range] of Object.entries(ranges)) {
    result[name] = getRangeValue(data, range);
  }
  return result;
}
107
// Shallow copy of the state object so callers cannot mutate internals.
function cloneState(state) {
  return Object.assign({}, state);
}
110
// Headless audio analysis engine. Lazily builds a Web Audio graph
// (AudioContext + AnalyserNodes) for a music track and/or a microphone
// stream, and exposes normalized band values, raw FFT bins and mic
// waveform data. All get*() readers are designed to be polled inside a
// requestAnimationFrame loop.
var AudioBands = class {
  // Options are validated eagerly, so invalid `music`/`mic`/`bandRanges`/
  // `customBands` config throws AudioBandsError("config", ...) here,
  // before any audio resource is touched.
  constructor(options = {}) {
    // Snapshot state mutated only through setState(), so onStateChange
    // fires exactly when something actually changed.
    this.state = {
      isPlaying: false,
      micActive: false,
      hasTrack: false,
      loadError: null,
      micError: null
    };
    this.ctx = null; // AudioContext, created lazily in ensureCtx()
    this.musicAnalyser = null;
    this.musicData = null; // reused byte-frequency buffer for the track
    this.micAnalyser = null;
    this.micData = null; // reused byte-frequency buffer for the mic
    this.micWaveformData = null; // reused time-domain buffer for the mic
    this.audioEl = null; // HTMLAudioElement backing the current track
    this.musicSource = null; // MediaElementAudioSourceNode for audioEl
    this.micSource = null; // MediaStreamAudioSourceNode for micStream
    this.micStream = null; // MediaStream from getUserMedia
    this.destroyed = false;
    this.options = options;
    this.musicConfig = normalizeAnalyserConfig(options.music, DEFAULT_MUSIC_ANALYSER);
    this.micConfig = normalizeAnalyserConfig(options.mic, DEFAULT_MIC_ANALYSER);
    this.classicRanges = normalizeClassicRanges(options.bandRanges);
    this.customBandRanges = normalizeCustomBands(options.customBands);
  }
  // Returns a shallow copy of the current state so callers cannot
  // mutate internal state.
  getState() {
    return cloneState(this.state);
  }
  // Normalized 0–1 values for each configured custom band, from 'music'
  // (default) or 'mic'. When the requested source has no analyser yet,
  // a 1-byte zero buffer is used so every configured band reports 0.
  // NOTE(review): the no-data path allocates a Uint8Array per call;
  // harmless but avoidable inside a RAF loop.
  getCustomBands(source = "music") {
    const data = this.readFrequencyData(source);
    if (!data) return computeCustomBands(new Uint8Array(1), this.customBandRanges);
    return computeCustomBands(data, this.customBandRanges);
  }
  // Load a track URL into a fresh <audio> element, wire it into the
  // music analyser and start playback. Resolves when play() succeeds;
  // rejects with an AudioBandsError (also routed through onLoadError /
  // onError) otherwise. Any previously loaded track is torn down first.
  // Note: hasTrack becomes true as soon as the source is assigned, even
  // if play() later fails (e.g. autoplay policy) — matches the README.
  async load(url) {
    let ctx;
    try {
      // May throw when the instance is destroyed or AudioContext is
      // unsupported; route through handleError so callbacks still fire.
      ctx = this.ensureCtx();
    } catch (error) {
      throw this.handleError("load", error);
    }
    this.teardownMusic();
    const audio = new Audio();
    audio.crossOrigin = "anonymous"; // allow analysing CORS-served media
    audio.src = url;
    audio.loop = true;
    this.audioEl = audio;
    this.setState({ hasTrack: true, loadError: null });
    // element -> analyser; the music analyser itself is connected to
    // ctx.destination (see ensureCtx), so the track remains audible.
    const source = ctx.createMediaElementSource(audio);
    source.connect(this.musicAnalyser);
    this.musicSource = source;
    try {
      await audio.play();
      this.setState({ isPlaying: true, loadError: null });
      this.options.onPlay?.();
    } catch (error) {
      throw this.handleError("load", error, "load_error");
    }
  }
  // Toggle playback of the current track. No-op when nothing is loaded.
  // The resume path is fire-and-forget: play() failures are reported via
  // handleError (code "playback_error") rather than thrown.
  togglePlayPause() {
    const audio = this.audioEl;
    if (!audio) return;
    if (audio.paused) {
      void audio.play().then(() => {
        this.setState({ isPlaying: true, loadError: null });
        this.options.onPlay?.();
      }).catch((error) => {
        this.handleError("load", error, "playback_error");
      });
      return;
    }
    audio.pause();
    this.setState({ isPlaying: false });
    this.options.onPause?.();
  }
  // Request microphone access and start mic analysis. Idempotent while a
  // stream is active. Rejects with an AudioBandsError (also routed
  // through onMicError / onError) on permission or device failure.
  async enableMic() {
    let ctx;
    try {
      ctx = this.ensureCtx();
    } catch (error) {
      throw this.handleError("mic", error);
    }
    if (this.micStream) return;
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
        video: false
      });
      this.micStream = stream;
      const analyser = this.createAnalyser(ctx, this.micConfig);
      this.micAnalyser = analyser;
      // frequencyBinCount = fftSize / 2 bins for FFT reads; the full
      // fftSize is needed for time-domain (waveform) reads.
      this.micData = new Uint8Array(analyser.frequencyBinCount);
      this.micWaveformData = new Uint8Array(analyser.fftSize);
      const source = ctx.createMediaStreamSource(stream);
      // Deliberately NOT connected to ctx.destination: no feedback loop.
      source.connect(analyser);
      this.micSource = source;
      this.setState({ micActive: true, micError: null });
      this.options.onMicStart?.();
    } catch (error) {
      throw this.handleError("mic", error, "mic_error");
    }
  }
  // Stop the mic stream, disconnect its graph nodes and drop buffers.
  // onMicStop fires only if there actually was mic state to tear down.
  disableMic() {
    const hadMic = Boolean(this.micStream || this.micSource || this.micAnalyser);
    this.micStream?.getTracks().forEach((track) => track.stop());
    this.micStream = null;
    try {
      this.micSource?.disconnect();
    } catch {
      // disconnect() can throw if the node was already detached — ignore.
    }
    this.micSource = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.setState({ micActive: false });
    if (hadMic) this.options.onMicStop?.();
  }
  // Classic { bass, mid, high, overall } reading for 'music' (default)
  // or 'mic'. Returns a fresh all-zero object when no data is available.
  getBands(source = "music") {
    const data = this.readFrequencyData(source);
    if (!data) return { ...ZERO };
    return computeBands(data, this.classicRanges);
  }
  // Raw byte frequency bins for the given source, or null when the
  // source has no analyser. Returns the same underlying buffer per call.
  getFftData(source = "music") {
    return this.readFrequencyData(source);
  }
  // Raw mic time-domain bytes, or null when the mic is not active.
  // Returns the same underlying buffer on every call.
  getWaveform() {
    if (!this.micAnalyser || !this.micWaveformData) return null;
    this.micAnalyser.getByteTimeDomainData(this.micWaveformData);
    return this.micWaveformData;
  }
  // Stop playback, release the mic, close the AudioContext and mark the
  // instance unusable (ensureCtx will refuse afterwards). Idempotent.
  destroy() {
    if (this.destroyed) return;
    this.teardownMusic();
    this.disableMic();
    void this.ctx?.close(); // close() returns a promise; result unused
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.setState({ isPlaying: false, micActive: false, hasTrack: false });
    this.options = {}; // drop callback references
    this.destroyed = true;
  }
  // Fill and return the frequency buffer for the requested source, or
  // null when that source's analyser/buffer does not exist yet.
  readFrequencyData(source) {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return null;
      return fillFrequencyData(this.micAnalyser, this.micData);
    }
    if (!this.musicAnalyser || !this.musicData) return null;
    return fillFrequencyData(this.musicAnalyser, this.musicData);
  }
  // Lazily create the AudioContext plus the music analyser (connected to
  // the destination so tracks are audible). Throws AudioBandsError for a
  // destroyed instance or an environment without AudioContext support.
  ensureCtx() {
    if (this.destroyed) {
      throw new AudioBandsError(
        "lifecycle",
        "destroyed",
        "This AudioBands instance was destroyed"
      );
    }
    if (this.ctx) return this.ctx;
    // webkitAudioContext covers older Safari.
    const Ctx = window.AudioContext || window.webkitAudioContext;
    if (!Ctx) {
      throw new AudioBandsError(
        "lifecycle",
        "unsupported_audio_context",
        "AudioContext is not supported in this environment"
      );
    }
    const ctx = new Ctx();
    const analyser = this.createAnalyser(ctx, this.musicConfig);
    analyser.connect(ctx.destination);
    this.ctx = ctx;
    this.musicAnalyser = analyser;
    this.musicData = new Uint8Array(analyser.frequencyBinCount);
    return ctx;
  }
  // Build an AnalyserNode from a validated { fftSize, smoothingTimeConstant }.
  createAnalyser(ctx, config) {
    const analyser = ctx.createAnalyser();
    analyser.fftSize = config.fftSize;
    analyser.smoothingTimeConstant = config.smoothingTimeConstant;
    return analyser;
  }
  // Wrap `error` in an AudioBandsError (unless it already is one),
  // record it in state and fan it out to the kind-specific callback plus
  // the generic onError. Returns the wrapped error so callers can throw it.
  handleError(kind, error, fallbackCode = kind === "mic" ? "mic_error" : "load_error") {
    const wrapped = error instanceof AudioBandsError ? error : new AudioBandsError(
      kind,
      fallbackCode,
      kind === "mic" ? "Failed to access microphone input" : "Failed to load or play audio track",
      error
    );
    if (kind === "load") {
      this.setState({ isPlaying: false, loadError: wrapped });
      this.options.onLoadError?.(wrapped);
    } else {
      this.setState({ micActive: false, micError: wrapped });
      this.options.onMicError?.(wrapped);
    }
    this.options.onError?.(wrapped);
    return wrapped;
  }
  // Apply a partial state patch; onStateChange fires only when at least
  // one field actually changed (reference/strict inequality).
  setState(patch) {
    let changed = false;
    for (const [key, value] of Object.entries(patch)) {
      if (this.state[key] !== value) {
        this.state[key] = value;
        changed = true;
      }
    }
    if (changed) this.options.onStateChange?.(this.getState());
  }
  // Stop and detach the current track: pause, clear the element source
  // (src = "" + load() aborts any in-flight fetch), disconnect the graph
  // node, and reset playing/track state.
  teardownMusic() {
    this.audioEl?.pause();
    if (this.audioEl) {
      this.audioEl.src = "";
      this.audioEl.load();
    }
    this.audioEl = null;
    try {
      this.musicSource?.disconnect();
    } catch {
      // already disconnected — safe to ignore.
    }
    this.musicSource = null;
    this.setState({ isPlaying: false, hasTrack: false });
  }
};
339
+
340
+ export {
341
+ AudioBandsError,
342
+ AudioBands
343
+ };
344
+ //# sourceMappingURL=chunk-UAMH5Y33.js.map