@juandinella/audio-bands 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +187 -0
- package/dist/index.cjs +235 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +63 -0
- package/dist/index.d.ts +63 -0
- package/dist/index.js +207 -0
- package/dist/index.js.map +1 -0
- package/package.json +44 -0
package/README.md
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
# audio-bands
|
|
2
|
+
|
|
3
|
+
Headless audio frequency analysis for the browser. Get real-time `bass`, `mid`, and `high` values normalized to `0–1` from a music track, a microphone, or both at the same time. No renderer included.
|
|
4
|
+
|
|
5
|
+
```ts
|
|
6
|
+
const { bass, mid, high } = audio.getBands();
|
|
7
|
+
// bass: 0.73, mid: 0.41, high: 0.12
|
|
8
|
+
|
|
9
|
+
const fft = audio.getFftData();
|
|
10
|
+
// Uint8Array(128) — raw frequency bins, 0–255 each
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Why
|
|
14
|
+
|
|
15
|
+
Every audio visualization library either handles only playback (no analysis) or draws its own canvas and hides the data. This one only gives you numbers.
|
|
16
|
+
|
|
17
|
+
## Install
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
npm install @juandinella/audio-bands
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
React is an optional peer dependency. The core class works in any framework or plain HTML.
|
|
24
|
+
|
|
25
|
+
## Usage
|
|
26
|
+
|
|
27
|
+
### Vanilla JS
|
|
28
|
+
|
|
29
|
+
Works in Vue, Svelte, plain HTML — anything.
|
|
30
|
+
|
|
31
|
+
```js
|
|
32
|
+
import { AudioBands } from '@juandinella/audio-bands';
|
|
33
|
+
|
|
34
|
+
const audio = new AudioBands({
|
|
35
|
+
onPlay: () => console.log('playing'),
|
|
36
|
+
onPause: () => console.log('paused'),
|
|
37
|
+
onError: () => console.error('failed to load'),
|
|
38
|
+
onMicStart: () => console.log('mic on'),
|
|
39
|
+
onMicStop: () => console.log('mic off'),
|
|
40
|
+
});
|
|
41
|
+
|
|
42
|
+
await audio.load('/track.mp3');
|
|
43
|
+
|
|
44
|
+
// Call inside your animation loop
|
|
45
|
+
function loop() {
|
|
46
|
+
const { bass, mid, high, overall } = audio.getBands();
|
|
47
|
+
// drive your canvas, SVG, CSS, WebGL — whatever
|
|
48
|
+
|
|
49
|
+
const fft = audio.getFftData(); // raw bins for spectrum visualizations
|
|
50
|
+
requestAnimationFrame(loop);
|
|
51
|
+
}
|
|
52
|
+
requestAnimationFrame(loop);
|
|
53
|
+
|
|
54
|
+
// Clean up when done
|
|
55
|
+
audio.destroy();
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### React hook
|
|
59
|
+
|
|
60
|
+
```tsx
|
|
61
|
+
import { useAudioBands } from '@juandinella/audio-bands';
|
|
62
|
+
import { useEffect, useRef } from 'react';
|
|
63
|
+
|
|
64
|
+
function Visualizer() {
|
|
65
|
+
const { loadTrack, togglePlayPause, toggleMic, getBands, isPlaying } = useAudioBands();
|
|
66
|
+
const canvasRef = useRef<HTMLCanvasElement>(null);
|
|
67
|
+
|
|
68
|
+
useEffect(() => {
|
|
69
|
+
loadTrack('/track.mp3');
|
|
70
|
+
}, []);
|
|
71
|
+
|
|
72
|
+
useEffect(() => {
|
|
73
|
+
const canvas = canvasRef.current!;
|
|
74
|
+
const ctx = canvas.getContext('2d')!;
|
|
75
|
+
let raf: number;
|
|
76
|
+
|
|
77
|
+
function loop() {
|
|
78
|
+
const { bass, mid, high } = getBands();
|
|
79
|
+
|
|
80
|
+
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
|
81
|
+
ctx.beginPath();
|
|
82
|
+
ctx.arc(canvas.width / 2, canvas.height / 2, 20 + bass * 80, 0, Math.PI * 2);
|
|
83
|
+
ctx.fill();
|
|
84
|
+
|
|
85
|
+
raf = requestAnimationFrame(loop);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
raf = requestAnimationFrame(loop);
|
|
89
|
+
return () => cancelAnimationFrame(raf);
|
|
90
|
+
}, [getBands]);
|
|
91
|
+
|
|
92
|
+
return (
|
|
93
|
+
<>
|
|
94
|
+
<canvas ref={canvasRef} width={400} height={400} />
|
|
95
|
+
<button onClick={togglePlayPause}>{isPlaying ? 'Pause' : 'Play'}</button>
|
|
96
|
+
<button onClick={toggleMic}>Toggle mic</button>
|
|
97
|
+
</>
|
|
98
|
+
);
|
|
99
|
+
}
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
### Mic input
|
|
103
|
+
|
|
104
|
+
```ts
|
|
105
|
+
// Enable mic — browser will ask for permission
|
|
106
|
+
await audio.enableMic();
|
|
107
|
+
|
|
108
|
+
// Get frequency bands from the mic
|
|
109
|
+
const { bass } = audio.getBands('mic');
|
|
110
|
+
|
|
111
|
+
// Get raw waveform data (time-domain)
|
|
112
|
+
const waveform = audio.getWaveform(); // Uint8Array | null
|
|
113
|
+
|
|
114
|
+
// Disable mic and stop the stream
|
|
115
|
+
audio.disableMic();
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## API
|
|
119
|
+
|
|
120
|
+
### `AudioBands` (vanilla JS)
|
|
121
|
+
|
|
122
|
+
```ts
|
|
123
|
+
new AudioBands(callbacks?: AudioBandsCallbacks)
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
| Method | Description |
|
|
127
|
+
|---|---|
|
|
128
|
+
| `load(url)` | Load and play an audio file. Resolves when playback starts. |
|
|
129
|
+
| `togglePlayPause()` | Toggle playback. |
|
|
130
|
+
| `enableMic()` | Request mic access and start analysis. |
|
|
131
|
+
| `disableMic()` | Stop mic stream and clean up. |
|
|
132
|
+
| `getBands(source?)` | Returns `Bands` for `'music'` (default) or `'mic'`. Call inside RAF. |
|
|
133
|
+
| `getFftData(source?)` | Returns raw `Uint8Array` of frequency bins (0–255) for `'music'` or `'mic'`. Call inside RAF. |
|
|
134
|
+
| `getWaveform()` | Returns raw time-domain `Uint8Array` from mic. Call inside RAF. |
|
|
135
|
+
| `destroy()` | Stop playback, release mic, close AudioContext. |
|
|
136
|
+
|
|
137
|
+
### `useAudioBands()` (React)
|
|
138
|
+
|
|
139
|
+
Same capabilities as `AudioBands`. `destroy()` is called automatically on unmount.
|
|
140
|
+
|
|
141
|
+
```ts
|
|
142
|
+
const {
|
|
143
|
+
isPlaying,
|
|
144
|
+
micActive,
|
|
145
|
+
audioError,
|
|
146
|
+
loadTrack,
|
|
147
|
+
togglePlayPause,
|
|
148
|
+
toggleMic,
|
|
149
|
+
getBands,
|
|
150
|
+
getFftData,
|
|
151
|
+
getWaveform,
|
|
152
|
+
} = useAudioBands();
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
### `Bands`
|
|
156
|
+
|
|
157
|
+
```ts
|
|
158
|
+
type Bands = {
|
|
159
|
+
bass: number; // 0–1 — low frequencies (0–8% of spectrum)
|
|
160
|
+
mid: number; // 0–1 — mid frequencies (8–40%)
|
|
161
|
+
high: number; // 0–1 — high frequencies (40–100%)
|
|
162
|
+
overall: number; // 0–1 — weighted mix: bass×0.5 + mid×0.3 + high×0.2
|
|
163
|
+
};
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
### `AudioBandsCallbacks`
|
|
167
|
+
|
|
168
|
+
```ts
|
|
169
|
+
type AudioBandsCallbacks = {
|
|
170
|
+
onPlay?: () => void;
|
|
171
|
+
onPause?: () => void;
|
|
172
|
+
onError?: () => void;
|
|
173
|
+
onMicStart?: () => void;
|
|
174
|
+
onMicStop?: () => void;
|
|
175
|
+
};
|
|
176
|
+
```
|
|
177
|
+
|
|
178
|
+
## Notes
|
|
179
|
+
|
|
180
|
+
- `AudioContext` is created lazily on the first call to `load()` or `enableMic()`. Browsers require a user gesture before audio can start.
|
|
181
|
+
- The mic analyser is **not** connected to `AudioContext.destination`, so there is no feedback loop.
|
|
182
|
+
- `getBands()`, `getFftData()`, and `getWaveform()` read live data from the audio graph. Call them inside a `requestAnimationFrame` loop, not from React state or render — the values change every frame and do not trigger re-renders.
|
|
183
|
+
- `getFftData()` returns the same underlying buffer on every call. Copy it if you need to compare frames: `Array.from(fft)`.
|
|
184
|
+
|
|
185
|
+
## License
|
|
186
|
+
|
|
187
|
+
MIT
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var index_exports = {};
|
|
22
|
+
__export(index_exports, {
|
|
23
|
+
AudioBands: () => AudioBands,
|
|
24
|
+
useAudioBands: () => useAudioBands
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(index_exports);
|
|
27
|
+
|
|
28
|
+
// src/core.ts
|
|
29
|
+
var ZERO = { bass: 0, mid: 0, high: 0, overall: 0 };
|
|
30
|
+
// Arithmetic mean of arr[from..to) (half-open range).
// Returns 0 for an empty range — the previous version divided by zero
// and produced NaN when from === to (possible for very small FFT sizes,
// where Math.floor(len * 0.08) collapses to 0).
function avg(arr, from, to) {
  if (to <= from) return 0;
  let sum = 0;
  for (let i = from; i < to; i++) sum += arr[i];
  return sum / (to - from);
}
|
|
35
|
+
// Pull the analyser's current frequency bins into `data`, then collapse them
// into normalized bass / mid / high / overall values (each 0–1).
// Band split: bass = bottom 8% of bins, mid = 8–40%, high = 40–100%.
function computeBands(analyser, data) {
  analyser.getByteFrequencyData(data);
  const len = data.length;
  const bassEnd = Math.floor(len * 0.08);
  const midEnd = Math.floor(len * 0.4);
  const bass = avg(data, 0, bassEnd);
  const mid = avg(data, bassEnd, midEnd);
  const high = avg(data, midEnd, len);
  // Bins are bytes (0–255); the weighted mix is normalized at the end.
  const overall = bass * 0.5 + mid * 0.3 + high * 0.2;
  return {
    bass: bass / 255,
    mid: mid / 255,
    high: high / 255,
    overall: overall / 255
  };
}
|
|
48
|
+
/**
 * Headless audio frequency analysis over the Web Audio API.
 *
 * Drives two independent analyser chains — one for a music track played
 * through an HTMLAudioElement, one for the microphone — and exposes their
 * data as normalized bands, raw FFT bins, and a time-domain waveform.
 *
 * Fixes in this revision:
 *  - togglePlayPause(): play() rejections (e.g. autoplay policy) are caught
 *    and reported via onError instead of becoming unhandled rejections.
 *  - enableMic(): a second call while the mic is already active is a no-op,
 *    so the previously acquired MediaStream is no longer leaked.
 *  - destroy(): disconnects source nodes and clears all references so the
 *    instance fully releases what it created (and can lazily re-initialize).
 */
var AudioBands = class {
  constructor(callbacks = {}) {
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.audioEl = null;
    this.musicSource = null;
    this.micSource = null;
    this.micStream = null;
    this.callbacks = callbacks;
  }
  // Lazy — AudioContext must be created after a user gesture
  ensureCtx() {
    if (this.ctx) return this.ctx;
    const Ctx = window.AudioContext || window.webkitAudioContext;
    const ctx = new Ctx();
    const analyser = ctx.createAnalyser();
    analyser.fftSize = 256;
    analyser.smoothingTimeConstant = 0.85;
    // Music chain is audible: analyser feeds the speakers.
    analyser.connect(ctx.destination);
    this.ctx = ctx;
    this.musicAnalyser = analyser;
    this.musicData = new Uint8Array(analyser.frequencyBinCount);
    return ctx;
  }
  // Load a track and start playback. Invokes onPlay on success,
  // onError on failure (bad URL, autoplay block, ...).
  async load(url) {
    const ctx = this.ensureCtx();
    // Tear down any previously loaded track first.
    this.audioEl?.pause();
    if (this.audioEl) this.audioEl.src = "";
    try {
      this.musicSource?.disconnect();
    } catch {
    }
    const audio = new Audio();
    audio.crossOrigin = "anonymous";
    audio.src = url;
    audio.loop = true;
    this.audioEl = audio;
    const source = ctx.createMediaElementSource(audio);
    source.connect(this.musicAnalyser);
    this.musicSource = source;
    try {
      await audio.play();
      this.callbacks.onPlay?.();
    } catch {
      this.callbacks.onError?.();
    }
  }
  // Toggle playback of the loaded track. No-op before load().
  togglePlayPause() {
    const audio = this.audioEl;
    if (!audio) return;
    if (audio.paused) {
      // play() returns a promise that can reject (autoplay policy);
      // surface that through onError instead of an unhandled rejection.
      audio.play().catch(() => {
        this.callbacks.onError?.();
      });
      this.callbacks.onPlay?.();
    } else {
      audio.pause();
      this.callbacks.onPause?.();
    }
  }
  // Request mic access and build the mic analyser chain. The mic is
  // deliberately NOT connected to ctx.destination (prevents feedback).
  async enableMic() {
    // Already active — bail out instead of leaking the existing MediaStream.
    if (this.micStream) return;
    const ctx = this.ensureCtx();
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
      this.micStream = stream;
      const analyser = ctx.createAnalyser();
      analyser.fftSize = 256;
      analyser.smoothingTimeConstant = 0.8;
      this.micAnalyser = analyser;
      this.micData = new Uint8Array(analyser.frequencyBinCount);
      this.micWaveformData = new Uint8Array(analyser.fftSize);
      const source = ctx.createMediaStreamSource(stream);
      source.connect(analyser);
      this.micSource = source;
      this.callbacks.onMicStart?.();
    } catch {
      console.warn("[audio-bands] Mic access denied");
    }
  }
  // Stop the mic stream and drop the mic analyser chain.
  disableMic() {
    this.micStream?.getTracks().forEach((t) => t.stop());
    this.micStream = null;
    try {
      this.micSource?.disconnect();
    } catch {
    }
    this.micSource = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.callbacks.onMicStop?.();
  }
  // Call inside requestAnimationFrame to get current frequency data
  getBands(source = "music") {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return { ...ZERO };
      return computeBands(this.micAnalyser, this.micData);
    }
    if (!this.musicAnalyser || !this.musicData) return { ...ZERO };
    return computeBands(this.musicAnalyser, this.musicData);
  }
  // Call inside requestAnimationFrame to get raw FFT frequency bins (0–255 per bin).
  // Returns the same underlying buffer on every call — copy it to keep a frame.
  getFftData(source = "music") {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return null;
      this.micAnalyser.getByteFrequencyData(this.micData);
      return this.micData;
    }
    if (!this.musicAnalyser || !this.musicData) return null;
    this.musicAnalyser.getByteFrequencyData(this.musicData);
    return this.musicData;
  }
  // Call inside requestAnimationFrame to get raw time-domain waveform (mic only)
  getWaveform() {
    if (!this.micAnalyser || !this.micWaveformData) return null;
    this.micAnalyser.getByteTimeDomainData(this.micWaveformData);
    return this.micWaveformData;
  }
  // Call when done — stops playback and mic, disconnects nodes, closes the AudioContext.
  // Safe to call multiple times; all references are cleared.
  destroy() {
    this.audioEl?.pause();
    if (this.audioEl) this.audioEl.src = "";
    this.audioEl = null;
    this.micStream?.getTracks().forEach((t) => t.stop());
    this.micStream = null;
    try {
      this.musicSource?.disconnect();
    } catch {
    }
    try {
      this.micSource?.disconnect();
    } catch {
    }
    this.musicSource = null;
    this.micSource = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.ctx?.close();
    this.ctx = null;
  }
};
|
|
175
|
+
|
|
176
|
+
// src/react.ts
|
|
177
|
+
var import_react = require("react");
|
|
178
|
+
// React hook wrapper around AudioBands. Lazily creates a single instance per
// component, mirrors its callbacks into state, and destroys it on unmount.
function useAudioBands() {
  const { useState, useRef, useCallback, useEffect } = import_react;
  const [isPlaying, setIsPlaying] = useState(false);
  const [micActive, setMicActive] = useState(false);
  const [audioError, setAudioError] = useState(false);
  const instance = useRef(null);
  if (instance.current === null) {
    instance.current = new AudioBands({
      onPlay: () => setIsPlaying(true),
      onPause: () => setIsPlaying(false),
      onError: () => setAudioError(true),
      onMicStart: () => setMicActive(true),
      onMicStop: () => setMicActive(false)
    });
  }
  // Release the audio graph when the component unmounts.
  useEffect(() => () => instance.current?.destroy(), []);
  const loadTrack = useCallback(async (url) => {
    setAudioError(false);
    await instance.current.load(url);
  }, []);
  const togglePlayPause = useCallback(() => instance.current.togglePlayPause(), []);
  const toggleMic = useCallback(async () => {
    if (micActive) {
      instance.current.disableMic();
    } else {
      await instance.current.enableMic();
    }
  }, [micActive]);
  const getBands = useCallback((source) => instance.current.getBands(source), []);
  const getFftData = useCallback((source) => instance.current.getFftData(source), []);
  const getWaveform = useCallback(() => instance.current.getWaveform(), []);
  return {
    isPlaying,
    micActive,
    audioError,
    loadTrack,
    togglePlayPause,
    toggleMic,
    getBands,
    getFftData,
    getWaveform
  };
}
|
|
230
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
231
|
+
0 && (module.exports = {
|
|
232
|
+
AudioBands,
|
|
233
|
+
useAudioBands
|
|
234
|
+
});
|
|
235
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/core.ts","../src/react.ts"],"sourcesContent":["// Vanilla JS — use in any framework or plain HTML\nexport { AudioBands } from './core';\n\n// React hook — wrapper over AudioBands\nexport { useAudioBands } from './react';\n\n// Types\nexport type { Bands, AudioSource, AudioBandsCallbacks } from './types';\nexport type { UseAudioBandsReturn } from './react';\n","import type { Bands, AudioSource, AudioBandsCallbacks } from './types';\n\nconst ZERO: Bands = { bass: 0, mid: 0, high: 0, overall: 0 };\n\nfunction avg(arr: Uint8Array<ArrayBuffer>, from: number, to: number): number {\n let sum = 0;\n for (let i = from; i < to; i++) sum += arr[i];\n return sum / (to - from);\n}\n\nfunction computeBands(analyser: AnalyserNode, data: Uint8Array<ArrayBuffer>): Bands {\n analyser.getByteFrequencyData(data);\n const len = data.length;\n const bass = avg(data, 0, Math.floor(len * 0.08));\n const mid = avg(data, Math.floor(len * 0.08), Math.floor(len * 0.4));\n const high = avg(data, Math.floor(len * 0.4), len);\n return {\n bass: bass / 255,\n mid: mid / 255,\n high: high / 255,\n overall: (bass * 0.5 + mid * 0.3 + high * 0.2) / 255,\n };\n}\n\n/**\n * Vanilla JS class — no framework dependency.\n * Works in React, Vue, Svelte, or plain HTML.\n *\n * Call destroy() when done to close the AudioContext and stop the mic.\n */\nexport class AudioBands {\n private callbacks: AudioBandsCallbacks;\n\n private ctx: AudioContext | null = null;\n private musicAnalyser: AnalyserNode | null = null;\n private musicData: Uint8Array<ArrayBuffer> | null = null;\n private micAnalyser: AnalyserNode | null = null;\n private micData: Uint8Array<ArrayBuffer> | null = null;\n private micWaveformData: Uint8Array<ArrayBuffer> | null = null;\n private audioEl: HTMLAudioElement | null = null;\n private musicSource: MediaElementAudioSourceNode | null = null;\n private micSource: MediaStreamAudioSourceNode | null = null;\n private micStream: MediaStream | 
null = null;\n\n constructor(callbacks: AudioBandsCallbacks = {}) {\n this.callbacks = callbacks;\n }\n\n // Lazy — AudioContext must be created after a user gesture\n private ensureCtx(): AudioContext {\n if (this.ctx) return this.ctx;\n\n const Ctx =\n window.AudioContext ||\n (window as unknown as { webkitAudioContext: typeof AudioContext }).webkitAudioContext;\n\n const ctx = new Ctx();\n const analyser = ctx.createAnalyser();\n analyser.fftSize = 256;\n analyser.smoothingTimeConstant = 0.85;\n analyser.connect(ctx.destination);\n\n this.ctx = ctx;\n this.musicAnalyser = analyser;\n this.musicData = new Uint8Array(analyser.frequencyBinCount) as Uint8Array<ArrayBuffer>;\n\n return ctx;\n }\n\n async load(url: string): Promise<void> {\n const ctx = this.ensureCtx();\n\n this.audioEl?.pause();\n if (this.audioEl) this.audioEl.src = '';\n try { this.musicSource?.disconnect(); } catch { /* already disconnected */ }\n\n const audio = new Audio();\n audio.crossOrigin = 'anonymous';\n audio.src = url;\n audio.loop = true;\n this.audioEl = audio;\n\n const source = ctx.createMediaElementSource(audio);\n source.connect(this.musicAnalyser!);\n this.musicSource = source;\n\n try {\n await audio.play();\n this.callbacks.onPlay?.();\n } catch {\n this.callbacks.onError?.();\n }\n }\n\n togglePlayPause(): void {\n const audio = this.audioEl;\n if (!audio) return;\n if (audio.paused) {\n audio.play();\n this.callbacks.onPlay?.();\n } else {\n audio.pause();\n this.callbacks.onPause?.();\n }\n }\n\n async enableMic(): Promise<void> {\n const ctx = this.ensureCtx();\n\n try {\n const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });\n this.micStream = stream;\n\n const analyser = ctx.createAnalyser();\n analyser.fftSize = 256;\n analyser.smoothingTimeConstant = 0.8;\n this.micAnalyser = analyser;\n this.micData = new Uint8Array(analyser.frequencyBinCount) as Uint8Array<ArrayBuffer>;\n this.micWaveformData = new Uint8Array(analyser.fftSize) as 
Uint8Array<ArrayBuffer>;\n\n const source = ctx.createMediaStreamSource(stream);\n source.connect(analyser);\n // Not connected to destination — prevents mic feedback\n this.micSource = source;\n\n this.callbacks.onMicStart?.();\n } catch {\n console.warn('[audio-bands] Mic access denied');\n }\n }\n\n disableMic(): void {\n this.micStream?.getTracks().forEach((t) => t.stop());\n this.micStream = null;\n try { this.micSource?.disconnect(); } catch { /* already disconnected */ }\n this.micSource = null;\n this.micAnalyser = null;\n this.micData = null;\n this.micWaveformData = null;\n this.callbacks.onMicStop?.();\n }\n\n // Call inside requestAnimationFrame to get current frequency data\n getBands(source: AudioSource = 'music'): Bands {\n if (source === 'mic') {\n if (!this.micAnalyser || !this.micData) return { ...ZERO };\n return computeBands(this.micAnalyser, this.micData);\n }\n if (!this.musicAnalyser || !this.musicData) return { ...ZERO };\n return computeBands(this.musicAnalyser, this.musicData);\n }\n\n // Call inside requestAnimationFrame to get raw FFT frequency bins (0–255 per bin)\n getFftData(source: AudioSource = 'music'): Uint8Array<ArrayBuffer> | null {\n if (source === 'mic') {\n if (!this.micAnalyser || !this.micData) return null;\n this.micAnalyser.getByteFrequencyData(this.micData);\n return this.micData;\n }\n if (!this.musicAnalyser || !this.musicData) return null;\n this.musicAnalyser.getByteFrequencyData(this.musicData);\n return this.musicData;\n }\n\n // Call inside requestAnimationFrame to get raw time-domain waveform\n getWaveform(): Uint8Array<ArrayBuffer> | null {\n if (!this.micAnalyser || !this.micWaveformData) return null;\n this.micAnalyser.getByteTimeDomainData(this.micWaveformData);\n return this.micWaveformData;\n }\n\n // Call when done — stops mic, closes AudioContext\n destroy(): void {\n this.audioEl?.pause();\n this.micStream?.getTracks().forEach((t) => t.stop());\n this.ctx?.close();\n }\n}\n","'use client';\n\nimport { 
useRef, useState, useCallback, useEffect } from 'react';\nimport { AudioBands } from './core';\nimport type { Bands, AudioSource } from './types';\n\nexport type UseAudioBandsReturn = {\n isPlaying: boolean;\n micActive: boolean;\n audioError: boolean;\n loadTrack: (url: string) => Promise<void>;\n togglePlayPause: () => void;\n toggleMic: () => Promise<void>;\n getBands: (source?: AudioSource) => Bands;\n getFftData: (source?: AudioSource) => Uint8Array<ArrayBuffer> | null;\n getWaveform: () => Uint8Array<ArrayBuffer> | null;\n};\n\n/**\n * React hook — thin wrapper over AudioBands.\n * Handles lifecycle (destroy on unmount) and exposes state for re-renders.\n */\nexport function useAudioBands(): UseAudioBandsReturn {\n const [isPlaying, setIsPlaying] = useState(false);\n const [micActive, setMicActive] = useState(false);\n const [audioError, setAudioError] = useState(false);\n\n const instance = useRef<AudioBands | null>(null);\n\n if (!instance.current) {\n instance.current = new AudioBands({\n onPlay: () => setIsPlaying(true),\n onPause: () => setIsPlaying(false),\n onError: () => setAudioError(true),\n onMicStart: () => setMicActive(true),\n onMicStop: () => setMicActive(false),\n });\n }\n\n useEffect(() => {\n return () => instance.current?.destroy();\n }, []);\n\n const loadTrack = useCallback(async (url: string) => {\n setAudioError(false);\n await instance.current!.load(url);\n }, []);\n\n const togglePlayPause = useCallback(() => {\n instance.current!.togglePlayPause();\n }, []);\n\n const toggleMic = useCallback(async () => {\n if (micActive) {\n instance.current!.disableMic();\n } else {\n await instance.current!.enableMic();\n }\n }, [micActive]);\n\n const getBands = useCallback((source?: AudioSource) => {\n return instance.current!.getBands(source);\n }, []);\n\n const getFftData = useCallback((source?: AudioSource) => {\n return instance.current!.getFftData(source);\n }, []);\n\n const getWaveform = useCallback(() => {\n return 
instance.current!.getWaveform();\n }, []);\n\n return {\n isPlaying,\n micActive,\n audioError,\n loadTrack,\n togglePlayPause,\n toggleMic,\n getBands,\n getFftData,\n getWaveform,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACEA,IAAM,OAAc,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,GAAG,SAAS,EAAE;AAE3D,SAAS,IAAI,KAA8B,MAAc,IAAoB;AAC3E,MAAI,MAAM;AACV,WAAS,IAAI,MAAM,IAAI,IAAI,IAAK,QAAO,IAAI,CAAC;AAC5C,SAAO,OAAO,KAAK;AACrB;AAEA,SAAS,aAAa,UAAwB,MAAsC;AAClF,WAAS,qBAAqB,IAAI;AAClC,QAAM,MAAM,KAAK;AACjB,QAAM,OAAO,IAAI,MAAM,GAAG,KAAK,MAAM,MAAM,IAAI,CAAC;AAChD,QAAM,MAAM,IAAI,MAAM,KAAK,MAAM,MAAM,IAAI,GAAG,KAAK,MAAM,MAAM,GAAG,CAAC;AACnE,QAAM,OAAO,IAAI,MAAM,KAAK,MAAM,MAAM,GAAG,GAAG,GAAG;AACjD,SAAO;AAAA,IACL,MAAM,OAAO;AAAA,IACb,KAAK,MAAM;AAAA,IACX,MAAM,OAAO;AAAA,IACb,UAAU,OAAO,MAAM,MAAM,MAAM,OAAO,OAAO;AAAA,EACnD;AACF;AAQO,IAAM,aAAN,MAAiB;AAAA,EActB,YAAY,YAAiC,CAAC,GAAG;AAXjD,SAAQ,MAA2B;AACnC,SAAQ,gBAAqC;AAC7C,SAAQ,YAA4C;AACpD,SAAQ,cAAmC;AAC3C,SAAQ,UAA0C;AAClD,SAAQ,kBAAkD;AAC1D,SAAQ,UAAmC;AAC3C,SAAQ,cAAkD;AAC1D,SAAQ,YAA+C;AACvD,SAAQ,YAAgC;AAGtC,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGQ,YAA0B;AAChC,QAAI,KAAK,IAAK,QAAO,KAAK;AAE1B,UAAM,MACJ,OAAO,gBACN,OAAkE;AAErE,UAAM,MAAM,IAAI,IAAI;AACpB,UAAM,WAAW,IAAI,eAAe;AACpC,aAAS,UAAU;AACnB,aAAS,wBAAwB;AACjC,aAAS,QAAQ,IAAI,WAAW;AAEhC,SAAK,MAAM;AACX,SAAK,gBAAgB;AACrB,SAAK,YAAY,IAAI,WAAW,SAAS,iBAAiB;AAE1D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,KAAK,KAA4B;AACrC,UAAM,MAAM,KAAK,UAAU;AAE3B,SAAK,SAAS,MAAM;AACpB,QAAI,KAAK,QAAS,MAAK,QAAQ,MAAM;AACrC,QAAI;AAAE,WAAK,aAAa,WAAW;AAAA,IAAG,QAAQ;AAAA,IAA6B;AAE3E,UAAM,QAAQ,IAAI,MAAM;AACxB,UAAM,cAAc;AACpB,UAAM,MAAM;AACZ,UAAM,OAAO;AACb,SAAK,UAAU;AAEf,UAAM,SAAS,IAAI,yBAAyB,KAAK;AACjD,WAAO,QAAQ,KAAK,aAAc;AAClC,SAAK,cAAc;AAEnB,QAAI;AACF,YAAM,MAAM,KAAK;AACjB,WAAK,UAAU,SAAS;AAAA,IAC1B,QAAQ;AACN,WAAK,UAAU,UAAU;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,kBAAwB;AACtB,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAO;AACZ,QAAI,MAAM,QAAQ;AAChB,YAAM,KAAK;AACX,WAAK,UAAU,SAAS;AAAA,IAC1B,OAAO;AACL,YAAM,MAAM;AACZ,WAAK,UAAU,UAAU;AAAA,IAC3B;AAAA,EACF;
AAAA,EAEA,MAAM,YAA2B;AAC/B,UAAM,MAAM,KAAK,UAAU;AAE3B,QAAI;AACF,YAAM,SAAS,MAAM,UAAU,aAAa,aAAa,EAAE,OAAO,MAAM,OAAO,MAAM,CAAC;AACtF,WAAK,YAAY;AAEjB,YAAM,WAAW,IAAI,eAAe;AACpC,eAAS,UAAU;AACnB,eAAS,wBAAwB;AACjC,WAAK,cAAc;AACnB,WAAK,UAAU,IAAI,WAAW,SAAS,iBAAiB;AACxD,WAAK,kBAAkB,IAAI,WAAW,SAAS,OAAO;AAEtD,YAAM,SAAS,IAAI,wBAAwB,MAAM;AACjD,aAAO,QAAQ,QAAQ;AAEvB,WAAK,YAAY;AAEjB,WAAK,UAAU,aAAa;AAAA,IAC9B,QAAQ;AACN,cAAQ,KAAK,iCAAiC;AAAA,IAChD;AAAA,EACF;AAAA,EAEA,aAAmB;AACjB,SAAK,WAAW,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AACnD,SAAK,YAAY;AACjB,QAAI;AAAE,WAAK,WAAW,WAAW;AAAA,IAAG,QAAQ;AAAA,IAA6B;AACzE,SAAK,YAAY;AACjB,SAAK,cAAc;AACnB,SAAK,UAAU;AACf,SAAK,kBAAkB;AACvB,SAAK,UAAU,YAAY;AAAA,EAC7B;AAAA;AAAA,EAGA,SAAS,SAAsB,SAAgB;AAC7C,QAAI,WAAW,OAAO;AACpB,UAAI,CAAC,KAAK,eAAe,CAAC,KAAK,QAAS,QAAO,EAAE,GAAG,KAAK;AACzD,aAAO,aAAa,KAAK,aAAa,KAAK,OAAO;AAAA,IACpD;AACA,QAAI,CAAC,KAAK,iBAAiB,CAAC,KAAK,UAAW,QAAO,EAAE,GAAG,KAAK;AAC7D,WAAO,aAAa,KAAK,eAAe,KAAK,SAAS;AAAA,EACxD;AAAA;AAAA,EAGA,WAAW,SAAsB,SAAyC;AACxE,QAAI,WAAW,OAAO;AACpB,UAAI,CAAC,KAAK,eAAe,CAAC,KAAK,QAAS,QAAO;AAC/C,WAAK,YAAY,qBAAqB,KAAK,OAAO;AAClD,aAAO,KAAK;AAAA,IACd;AACA,QAAI,CAAC,KAAK,iBAAiB,CAAC,KAAK,UAAW,QAAO;AACnD,SAAK,cAAc,qBAAqB,KAAK,SAAS;AACtD,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAA8C;AAC5C,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,gBAAiB,QAAO;AACvD,SAAK,YAAY,sBAAsB,KAAK,eAAe;AAC3D,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,UAAgB;AACd,SAAK,SAAS,MAAM;AACpB,SAAK,WAAW,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AACnD,SAAK,KAAK,MAAM;AAAA,EAClB;AACF;;;AC/KA,mBAAyD;AAoBlD,SAAS,gBAAqC;AACnD,QAAM,CAAC,WAAW,YAAY,QAAI,uBAAS,KAAK;AAChD,QAAM,CAAC,WAAW,YAAY,QAAI,uBAAS,KAAK;AAChD,QAAM,CAAC,YAAY,aAAa,QAAI,uBAAS,KAAK;AAElD,QAAM,eAAW,qBAA0B,IAAI;AAE/C,MAAI,CAAC,SAAS,SAAS;AACrB,aAAS,UAAU,IAAI,WAAW;AAAA,MAChC,QAAQ,MAAM,aAAa,IAAI;AAAA,MAC/B,SAAS,MAAM,aAAa,KAAK;AAAA,MACjC,SAAS,MAAM,cAAc,IAAI;AAAA,MACjC,YAAY,MAAM,aAAa,IAAI;AAAA,MACnC,WAAW,MAAM,aAAa,KAAK;AAAA,IACrC,CAAC;AAAA,EACH;AAEA,8BAAU,MAAM;AACd,WAAO,MAAM,SAAS,SAAS,QAAQ;AAAA,EACzC,GAAG,CAAC,CAAC;AAEL,QAAM,gBAAY,0BAAY,OAAO,Q
AAgB;AACnD,kBAAc,KAAK;AACnB,UAAM,SAAS,QAAS,KAAK,GAAG;AAAA,EAClC,GAAG,CAAC,CAAC;AAEL,QAAM,sBAAkB,0BAAY,MAAM;AACxC,aAAS,QAAS,gBAAgB;AAAA,EACpC,GAAG,CAAC,CAAC;AAEL,QAAM,gBAAY,0BAAY,YAAY;AACxC,QAAI,WAAW;AACb,eAAS,QAAS,WAAW;AAAA,IAC/B,OAAO;AACL,YAAM,SAAS,QAAS,UAAU;AAAA,IACpC;AAAA,EACF,GAAG,CAAC,SAAS,CAAC;AAEd,QAAM,eAAW,0BAAY,CAAC,WAAyB;AACrD,WAAO,SAAS,QAAS,SAAS,MAAM;AAAA,EAC1C,GAAG,CAAC,CAAC;AAEL,QAAM,iBAAa,0BAAY,CAAC,WAAyB;AACvD,WAAO,SAAS,QAAS,WAAW,MAAM;AAAA,EAC5C,GAAG,CAAC,CAAC;AAEL,QAAM,kBAAc,0BAAY,MAAM;AACpC,WAAO,SAAS,QAAS,YAAY;AAAA,EACvC,GAAG,CAAC,CAAC;AAEL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
type Bands = {
|
|
2
|
+
bass: number;
|
|
3
|
+
mid: number;
|
|
4
|
+
high: number;
|
|
5
|
+
overall: number;
|
|
6
|
+
};
|
|
7
|
+
type AudioSource = 'music' | 'mic';
|
|
8
|
+
type AudioBandsCallbacks = {
|
|
9
|
+
onPlay?: () => void;
|
|
10
|
+
onPause?: () => void;
|
|
11
|
+
onError?: () => void;
|
|
12
|
+
onMicStart?: () => void;
|
|
13
|
+
onMicStop?: () => void;
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Vanilla JS class — no framework dependency.
|
|
18
|
+
* Works in React, Vue, Svelte, or plain HTML.
|
|
19
|
+
*
|
|
20
|
+
* Call destroy() when done to close the AudioContext and stop the mic.
|
|
21
|
+
*/
|
|
22
|
+
declare class AudioBands {
|
|
23
|
+
private callbacks;
|
|
24
|
+
private ctx;
|
|
25
|
+
private musicAnalyser;
|
|
26
|
+
private musicData;
|
|
27
|
+
private micAnalyser;
|
|
28
|
+
private micData;
|
|
29
|
+
private micWaveformData;
|
|
30
|
+
private audioEl;
|
|
31
|
+
private musicSource;
|
|
32
|
+
private micSource;
|
|
33
|
+
private micStream;
|
|
34
|
+
constructor(callbacks?: AudioBandsCallbacks);
|
|
35
|
+
private ensureCtx;
|
|
36
|
+
load(url: string): Promise<void>;
|
|
37
|
+
togglePlayPause(): void;
|
|
38
|
+
enableMic(): Promise<void>;
|
|
39
|
+
disableMic(): void;
|
|
40
|
+
getBands(source?: AudioSource): Bands;
|
|
41
|
+
getFftData(source?: AudioSource): Uint8Array<ArrayBuffer> | null;
|
|
42
|
+
getWaveform(): Uint8Array<ArrayBuffer> | null;
|
|
43
|
+
destroy(): void;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
type UseAudioBandsReturn = {
|
|
47
|
+
isPlaying: boolean;
|
|
48
|
+
micActive: boolean;
|
|
49
|
+
audioError: boolean;
|
|
50
|
+
loadTrack: (url: string) => Promise<void>;
|
|
51
|
+
togglePlayPause: () => void;
|
|
52
|
+
toggleMic: () => Promise<void>;
|
|
53
|
+
getBands: (source?: AudioSource) => Bands;
|
|
54
|
+
getFftData: (source?: AudioSource) => Uint8Array<ArrayBuffer> | null;
|
|
55
|
+
getWaveform: () => Uint8Array<ArrayBuffer> | null;
|
|
56
|
+
};
|
|
57
|
+
/**
 * React hook — thin wrapper over AudioBands.
 * Handles lifecycle (destroy on unmount) and exposes state for re-renders.
 *
 * The getBands/getFftData/getWaveform getters are stable references and do
 * not trigger re-renders; poll them inside requestAnimationFrame.
 */
declare function useAudioBands(): UseAudioBandsReturn;

export { AudioBands, type AudioBandsCallbacks, type AudioSource, type Bands, type UseAudioBandsReturn, useAudioBands };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
 * Frequency-band levels, each normalized to the 0–1 range.
 * `overall` is a weighted mix of the raw bands (bass 0.5, mid 0.3, high 0.2).
 */
type Bands = {
    bass: number;
    mid: number;
    high: number;
    overall: number;
};
/** Which analyser to read from: the loaded track or the microphone. */
type AudioSource = 'music' | 'mic';
/** Optional lifecycle hooks fired by AudioBands. */
type AudioBandsCallbacks = {
    /** Track started (or resumed) playing. */
    onPlay?: () => void;
    /** Track was paused. */
    onPause?: () => void;
    /** Track playback failed (e.g. autoplay policy, bad URL). */
    onError?: () => void;
    /** Microphone capture started. */
    onMicStart?: () => void;
    /** Microphone capture stopped. */
    onMicStop?: () => void;
};
|
|
15
|
+
|
|
16
|
+
/**
 * Vanilla JS class — no framework dependency.
 * Works in React, Vue, Svelte, or plain HTML.
 *
 * The AudioContext is created lazily on the first load()/enableMic() call
 * (it must happen after a user gesture). Call destroy() when done to close
 * the AudioContext and stop the mic.
 */
declare class AudioBands {
    /** Optional lifecycle hooks: onPlay/onPause/onError/onMicStart/onMicStop. */
    private callbacks;
    /** Shared AudioContext; null until first load()/enableMic(). */
    private ctx;
    /** Analyser for the loaded track (fftSize 256, routed to the speakers). */
    private musicAnalyser;
    /** Reusable byte buffer for music-frequency reads. */
    private musicData;
    /** Analyser for the microphone (fftSize 256, NOT routed to the speakers). */
    private micAnalyser;
    /** Reusable byte buffer for mic-frequency reads. */
    private micData;
    /** Reusable byte buffer for mic time-domain (waveform) reads. */
    private micWaveformData;
    /** Looping HTMLAudioElement created by load(); null before first load(). */
    private audioEl;
    /** MediaElementSource node feeding musicAnalyser. */
    private musicSource;
    /** MediaStreamSource node feeding micAnalyser. */
    private micSource;
    /** getUserMedia stream; its tracks are stopped by disableMic()/destroy(). */
    private micStream;
    constructor(callbacks?: AudioBandsCallbacks);
    /** Lazily creates the AudioContext + music analyser on first use. */
    private ensureCtx;
    /**
     * Load a track by URL and start looping playback, replacing any
     * previous track. Fires onPlay on success, onError if play() rejects.
     */
    load(url: string): Promise<void>;
    /** Toggle playback of the loaded track; no-op when nothing is loaded. */
    togglePlayPause(): void;
    /**
     * Request mic access and start analysing it (fires onMicStart).
     * On denial it logs a console warning; onError is NOT fired.
     */
    enableMic(): Promise<void>;
    /** Stop mic capture, release the stream, drop mic state (fires onMicStop). */
    disableMic(): void;
    /** Normalized 0–1 band levels; all zeros when the source is not active. */
    getBands(source?: AudioSource): Bands;
    /**
     * Raw FFT bins (0–255 each) or null when the source is not active.
     * The returned array is an internal buffer reused on every call.
     */
    getFftData(source?: AudioSource): Uint8Array<ArrayBuffer> | null;
    /** Mic time-domain waveform, or null when the mic is off. */
    getWaveform(): Uint8Array<ArrayBuffer> | null;
    /** Stop the mic, pause audio, and close the AudioContext. */
    destroy(): void;
}
|
|
45
|
+
|
|
46
|
+
/** Shape returned by the useAudioBands hook. */
type UseAudioBandsReturn = {
    /** True while the loaded track is playing. */
    isPlaying: boolean;
    /** True while the microphone is capturing. */
    micActive: boolean;
    /** True once a loadTrack() playback attempt has failed. */
    audioError: boolean;
    /** Load a track by URL and start looping playback. */
    loadTrack: (url: string) => Promise<void>;
    /** Toggle playback of the loaded track. */
    togglePlayPause: () => void;
    /** Request mic access, or stop the mic if already active. */
    toggleMic: () => Promise<void>;
    /** Read current 0–1 band levels (call each animation frame). */
    getBands: (source?: AudioSource) => Bands;
    /** Raw FFT bins (0–255 each), or null when the source is not active. */
    getFftData: (source?: AudioSource) => Uint8Array<ArrayBuffer> | null;
    /** Mic time-domain waveform, or null when the mic is off. */
    getWaveform: () => Uint8Array<ArrayBuffer> | null;
};
|
|
57
|
+
/**
 * React hook — thin wrapper over AudioBands.
 * Handles lifecycle (destroy on unmount) and exposes state for re-renders.
 *
 * The getBands/getFftData/getWaveform getters are stable references and do
 * not trigger re-renders; poll them inside requestAnimationFrame.
 */
declare function useAudioBands(): UseAudioBandsReturn;

export { AudioBands, type AudioBandsCallbacks, type AudioSource, type Bands, type UseAudioBandsReturn, useAudioBands };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
// src/core.ts
// Neutral band result returned when the requested source has no analyser yet.
var ZERO = { bass: 0, mid: 0, high: 0, overall: 0 };
|
|
3
|
+
/**
 * Mean of arr[from..to) (half-open range).
 * Returns 0 for an empty range instead of NaN (the original computed 0/0),
 * so degenerate band boundaries on very small bins are handled safely.
 */
function avg(arr, from, to) {
  if (to <= from) return 0;
  let sum = 0;
  for (let i = from; i < to; i++) sum += arr[i];
  return sum / (to - from);
}
|
|
8
|
+
/**
 * Samples the analyser's current spectrum into `data` and reduces it to
 * normalized (0–1) bass/mid/high averages plus a weighted overall level.
 * Band boundaries: bass = first 8% of bins, mid = 8–40%, high = 40–100%.
 */
function computeBands(analyser, data) {
  analyser.getByteFrequencyData(data);
  const bins = data.length;
  const bassEnd = Math.floor(bins * 0.08);
  const midEnd = Math.floor(bins * 0.4);
  const rawBass = avg(data, 0, bassEnd);
  const rawMid = avg(data, bassEnd, midEnd);
  const rawHigh = avg(data, midEnd, bins);
  return {
    bass: rawBass / 255,
    mid: rawMid / 255,
    high: rawHigh / 255,
    overall: (rawBass * 0.5 + rawMid * 0.3 + rawHigh * 0.2) / 255
  };
}
|
|
21
|
+
/**
 * Vanilla JS class — no framework dependency.
 * Works in React, Vue, Svelte, or plain HTML.
 *
 * Call destroy() when done to close the AudioContext and stop the mic.
 */
var AudioBands = class {
  /** @param callbacks optional onPlay/onPause/onError/onMicStart/onMicStop hooks */
  constructor(callbacks = {}) {
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.audioEl = null;
    this.musicSource = null;
    this.micSource = null;
    this.micStream = null;
    this.callbacks = callbacks;
  }
  // Lazy — AudioContext must be created after a user gesture.
  // Re-entry also resumes a context the browser auto-suspended (autoplay policy).
  ensureCtx() {
    if (this.ctx) {
      if (this.ctx.state === "suspended") this.ctx.resume().catch(() => {});
      return this.ctx;
    }
    const Ctx = window.AudioContext || window.webkitAudioContext;
    const ctx = new Ctx();
    const analyser = ctx.createAnalyser();
    analyser.fftSize = 256;
    analyser.smoothingTimeConstant = 0.85;
    analyser.connect(ctx.destination);
    this.ctx = ctx;
    this.musicAnalyser = analyser;
    this.musicData = new Uint8Array(analyser.frequencyBinCount);
    return ctx;
  }
  /**
   * Load a track by URL and start looping playback, replacing any previous
   * track. Fires onPlay on success, onError if play() rejects.
   */
  async load(url) {
    const ctx = this.ensureCtx();
    this.audioEl?.pause();
    if (this.audioEl) this.audioEl.src = "";
    try {
      this.musicSource?.disconnect();
    } catch {
      // already disconnected
    }
    const audio = new Audio();
    audio.crossOrigin = "anonymous";
    audio.src = url;
    audio.loop = true;
    this.audioEl = audio;
    const source = ctx.createMediaElementSource(audio);
    source.connect(this.musicAnalyser);
    this.musicSource = source;
    try {
      await audio.play();
      this.callbacks.onPlay?.();
    } catch {
      this.callbacks.onError?.();
    }
  }
  /** Toggle playback of the loaded track; no-op when nothing is loaded. */
  togglePlayPause() {
    const audio = this.audioEl;
    if (!audio) return;
    if (audio.paused) {
      // play() returns a promise that can reject (autoplay policy, bad
      // source). The previous fire-and-forget call leaked an unhandled
      // rejection and reported onPlay even when playback never started.
      audio.play().then(
        () => this.callbacks.onPlay?.(),
        () => this.callbacks.onError?.()
      );
    } else {
      audio.pause();
      this.callbacks.onPause?.();
    }
  }
  /**
   * Request microphone access and start analysing it (fires onMicStart).
   * On denial it logs a console warning; onError is NOT fired.
   */
  async enableMic() {
    const ctx = this.ensureCtx();
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
      this.micStream = stream;
      const analyser = ctx.createAnalyser();
      analyser.fftSize = 256;
      analyser.smoothingTimeConstant = 0.8;
      this.micAnalyser = analyser;
      this.micData = new Uint8Array(analyser.frequencyBinCount);
      this.micWaveformData = new Uint8Array(analyser.fftSize);
      const source = ctx.createMediaStreamSource(stream);
      source.connect(analyser);
      // Not connected to ctx.destination — prevents mic feedback.
      this.micSource = source;
      this.callbacks.onMicStart?.();
    } catch {
      console.warn("[audio-bands] Mic access denied");
    }
  }
  /** Stop mic capture, release the stream, drop mic state (fires onMicStop). */
  disableMic() {
    this.micStream?.getTracks().forEach((t) => t.stop());
    this.micStream = null;
    try {
      this.micSource?.disconnect();
    } catch {
      // already disconnected
    }
    this.micSource = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.callbacks.onMicStop?.();
  }
  // Call inside requestAnimationFrame to get current frequency data.
  // Returns all-zero bands when the requested source is not active.
  getBands(source = "music") {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return { ...ZERO };
      return computeBands(this.micAnalyser, this.micData);
    }
    if (!this.musicAnalyser || !this.musicData) return { ...ZERO };
    return computeBands(this.musicAnalyser, this.musicData);
  }
  // Call inside requestAnimationFrame to get raw FFT frequency bins (0–255 per bin).
  // NOTE: the returned Uint8Array is an internal buffer reused on every call.
  getFftData(source = "music") {
    if (source === "mic") {
      if (!this.micAnalyser || !this.micData) return null;
      this.micAnalyser.getByteFrequencyData(this.micData);
      return this.micData;
    }
    if (!this.musicAnalyser || !this.musicData) return null;
    this.musicAnalyser.getByteFrequencyData(this.musicData);
    return this.musicData;
  }
  // Call inside requestAnimationFrame to get the mic's raw time-domain waveform.
  getWaveform() {
    if (!this.micAnalyser || !this.micWaveformData) return null;
    this.micAnalyser.getByteTimeDomainData(this.micWaveformData);
    return this.micWaveformData;
  }
  // Call when done — stops the mic, tears down the audio graph, closes the
  // AudioContext, and clears all references so the instance is inert.
  destroy() {
    this.audioEl?.pause();
    if (this.audioEl) this.audioEl.src = ""; // release the media resource
    this.micStream?.getTracks().forEach((t) => t.stop());
    // Disconnect the graph so the nodes (and the media element pinned by
    // the MediaElementSource) can be garbage-collected.
    try {
      this.musicSource?.disconnect();
    } catch {
      // already disconnected
    }
    try {
      this.micSource?.disconnect();
    } catch {
      // already disconnected
    }
    this.ctx?.close().catch(() => {}); // close() rejects if already closed
    this.ctx = null;
    this.musicAnalyser = null;
    this.musicData = null;
    this.micAnalyser = null;
    this.micData = null;
    this.micWaveformData = null;
    this.audioEl = null;
    this.musicSource = null;
    this.micSource = null;
    this.micStream = null;
  }
};
|
|
148
|
+
|
|
149
|
+
// src/react.ts
|
|
150
|
+
import { useRef, useState, useCallback, useEffect } from "react";
|
|
151
|
+
function useAudioBands() {
|
|
152
|
+
const [isPlaying, setIsPlaying] = useState(false);
|
|
153
|
+
const [micActive, setMicActive] = useState(false);
|
|
154
|
+
const [audioError, setAudioError] = useState(false);
|
|
155
|
+
const instance = useRef(null);
|
|
156
|
+
if (!instance.current) {
|
|
157
|
+
instance.current = new AudioBands({
|
|
158
|
+
onPlay: () => setIsPlaying(true),
|
|
159
|
+
onPause: () => setIsPlaying(false),
|
|
160
|
+
onError: () => setAudioError(true),
|
|
161
|
+
onMicStart: () => setMicActive(true),
|
|
162
|
+
onMicStop: () => setMicActive(false)
|
|
163
|
+
});
|
|
164
|
+
}
|
|
165
|
+
useEffect(() => {
|
|
166
|
+
return () => instance.current?.destroy();
|
|
167
|
+
}, []);
|
|
168
|
+
const loadTrack = useCallback(async (url) => {
|
|
169
|
+
setAudioError(false);
|
|
170
|
+
await instance.current.load(url);
|
|
171
|
+
}, []);
|
|
172
|
+
const togglePlayPause = useCallback(() => {
|
|
173
|
+
instance.current.togglePlayPause();
|
|
174
|
+
}, []);
|
|
175
|
+
const toggleMic = useCallback(async () => {
|
|
176
|
+
if (micActive) {
|
|
177
|
+
instance.current.disableMic();
|
|
178
|
+
} else {
|
|
179
|
+
await instance.current.enableMic();
|
|
180
|
+
}
|
|
181
|
+
}, [micActive]);
|
|
182
|
+
const getBands = useCallback((source) => {
|
|
183
|
+
return instance.current.getBands(source);
|
|
184
|
+
}, []);
|
|
185
|
+
const getFftData = useCallback((source) => {
|
|
186
|
+
return instance.current.getFftData(source);
|
|
187
|
+
}, []);
|
|
188
|
+
const getWaveform = useCallback(() => {
|
|
189
|
+
return instance.current.getWaveform();
|
|
190
|
+
}, []);
|
|
191
|
+
return {
|
|
192
|
+
isPlaying,
|
|
193
|
+
micActive,
|
|
194
|
+
audioError,
|
|
195
|
+
loadTrack,
|
|
196
|
+
togglePlayPause,
|
|
197
|
+
toggleMic,
|
|
198
|
+
getBands,
|
|
199
|
+
getFftData,
|
|
200
|
+
getWaveform
|
|
201
|
+
};
|
|
202
|
+
}
|
|
203
|
+
export {
|
|
204
|
+
AudioBands,
|
|
205
|
+
useAudioBands
|
|
206
|
+
};
|
|
207
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/core.ts","../src/react.ts"],"sourcesContent":["import type { Bands, AudioSource, AudioBandsCallbacks } from './types';\n\nconst ZERO: Bands = { bass: 0, mid: 0, high: 0, overall: 0 };\n\nfunction avg(arr: Uint8Array<ArrayBuffer>, from: number, to: number): number {\n let sum = 0;\n for (let i = from; i < to; i++) sum += arr[i];\n return sum / (to - from);\n}\n\nfunction computeBands(analyser: AnalyserNode, data: Uint8Array<ArrayBuffer>): Bands {\n analyser.getByteFrequencyData(data);\n const len = data.length;\n const bass = avg(data, 0, Math.floor(len * 0.08));\n const mid = avg(data, Math.floor(len * 0.08), Math.floor(len * 0.4));\n const high = avg(data, Math.floor(len * 0.4), len);\n return {\n bass: bass / 255,\n mid: mid / 255,\n high: high / 255,\n overall: (bass * 0.5 + mid * 0.3 + high * 0.2) / 255,\n };\n}\n\n/**\n * Vanilla JS class — no framework dependency.\n * Works in React, Vue, Svelte, or plain HTML.\n *\n * Call destroy() when done to close the AudioContext and stop the mic.\n */\nexport class AudioBands {\n private callbacks: AudioBandsCallbacks;\n\n private ctx: AudioContext | null = null;\n private musicAnalyser: AnalyserNode | null = null;\n private musicData: Uint8Array<ArrayBuffer> | null = null;\n private micAnalyser: AnalyserNode | null = null;\n private micData: Uint8Array<ArrayBuffer> | null = null;\n private micWaveformData: Uint8Array<ArrayBuffer> | null = null;\n private audioEl: HTMLAudioElement | null = null;\n private musicSource: MediaElementAudioSourceNode | null = null;\n private micSource: MediaStreamAudioSourceNode | null = null;\n private micStream: MediaStream | null = null;\n\n constructor(callbacks: AudioBandsCallbacks = {}) {\n this.callbacks = callbacks;\n }\n\n // Lazy — AudioContext must be created after a user gesture\n private ensureCtx(): AudioContext {\n if (this.ctx) return this.ctx;\n\n const Ctx =\n window.AudioContext ||\n (window as unknown as { webkitAudioContext: typeof 
AudioContext }).webkitAudioContext;\n\n const ctx = new Ctx();\n const analyser = ctx.createAnalyser();\n analyser.fftSize = 256;\n analyser.smoothingTimeConstant = 0.85;\n analyser.connect(ctx.destination);\n\n this.ctx = ctx;\n this.musicAnalyser = analyser;\n this.musicData = new Uint8Array(analyser.frequencyBinCount) as Uint8Array<ArrayBuffer>;\n\n return ctx;\n }\n\n async load(url: string): Promise<void> {\n const ctx = this.ensureCtx();\n\n this.audioEl?.pause();\n if (this.audioEl) this.audioEl.src = '';\n try { this.musicSource?.disconnect(); } catch { /* already disconnected */ }\n\n const audio = new Audio();\n audio.crossOrigin = 'anonymous';\n audio.src = url;\n audio.loop = true;\n this.audioEl = audio;\n\n const source = ctx.createMediaElementSource(audio);\n source.connect(this.musicAnalyser!);\n this.musicSource = source;\n\n try {\n await audio.play();\n this.callbacks.onPlay?.();\n } catch {\n this.callbacks.onError?.();\n }\n }\n\n togglePlayPause(): void {\n const audio = this.audioEl;\n if (!audio) return;\n if (audio.paused) {\n audio.play();\n this.callbacks.onPlay?.();\n } else {\n audio.pause();\n this.callbacks.onPause?.();\n }\n }\n\n async enableMic(): Promise<void> {\n const ctx = this.ensureCtx();\n\n try {\n const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });\n this.micStream = stream;\n\n const analyser = ctx.createAnalyser();\n analyser.fftSize = 256;\n analyser.smoothingTimeConstant = 0.8;\n this.micAnalyser = analyser;\n this.micData = new Uint8Array(analyser.frequencyBinCount) as Uint8Array<ArrayBuffer>;\n this.micWaveformData = new Uint8Array(analyser.fftSize) as Uint8Array<ArrayBuffer>;\n\n const source = ctx.createMediaStreamSource(stream);\n source.connect(analyser);\n // Not connected to destination — prevents mic feedback\n this.micSource = source;\n\n this.callbacks.onMicStart?.();\n } catch {\n console.warn('[audio-bands] Mic access denied');\n }\n }\n\n disableMic(): void {\n 
this.micStream?.getTracks().forEach((t) => t.stop());\n this.micStream = null;\n try { this.micSource?.disconnect(); } catch { /* already disconnected */ }\n this.micSource = null;\n this.micAnalyser = null;\n this.micData = null;\n this.micWaveformData = null;\n this.callbacks.onMicStop?.();\n }\n\n // Call inside requestAnimationFrame to get current frequency data\n getBands(source: AudioSource = 'music'): Bands {\n if (source === 'mic') {\n if (!this.micAnalyser || !this.micData) return { ...ZERO };\n return computeBands(this.micAnalyser, this.micData);\n }\n if (!this.musicAnalyser || !this.musicData) return { ...ZERO };\n return computeBands(this.musicAnalyser, this.musicData);\n }\n\n // Call inside requestAnimationFrame to get raw FFT frequency bins (0–255 per bin)\n getFftData(source: AudioSource = 'music'): Uint8Array<ArrayBuffer> | null {\n if (source === 'mic') {\n if (!this.micAnalyser || !this.micData) return null;\n this.micAnalyser.getByteFrequencyData(this.micData);\n return this.micData;\n }\n if (!this.musicAnalyser || !this.musicData) return null;\n this.musicAnalyser.getByteFrequencyData(this.musicData);\n return this.musicData;\n }\n\n // Call inside requestAnimationFrame to get raw time-domain waveform\n getWaveform(): Uint8Array<ArrayBuffer> | null {\n if (!this.micAnalyser || !this.micWaveformData) return null;\n this.micAnalyser.getByteTimeDomainData(this.micWaveformData);\n return this.micWaveformData;\n }\n\n // Call when done — stops mic, closes AudioContext\n destroy(): void {\n this.audioEl?.pause();\n this.micStream?.getTracks().forEach((t) => t.stop());\n this.ctx?.close();\n }\n}\n","'use client';\n\nimport { useRef, useState, useCallback, useEffect } from 'react';\nimport { AudioBands } from './core';\nimport type { Bands, AudioSource } from './types';\n\nexport type UseAudioBandsReturn = {\n isPlaying: boolean;\n micActive: boolean;\n audioError: boolean;\n loadTrack: (url: string) => Promise<void>;\n togglePlayPause: () => 
void;\n toggleMic: () => Promise<void>;\n getBands: (source?: AudioSource) => Bands;\n getFftData: (source?: AudioSource) => Uint8Array<ArrayBuffer> | null;\n getWaveform: () => Uint8Array<ArrayBuffer> | null;\n};\n\n/**\n * React hook — thin wrapper over AudioBands.\n * Handles lifecycle (destroy on unmount) and exposes state for re-renders.\n */\nexport function useAudioBands(): UseAudioBandsReturn {\n const [isPlaying, setIsPlaying] = useState(false);\n const [micActive, setMicActive] = useState(false);\n const [audioError, setAudioError] = useState(false);\n\n const instance = useRef<AudioBands | null>(null);\n\n if (!instance.current) {\n instance.current = new AudioBands({\n onPlay: () => setIsPlaying(true),\n onPause: () => setIsPlaying(false),\n onError: () => setAudioError(true),\n onMicStart: () => setMicActive(true),\n onMicStop: () => setMicActive(false),\n });\n }\n\n useEffect(() => {\n return () => instance.current?.destroy();\n }, []);\n\n const loadTrack = useCallback(async (url: string) => {\n setAudioError(false);\n await instance.current!.load(url);\n }, []);\n\n const togglePlayPause = useCallback(() => {\n instance.current!.togglePlayPause();\n }, []);\n\n const toggleMic = useCallback(async () => {\n if (micActive) {\n instance.current!.disableMic();\n } else {\n await instance.current!.enableMic();\n }\n }, [micActive]);\n\n const getBands = useCallback((source?: AudioSource) => {\n return instance.current!.getBands(source);\n }, []);\n\n const getFftData = useCallback((source?: AudioSource) => {\n return instance.current!.getFftData(source);\n }, []);\n\n const getWaveform = useCallback(() => {\n return instance.current!.getWaveform();\n }, []);\n\n return {\n isPlaying,\n micActive,\n audioError,\n loadTrack,\n togglePlayPause,\n toggleMic,\n getBands,\n getFftData,\n getWaveform,\n 
};\n}\n"],"mappings":";AAEA,IAAM,OAAc,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,GAAG,SAAS,EAAE;AAE3D,SAAS,IAAI,KAA8B,MAAc,IAAoB;AAC3E,MAAI,MAAM;AACV,WAAS,IAAI,MAAM,IAAI,IAAI,IAAK,QAAO,IAAI,CAAC;AAC5C,SAAO,OAAO,KAAK;AACrB;AAEA,SAAS,aAAa,UAAwB,MAAsC;AAClF,WAAS,qBAAqB,IAAI;AAClC,QAAM,MAAM,KAAK;AACjB,QAAM,OAAO,IAAI,MAAM,GAAG,KAAK,MAAM,MAAM,IAAI,CAAC;AAChD,QAAM,MAAM,IAAI,MAAM,KAAK,MAAM,MAAM,IAAI,GAAG,KAAK,MAAM,MAAM,GAAG,CAAC;AACnE,QAAM,OAAO,IAAI,MAAM,KAAK,MAAM,MAAM,GAAG,GAAG,GAAG;AACjD,SAAO;AAAA,IACL,MAAM,OAAO;AAAA,IACb,KAAK,MAAM;AAAA,IACX,MAAM,OAAO;AAAA,IACb,UAAU,OAAO,MAAM,MAAM,MAAM,OAAO,OAAO;AAAA,EACnD;AACF;AAQO,IAAM,aAAN,MAAiB;AAAA,EActB,YAAY,YAAiC,CAAC,GAAG;AAXjD,SAAQ,MAA2B;AACnC,SAAQ,gBAAqC;AAC7C,SAAQ,YAA4C;AACpD,SAAQ,cAAmC;AAC3C,SAAQ,UAA0C;AAClD,SAAQ,kBAAkD;AAC1D,SAAQ,UAAmC;AAC3C,SAAQ,cAAkD;AAC1D,SAAQ,YAA+C;AACvD,SAAQ,YAAgC;AAGtC,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGQ,YAA0B;AAChC,QAAI,KAAK,IAAK,QAAO,KAAK;AAE1B,UAAM,MACJ,OAAO,gBACN,OAAkE;AAErE,UAAM,MAAM,IAAI,IAAI;AACpB,UAAM,WAAW,IAAI,eAAe;AACpC,aAAS,UAAU;AACnB,aAAS,wBAAwB;AACjC,aAAS,QAAQ,IAAI,WAAW;AAEhC,SAAK,MAAM;AACX,SAAK,gBAAgB;AACrB,SAAK,YAAY,IAAI,WAAW,SAAS,iBAAiB;AAE1D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,KAAK,KAA4B;AACrC,UAAM,MAAM,KAAK,UAAU;AAE3B,SAAK,SAAS,MAAM;AACpB,QAAI,KAAK,QAAS,MAAK,QAAQ,MAAM;AACrC,QAAI;AAAE,WAAK,aAAa,WAAW;AAAA,IAAG,QAAQ;AAAA,IAA6B;AAE3E,UAAM,QAAQ,IAAI,MAAM;AACxB,UAAM,cAAc;AACpB,UAAM,MAAM;AACZ,UAAM,OAAO;AACb,SAAK,UAAU;AAEf,UAAM,SAAS,IAAI,yBAAyB,KAAK;AACjD,WAAO,QAAQ,KAAK,aAAc;AAClC,SAAK,cAAc;AAEnB,QAAI;AACF,YAAM,MAAM,KAAK;AACjB,WAAK,UAAU,SAAS;AAAA,IAC1B,QAAQ;AACN,WAAK,UAAU,UAAU;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,kBAAwB;AACtB,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAO;AACZ,QAAI,MAAM,QAAQ;AAChB,YAAM,KAAK;AACX,WAAK,UAAU,SAAS;AAAA,IAC1B,OAAO;AACL,YAAM,MAAM;AACZ,WAAK,UAAU,UAAU;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,YAA2B;AAC/B,UAAM,MAAM,KAAK,UAAU;AAE3B,QAAI;AACF,YAAM,SAAS,MAAM,UAAU,aAAa,aAAa,EAAE,OAAO,MAAM,OAAO,MAAM,CAAC;AACtF,WAAK,YAAY;AAEjB,YAAM,WAAW,IAAI,eAAe;AACpC,eAAS,UAAU;AACnB,eAAS,wBAAwB;AACjC,WAAK,cAAc;AACnB,WAAK,UAAU,IAAI
,WAAW,SAAS,iBAAiB;AACxD,WAAK,kBAAkB,IAAI,WAAW,SAAS,OAAO;AAEtD,YAAM,SAAS,IAAI,wBAAwB,MAAM;AACjD,aAAO,QAAQ,QAAQ;AAEvB,WAAK,YAAY;AAEjB,WAAK,UAAU,aAAa;AAAA,IAC9B,QAAQ;AACN,cAAQ,KAAK,iCAAiC;AAAA,IAChD;AAAA,EACF;AAAA,EAEA,aAAmB;AACjB,SAAK,WAAW,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AACnD,SAAK,YAAY;AACjB,QAAI;AAAE,WAAK,WAAW,WAAW;AAAA,IAAG,QAAQ;AAAA,IAA6B;AACzE,SAAK,YAAY;AACjB,SAAK,cAAc;AACnB,SAAK,UAAU;AACf,SAAK,kBAAkB;AACvB,SAAK,UAAU,YAAY;AAAA,EAC7B;AAAA;AAAA,EAGA,SAAS,SAAsB,SAAgB;AAC7C,QAAI,WAAW,OAAO;AACpB,UAAI,CAAC,KAAK,eAAe,CAAC,KAAK,QAAS,QAAO,EAAE,GAAG,KAAK;AACzD,aAAO,aAAa,KAAK,aAAa,KAAK,OAAO;AAAA,IACpD;AACA,QAAI,CAAC,KAAK,iBAAiB,CAAC,KAAK,UAAW,QAAO,EAAE,GAAG,KAAK;AAC7D,WAAO,aAAa,KAAK,eAAe,KAAK,SAAS;AAAA,EACxD;AAAA;AAAA,EAGA,WAAW,SAAsB,SAAyC;AACxE,QAAI,WAAW,OAAO;AACpB,UAAI,CAAC,KAAK,eAAe,CAAC,KAAK,QAAS,QAAO;AAC/C,WAAK,YAAY,qBAAqB,KAAK,OAAO;AAClD,aAAO,KAAK;AAAA,IACd;AACA,QAAI,CAAC,KAAK,iBAAiB,CAAC,KAAK,UAAW,QAAO;AACnD,SAAK,cAAc,qBAAqB,KAAK,SAAS;AACtD,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAA8C;AAC5C,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,gBAAiB,QAAO;AACvD,SAAK,YAAY,sBAAsB,KAAK,eAAe;AAC3D,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,UAAgB;AACd,SAAK,SAAS,MAAM;AACpB,SAAK,WAAW,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AACnD,SAAK,KAAK,MAAM;AAAA,EAClB;AACF;;;AC/KA,SAAS,QAAQ,UAAU,aAAa,iBAAiB;AAoBlD,SAAS,gBAAqC;AACnD,QAAM,CAAC,WAAW,YAAY,IAAI,SAAS,KAAK;AAChD,QAAM,CAAC,WAAW,YAAY,IAAI,SAAS,KAAK;AAChD,QAAM,CAAC,YAAY,aAAa,IAAI,SAAS,KAAK;AAElD,QAAM,WAAW,OAA0B,IAAI;AAE/C,MAAI,CAAC,SAAS,SAAS;AACrB,aAAS,UAAU,IAAI,WAAW;AAAA,MAChC,QAAQ,MAAM,aAAa,IAAI;AAAA,MAC/B,SAAS,MAAM,aAAa,KAAK;AAAA,MACjC,SAAS,MAAM,cAAc,IAAI;AAAA,MACjC,YAAY,MAAM,aAAa,IAAI;AAAA,MACnC,WAAW,MAAM,aAAa,KAAK;AAAA,IACrC,CAAC;AAAA,EACH;AAEA,YAAU,MAAM;AACd,WAAO,MAAM,SAAS,SAAS,QAAQ;AAAA,EACzC,GAAG,CAAC,CAAC;AAEL,QAAM,YAAY,YAAY,OAAO,QAAgB;AACnD,kBAAc,KAAK;AACnB,UAAM,SAAS,QAAS,KAAK,GAAG;AAAA,EAClC,GAAG,CAAC,CAAC;AAEL,QAAM,kBAAkB,YAAY,MAAM;AACxC,aAAS,QAAS,gBAAgB;AAAA,EACpC,GAAG,CAAC,CAAC;AAEL,QAAM,YAAY,YAAY,YAAY;AACxC,QAAI,WAAW;AACb,eAAS,QAAS,WAAW;AAAA,I
AC/B,OAAO;AACL,YAAM,SAAS,QAAS,UAAU;AAAA,IACpC;AAAA,EACF,GAAG,CAAC,SAAS,CAAC;AAEd,QAAM,WAAW,YAAY,CAAC,WAAyB;AACrD,WAAO,SAAS,QAAS,SAAS,MAAM;AAAA,EAC1C,GAAG,CAAC,CAAC;AAEL,QAAM,aAAa,YAAY,CAAC,WAAyB;AACvD,WAAO,SAAS,QAAS,WAAW,MAAM;AAAA,EAC5C,GAAG,CAAC,CAAC;AAEL,QAAM,cAAc,YAAY,MAAM;AACpC,WAAO,SAAS,QAAS,YAAY;AAAA,EACvC,GAAG,CAAC,CAAC;AAEL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@juandinella/audio-bands",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Headless React hook for real-time audio frequency analysis. Get bass/mid/high bands from music or mic, without a renderer.",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"react",
|
|
7
|
+
"hook",
|
|
8
|
+
"audio",
|
|
9
|
+
"web-audio",
|
|
10
|
+
"frequency",
|
|
11
|
+
"visualizer",
|
|
12
|
+
"headless",
|
|
13
|
+
"bass",
|
|
14
|
+
"analyser"
|
|
15
|
+
],
|
|
16
|
+
"author": "Juan Dinella",
|
|
17
|
+
"license": "MIT",
|
|
18
|
+
"type": "module",
|
|
19
|
+
"main": "./dist/index.cjs",
|
|
20
|
+
"module": "./dist/index.js",
|
|
21
|
+
"types": "./dist/index.d.ts",
|
|
22
|
+
"exports": {
|
|
23
|
+
".": {
|
|
24
|
+
"types": "./dist/index.d.ts",
|
|
25
|
+
"import": "./dist/index.js",
|
|
26
|
+
"require": "./dist/index.cjs"
|
|
27
|
+
}
|
|
28
|
+
},
|
|
29
|
+
"files": [
|
|
30
|
+
"dist"
|
|
31
|
+
],
|
|
32
|
+
"scripts": {
|
|
33
|
+
"build": "tsup",
|
|
34
|
+
"dev": "tsup --watch"
|
|
35
|
+
},
|
|
36
|
+
"peerDependencies": {
|
|
37
|
+
"react": ">=18"
|
|
38
|
+
},
|
|
39
|
+
"devDependencies": {
|
|
40
|
+
"@types/react": "^19.0.0",
|
|
41
|
+
"tsup": "^8.0.0",
|
|
42
|
+
"typescript": "^5.0.0"
|
|
43
|
+
}
|
|
44
|
+
}
|