@djangocfg/ui-nextjs 2.1.82 → 2.1.83
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -4
- package/src/tools/AudioPlayer/@refactoring3/00-IMPLEMENTATION-ROADMAP.md +1146 -0
- package/src/tools/AudioPlayer/@refactoring3/01-WAVESURFER-STREAMING-ANALYSIS.md +611 -0
- package/src/tools/AudioPlayer/@refactoring3/02-MEDIA-VIEWER-ANALYSIS.md +560 -0
- package/src/tools/AudioPlayer/@refactoring3/03-HYBRID-ARCHITECTURE-PROPOSAL.md +769 -0
- package/src/tools/AudioPlayer/@refactoring3/04-CRACKLING-ISSUE-DIAGNOSIS.md +373 -0
- package/src/tools/AudioPlayer/README.md +177 -205
- package/src/tools/AudioPlayer/components/AudioPlayer.tsx +9 -4
- package/src/tools/AudioPlayer/components/HybridAudioPlayer.tsx +251 -0
- package/src/tools/AudioPlayer/components/HybridSimplePlayer.tsx +291 -0
- package/src/tools/AudioPlayer/components/HybridWaveform.tsx +279 -0
- package/src/tools/AudioPlayer/components/SimpleAudioPlayer.tsx +16 -26
- package/src/tools/AudioPlayer/components/index.ts +6 -1
- package/src/tools/AudioPlayer/context/AudioProvider.tsx +8 -3
- package/src/tools/AudioPlayer/context/HybridAudioProvider.tsx +121 -0
- package/src/tools/AudioPlayer/context/index.ts +14 -2
- package/src/tools/AudioPlayer/hooks/index.ts +11 -0
- package/src/tools/AudioPlayer/hooks/useHybridAudio.ts +387 -0
- package/src/tools/AudioPlayer/hooks/useHybridAudioAnalysis.ts +95 -0
- package/src/tools/AudioPlayer/hooks/useSharedWebAudio.ts +6 -3
- package/src/tools/AudioPlayer/index.ts +31 -0
- package/src/tools/AudioPlayer/progressive/ProgressiveAudioPlayer.tsx +8 -0
- package/src/tools/index.ts +22 -0
- package/src/tools/AudioPlayer/@refactoring/00-PLAN.md +0 -148
- package/src/tools/AudioPlayer/@refactoring/01-TYPES.md +0 -301
- package/src/tools/AudioPlayer/@refactoring/02-HOOKS.md +0 -281
- package/src/tools/AudioPlayer/@refactoring/03-CONTEXT.md +0 -328
- package/src/tools/AudioPlayer/@refactoring/04-COMPONENTS.md +0 -251
- package/src/tools/AudioPlayer/@refactoring/05-EFFECTS.md +0 -427
- package/src/tools/AudioPlayer/@refactoring/06-UTILS-AND-INDEX.md +0 -193
- package/src/tools/AudioPlayer/@refactoring/07-EXECUTION-CHECKLIST.md +0 -146
- package/src/tools/AudioPlayer/@refactoring2/ISSUE_ANALYSIS.md +0 -187
- package/src/tools/AudioPlayer/@refactoring2/PLAN.md +0 -372
|
@@ -0,0 +1,387 @@
|
|
|
1
|
+
'use client';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* useHybridAudio - Hybrid audio hook combining HTML5 audio + Web Audio API
|
|
5
|
+
*
|
|
6
|
+
* Uses native HTML5 <audio> for playback (no crackling, native streaming)
|
|
7
|
+
* and Web Audio API only for visualization (AnalyserNode).
|
|
8
|
+
*
|
|
9
|
+
* Audio routing:
|
|
10
|
+
* source -> destination (single path for playback)
|
|
11
|
+
* source -> analyser (parallel path for visualization only, no output)
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { useRef, useState, useCallback, useEffect } from 'react';
|
|
15
|
+
|
|
16
|
+
// =============================================================================
|
|
17
|
+
// TYPES
|
|
18
|
+
// =============================================================================
|
|
19
|
+
|
|
20
|
+
/** Configuration accepted by the `useHybridAudio` hook. */
export interface UseHybridAudioOptions {
  /** Audio source URL; assigned to the underlying HTML5 audio element. */
  src: string;
  /** When true, playback starts on the element's first `canplay` event. Default: false. */
  autoPlay?: boolean;
  /** Initial element volume in the 0..1 range. Default: 1. */
  initialVolume?: number;
  /** Initial loop flag for the audio element. Default: false. */
  loop?: boolean;
  /** CORS mode for the audio element. Default: 'anonymous'. */
  crossOrigin?: 'anonymous' | 'use-credentials';
  /** Fired on the element's `play` event. */
  onPlay?: () => void;
  /** Fired on the element's `pause` event. */
  onPause?: () => void;
  /** Fired on the element's `ended` event. */
  onEnded?: () => void;
  /** Fired on `timeupdate` with the element's current time in seconds. */
  onTimeUpdate?: (time: number) => void;
  /** Fired on a media `error` event or a failed `play()` call. */
  onError?: (error: Error) => void;
  /** Fired once metadata is loaded (`loadedmetadata`). */
  onReady?: () => void;
}

/** Snapshot of the audio element's observable state, mirrored into React state. */
export interface HybridAudioState {
  /** True after `loadedmetadata`/`canplay`; reset to false when `src` changes. */
  isReady: boolean;
  /** Tracks the element's play/pause/ended events. */
  isPlaying: boolean;
  /** Current playback position in seconds (updated on `timeupdate`). */
  currentTime: number;
  /** Media duration in seconds (from `loadedmetadata`). */
  duration: number;
  /** Element volume, 0..1. */
  volume: number;
  /** Mirrors the element's `muted` flag. */
  isMuted: boolean;
  /** Mirrors the element's `loop` flag. */
  isLooping: boolean;
  /** Buffered ranges from the element, updated on `progress` events. */
  buffered: TimeRanges | null;
  /** Last media/play error, if any. */
  error: Error | null;
}

/** Imperative playback controls returned by the hook. */
export interface HybridAudioControls {
  /** Starts playback (also lazily initializes/resumes Web Audio). */
  play: () => Promise<void>;
  /** Pauses playback. */
  pause: () => void;
  /** Plays if paused, pauses if playing. */
  togglePlay: () => void;
  /** Seeks to an absolute time in seconds (clamped to [0, duration]). */
  seek: (time: number) => void;
  /** Seeks to a relative position, 0..1 of the duration. */
  seekTo: (progress: number) => void;
  /** Seeks relative to the current time by the given number of seconds. */
  skip: (seconds: number) => void;
  /** Sets element volume (clamped to 0..1). */
  setVolume: (vol: number) => void;
  /** Toggles the element's `muted` flag. */
  toggleMute: () => void;
  /** Toggles the element's `loop` flag. */
  toggleLoop: () => void;
  /** Sets the element's `loop` flag explicitly. */
  setLoop: (enabled: boolean) => void;
  /** Seeks to 0 and starts playback. */
  restart: () => void;
}

/** Web Audio handles exposed for visualization (analysis only — no audio output path). */
export interface HybridWebAudioAPI {
  /** Lazily-created AudioContext, or null before first play. */
  context: AudioContext | null;
  /** AnalyserNode tapped off the media source, or null before first play. */
  analyser: AnalyserNode | null;
  /** MediaElementAudioSourceNode wrapping the audio element, or null before first play. */
  sourceNode: MediaElementAudioSourceNode | null;
}

/** Full return value of `useHybridAudio`. */
export interface UseHybridAudioReturn {
  /** Ref holding the internally-created (detached) HTMLAudioElement. */
  audioRef: React.RefObject<HTMLAudioElement | null>;
  /** Reactive playback state. */
  state: HybridAudioState;
  /** Imperative controls. */
  controls: HybridAudioControls;
  /** Web Audio handles for visualization. */
  webAudio: HybridWebAudioAPI;
}
|
|
72
|
+
|
|
73
|
+
// =============================================================================
|
|
74
|
+
// HOOK
|
|
75
|
+
// =============================================================================
|
|
76
|
+
|
|
77
|
+
export function useHybridAudio(options: UseHybridAudioOptions): UseHybridAudioReturn {
  const {
    src,
    autoPlay = false,
    initialVolume = 1,
    loop = false,
    crossOrigin = 'anonymous',
    onPlay,
    onPause,
    onEnded,
    onTimeUpdate,
    onError,
    onReady,
  } = options;

  // Refs: the audio element is created by this hook (detached from the DOM),
  // Web Audio nodes are created lazily on first play.
  const audioRef = useRef<HTMLAudioElement | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  // Tracks which element the source node was built from, so we don't call
  // createMediaElementSource twice on the same element (that throws).
  const connectedElementRef = useRef<HTMLMediaElement | null>(null);

  // State
  const [state, setState] = useState<HybridAudioState>({
    isReady: false,
    isPlaying: false,
    currentTime: 0,
    duration: 0,
    volume: initialVolume,
    isMuted: false,
    isLooping: loop,
    buffered: null,
    error: null,
  });

  // Initialize Web Audio for visualization (lazy, on first play).
  // Deferring until a user-gesture-driven play() avoids autoplay-policy issues
  // with creating/resuming an AudioContext too early.
  const initWebAudio = useCallback(() => {
    const audio = audioRef.current;
    if (!audio) return;

    // Already connected to this element
    if (connectedElementRef.current === audio && audioContextRef.current) {
      return;
    }

    try {
      // Create AudioContext if needed (webkit fallback for older Safari)
      if (!audioContextRef.current) {
        const AudioContextClass =
          window.AudioContext ||
          (window as unknown as { webkitAudioContext: typeof AudioContext }).webkitAudioContext;
        audioContextRef.current = new AudioContextClass();
      }

      const ctx = audioContextRef.current;

      // Disconnect old source if any (e.g. element was recreated)
      if (sourceNodeRef.current) {
        try {
          sourceNodeRef.current.disconnect();
        } catch {
          /* ignore */
        }
      }

      // Create source from audio element
      const source = ctx.createMediaElementSource(audio);
      sourceNodeRef.current = source;

      // Create analyser for visualization
      const analyser = ctx.createAnalyser();
      analyser.fftSize = 256;
      analyser.smoothingTimeConstant = 0.85;
      analyserRef.current = analyser;

      // Audio routing:
      // 1. source -> destination (for playback)
      // 2. source -> analyser (for visualization only - NO output!)
      source.connect(ctx.destination);
      source.connect(analyser);
      // NOTE: analyser does NOT connect to destination - prevents double audio!

      connectedElementRef.current = audio;
    } catch (error) {
      // Visualization is optional: a Web Audio failure must not break playback.
      console.warn('[useHybridAudio] Web Audio init failed:', error);
    }
  }, []);

  // Resume AudioContext on user interaction (browsers start contexts suspended
  // until a gesture).
  const resumeAudioContext = useCallback(async () => {
    const ctx = audioContextRef.current;
    if (ctx && ctx.state === 'suspended') {
      await ctx.resume();
    }
  }, []);

  // Controls
  const play = useCallback(async () => {
    const audio = audioRef.current;
    if (!audio) return;

    try {
      initWebAudio();
      await resumeAudioContext();
      await audio.play();
    } catch (error) {
      console.error('[useHybridAudio] Play failed:', error);
      onError?.(error as Error);
    }
  }, [initWebAudio, resumeAudioContext, onError]);

  const pause = useCallback(() => {
    audioRef.current?.pause();
  }, []);

  const togglePlay = useCallback(() => {
    if (state.isPlaying) {
      pause();
    } else {
      play();
    }
  }, [state.isPlaying, play, pause]);

  const seek = useCallback(
    (time: number) => {
      const audio = audioRef.current;
      if (audio && isFinite(time)) {
        // Clamp to [0, duration]; fall back to the element's duration if state
        // hasn't caught up yet.
        audio.currentTime = Math.max(0, Math.min(time, state.duration || audio.duration || 0));
      }
    },
    [state.duration]
  );

  const seekTo = useCallback(
    (progress: number) => {
      const duration = state.duration || audioRef.current?.duration || 0;
      seek(duration * Math.max(0, Math.min(1, progress)));
    },
    [state.duration, seek]
  );

  const skip = useCallback(
    (seconds: number) => {
      seek(state.currentTime + seconds);
    },
    [state.currentTime, seek]
  );

  const setVolume = useCallback((vol: number) => {
    const audio = audioRef.current;
    if (audio) {
      const clampedVol = Math.max(0, Math.min(1, vol));
      audio.volume = clampedVol;
      setState((prev) => ({ ...prev, volume: clampedVol }));
    }
  }, []);

  const toggleMute = useCallback(() => {
    const audio = audioRef.current;
    if (audio) {
      audio.muted = !audio.muted;
      setState((prev) => ({ ...prev, isMuted: audio.muted }));
    }
  }, []);

  const toggleLoop = useCallback(() => {
    const audio = audioRef.current;
    if (audio) {
      audio.loop = !audio.loop;
      setState((prev) => ({ ...prev, isLooping: audio.loop }));
    }
  }, []);

  const setLoop = useCallback((enabled: boolean) => {
    const audio = audioRef.current;
    if (audio) {
      audio.loop = enabled;
      setState((prev) => ({ ...prev, isLooping: enabled }));
    }
  }, []);

  const restart = useCallback(() => {
    seek(0);
    play();
  }, [seek, play]);

  // Rebuilt each render; identity changes when any control callback changes.
  const controls: HybridAudioControls = {
    play,
    pause,
    togglePlay,
    seek,
    seekTo,
    skip,
    setVolume,
    toggleMute,
    toggleLoop,
    setLoop,
    restart,
  };

  // Create audio element on mount. The element stays detached from the DOM —
  // playback works without appending it.
  useEffect(() => {
    const audio = document.createElement('audio');
    audio.preload = 'metadata';
    audio.crossOrigin = crossOrigin;
    audio.volume = initialVolume;
    audio.loop = loop;
    audioRef.current = audio;

    return () => {
      audio.pause();
      audio.src = '';
      if (audioContextRef.current) {
        audioContextRef.current.close().catch(() => {});
      }
      // NOTE(review): audioContextRef / connectedElementRef are not cleared
      // here; under a remount (e.g. React StrictMode) initWebAudio would see
      // the closed context and fail (caught + warned). Confirm whether the
      // refs should be nulled out in this cleanup.
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // Event handlers: mirror element events into React state and forward
  // to caller-supplied callbacks. Re-registered when any callback changes.
  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;

    const handlers = {
      loadedmetadata: () => {
        setState((prev) => ({
          ...prev,
          duration: audio.duration,
          isReady: true,
        }));
        onReady?.();
      },
      canplay: () => {
        setState((prev) => ({ ...prev, isReady: true }));
        // NOTE(review): `canplay` can fire more than once (e.g. after seeks),
        // so autoPlay may re-trigger play(); harmless if already playing, but
        // confirm this is intended.
        if (autoPlay) {
          play();
        }
      },
      play: () => {
        setState((prev) => ({ ...prev, isPlaying: true }));
        onPlay?.();
      },
      pause: () => {
        setState((prev) => ({ ...prev, isPlaying: false }));
        onPause?.();
      },
      ended: () => {
        setState((prev) => ({ ...prev, isPlaying: false }));
        onEnded?.();
      },
      timeupdate: () => {
        setState((prev) => ({ ...prev, currentTime: audio.currentTime }));
        onTimeUpdate?.(audio.currentTime);
      },
      progress: () => {
        setState((prev) => ({ ...prev, buffered: audio.buffered }));
      },
      error: () => {
        const error = new Error(audio.error?.message || 'Audio error');
        setState((prev) => ({ ...prev, error }));
        onError?.(error);
      },
      volumechange: () => {
        setState((prev) => ({
          ...prev,
          volume: audio.volume,
          isMuted: audio.muted,
        }));
      },
    };

    Object.entries(handlers).forEach(([event, handler]) => {
      audio.addEventListener(event, handler);
    });

    return () => {
      Object.entries(handlers).forEach(([event, handler]) => {
        audio.removeEventListener(event, handler);
      });
    };
  }, [autoPlay, onPlay, onPause, onEnded, onTimeUpdate, onError, onReady, play]);

  // Load new source: reset transient state, then point the element at `src`.
  useEffect(() => {
    const audio = audioRef.current;
    if (!audio || !src) return;

    setState((prev) => ({
      ...prev,
      isReady: false,
      currentTime: 0,
      duration: 0,
      error: null,
    }));

    audio.src = src;
    audio.load();
  }, [src]);

  return {
    audioRef,
    state,
    controls,
    // NOTE(review): these ref reads happen at render time, so after the lazy
    // init on first play the returned values stay null until the next
    // re-render (the play state change triggers one). Confirm consumers
    // tolerate a null analyser on the first playing frame.
    webAudio: {
      context: audioContextRef.current,
      analyser: analyserRef.current,
      sourceNode: sourceNodeRef.current,
    },
  };
}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
'use client';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* useHybridAudioAnalysis - Audio frequency analysis for hybrid player.
|
|
5
|
+
*
|
|
6
|
+
* Simplified version of useAudioAnalysis that works directly with AnalyserNode
|
|
7
|
+
* instead of SharedWebAudioContext.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { useState, useRef, useEffect, useCallback } from 'react';
|
|
11
|
+
import type { AudioLevels } from '../effects';
|
|
12
|
+
|
|
13
|
+
export function useHybridAudioAnalysis(
|
|
14
|
+
analyser: AnalyserNode | null,
|
|
15
|
+
isPlaying: boolean
|
|
16
|
+
): AudioLevels {
|
|
17
|
+
const [levels, setLevels] = useState<AudioLevels>({ bass: 0, mid: 0, high: 0, overall: 0 });
|
|
18
|
+
const animationRef = useRef<number | null>(null);
|
|
19
|
+
const dataArrayRef = useRef<Uint8Array<ArrayBuffer> | null>(null);
|
|
20
|
+
|
|
21
|
+
const cleanup = useCallback(() => {
|
|
22
|
+
if (animationRef.current) {
|
|
23
|
+
cancelAnimationFrame(animationRef.current);
|
|
24
|
+
animationRef.current = null;
|
|
25
|
+
}
|
|
26
|
+
}, []);
|
|
27
|
+
|
|
28
|
+
// Initialize data array when analyser is available
|
|
29
|
+
useEffect(() => {
|
|
30
|
+
if (analyser && !dataArrayRef.current) {
|
|
31
|
+
dataArrayRef.current = new Uint8Array(analyser.frequencyBinCount) as Uint8Array<ArrayBuffer>;
|
|
32
|
+
}
|
|
33
|
+
}, [analyser]);
|
|
34
|
+
|
|
35
|
+
// Animation loop
|
|
36
|
+
useEffect(() => {
|
|
37
|
+
if (!isPlaying || !analyser || !dataArrayRef.current) {
|
|
38
|
+
cleanup();
|
|
39
|
+
// Smooth fade out when stopped
|
|
40
|
+
setLevels((prev) => ({
|
|
41
|
+
bass: prev.bass * 0.95 < 0.01 ? 0 : prev.bass * 0.95,
|
|
42
|
+
mid: prev.mid * 0.95 < 0.01 ? 0 : prev.mid * 0.95,
|
|
43
|
+
high: prev.high * 0.95 < 0.01 ? 0 : prev.high * 0.95,
|
|
44
|
+
overall: prev.overall * 0.95 < 0.01 ? 0 : prev.overall * 0.95,
|
|
45
|
+
}));
|
|
46
|
+
return;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
const dataArray = dataArrayRef.current;
|
|
50
|
+
|
|
51
|
+
const animate = () => {
|
|
52
|
+
analyser.getByteFrequencyData(dataArray);
|
|
53
|
+
const binCount = dataArray.length;
|
|
54
|
+
|
|
55
|
+
// Bass (0-15% of frequency range)
|
|
56
|
+
const bassEnd = Math.floor(binCount * 0.15);
|
|
57
|
+
let bassSum = 0;
|
|
58
|
+
for (let i = 0; i < bassEnd; i++) bassSum += dataArray[i];
|
|
59
|
+
const bass = bassSum / bassEnd / 255;
|
|
60
|
+
|
|
61
|
+
// Mids (15-50% of frequency range)
|
|
62
|
+
const midStart = bassEnd;
|
|
63
|
+
const midEnd = Math.floor(binCount * 0.5);
|
|
64
|
+
let midSum = 0;
|
|
65
|
+
for (let i = midStart; i < midEnd; i++) midSum += dataArray[i];
|
|
66
|
+
const mid = midSum / (midEnd - midStart) / 255;
|
|
67
|
+
|
|
68
|
+
// Highs (50-100% of frequency range)
|
|
69
|
+
const highStart = midEnd;
|
|
70
|
+
let highSum = 0;
|
|
71
|
+
for (let i = highStart; i < binCount; i++) highSum += dataArray[i];
|
|
72
|
+
const high = highSum / (binCount - highStart) / 255;
|
|
73
|
+
|
|
74
|
+
// Overall
|
|
75
|
+
let totalSum = 0;
|
|
76
|
+
for (let i = 0; i < binCount; i++) totalSum += dataArray[i];
|
|
77
|
+
const overall = totalSum / binCount / 255;
|
|
78
|
+
|
|
79
|
+
// Smooth with lerp for natural feel
|
|
80
|
+
setLevels((prev) => ({
|
|
81
|
+
bass: prev.bass * 0.7 + bass * 0.3,
|
|
82
|
+
mid: prev.mid * 0.7 + mid * 0.3,
|
|
83
|
+
high: prev.high * 0.7 + high * 0.3,
|
|
84
|
+
overall: prev.overall * 0.7 + overall * 0.3,
|
|
85
|
+
}));
|
|
86
|
+
|
|
87
|
+
animationRef.current = requestAnimationFrame(animate);
|
|
88
|
+
};
|
|
89
|
+
|
|
90
|
+
animationRef.current = requestAnimationFrame(animate);
|
|
91
|
+
return cleanup;
|
|
92
|
+
}, [analyser, isPlaying, cleanup]);
|
|
93
|
+
|
|
94
|
+
return levels;
|
|
95
|
+
}
|
|
@@ -42,7 +42,8 @@ export function useSharedWebAudio(audioElement: HTMLMediaElement | null): Shared
|
|
|
42
42
|
}
|
|
43
43
|
|
|
44
44
|
sourceRef.current = audioContext.createMediaElementSource(audioElement);
|
|
45
|
-
//
|
|
45
|
+
// Single audio output path: source -> destination
|
|
46
|
+
// Analysers connect to source in parallel for frequency reading only (no output)
|
|
46
47
|
sourceRef.current.connect(audioContext.destination);
|
|
47
48
|
connectedElementRef.current = audioElement;
|
|
48
49
|
}
|
|
@@ -77,9 +78,11 @@ export function useSharedWebAudio(audioElement: HTMLMediaElement | null): Shared
|
|
|
77
78
|
analyser.fftSize = options?.fftSize ?? 256;
|
|
78
79
|
analyser.smoothingTimeConstant = options?.smoothing ?? 0.85;
|
|
79
80
|
|
|
80
|
-
// Connect
|
|
81
|
+
// Connect analyser as passive listener (for frequency analysis only)
|
|
82
|
+
// Audio path: source -> destination (already connected in initAudio)
|
|
83
|
+
// Analysis path: source -> analyser (no output connection needed)
|
|
84
|
+
// NOTE: Do NOT connect analyser to destination - it causes double audio routing and crackling!
|
|
81
85
|
sourceRef.current.connect(analyser);
|
|
82
|
-
analyser.connect(audioContextRef.current.destination);
|
|
83
86
|
|
|
84
87
|
analyserNodesRef.current.add(analyser);
|
|
85
88
|
return analyser;
|
|
@@ -19,6 +19,7 @@
|
|
|
19
19
|
// =============================================================================
|
|
20
20
|
|
|
21
21
|
export {
|
|
22
|
+
// WaveSurfer-based (original)
|
|
22
23
|
AudioPlayer,
|
|
23
24
|
SimpleAudioPlayer,
|
|
24
25
|
AudioEqualizer,
|
|
@@ -30,6 +31,10 @@ export {
|
|
|
30
31
|
OrbsEffect,
|
|
31
32
|
SpotlightEffect,
|
|
32
33
|
MeshEffect,
|
|
34
|
+
// Hybrid player (HTML5 audio + Web Audio visualization)
|
|
35
|
+
HybridAudioPlayer,
|
|
36
|
+
HybridSimplePlayer,
|
|
37
|
+
HybridWaveform,
|
|
33
38
|
} from './components';
|
|
34
39
|
|
|
35
40
|
export type {
|
|
@@ -37,6 +42,10 @@ export type {
|
|
|
37
42
|
VisualizationToggleProps,
|
|
38
43
|
AudioReactiveCoverProps,
|
|
39
44
|
GlowEffectData,
|
|
45
|
+
// Hybrid types
|
|
46
|
+
HybridAudioPlayerProps,
|
|
47
|
+
HybridSimplePlayerProps,
|
|
48
|
+
HybridWaveformProps,
|
|
40
49
|
} from './components';
|
|
41
50
|
|
|
42
51
|
// =============================================================================
|
|
@@ -44,12 +53,25 @@ export type {
|
|
|
44
53
|
// =============================================================================
|
|
45
54
|
|
|
46
55
|
export {
|
|
56
|
+
// WaveSurfer-based provider
|
|
47
57
|
AudioProvider,
|
|
48
58
|
AudioPlayerContext,
|
|
49
59
|
useAudio,
|
|
50
60
|
useAudioControls,
|
|
51
61
|
useAudioState,
|
|
52
62
|
useAudioElement,
|
|
63
|
+
// Hybrid provider (HTML5 audio + Web Audio visualization)
|
|
64
|
+
HybridAudioProvider,
|
|
65
|
+
useHybridAudioContext,
|
|
66
|
+
useHybridAudioState,
|
|
67
|
+
useHybridAudioControls,
|
|
68
|
+
useHybridAudioLevels,
|
|
69
|
+
useHybridWebAudio,
|
|
70
|
+
} from './context';
|
|
71
|
+
|
|
72
|
+
export type {
|
|
73
|
+
HybridAudioContextValue,
|
|
74
|
+
HybridAudioProviderProps,
|
|
53
75
|
} from './context';
|
|
54
76
|
|
|
55
77
|
// =============================================================================
|
|
@@ -70,6 +92,9 @@ export {
|
|
|
70
92
|
VARIANT_INFO,
|
|
71
93
|
INTENSITY_INFO,
|
|
72
94
|
COLOR_SCHEME_INFO,
|
|
95
|
+
// Hybrid hooks
|
|
96
|
+
useHybridAudio,
|
|
97
|
+
useHybridAudioAnalysis,
|
|
73
98
|
} from './hooks';
|
|
74
99
|
|
|
75
100
|
export type {
|
|
@@ -83,6 +108,12 @@ export type {
|
|
|
83
108
|
UseVisualizationReturn,
|
|
84
109
|
UseAudioVisualizationReturn,
|
|
85
110
|
VisualizationProviderProps,
|
|
111
|
+
// Hybrid types
|
|
112
|
+
UseHybridAudioOptions,
|
|
113
|
+
HybridAudioState,
|
|
114
|
+
HybridAudioControls,
|
|
115
|
+
HybridWebAudioAPI,
|
|
116
|
+
UseHybridAudioReturn,
|
|
86
117
|
} from './hooks';
|
|
87
118
|
|
|
88
119
|
// =============================================================================
|
|
@@ -13,6 +13,14 @@
|
|
|
13
13
|
* - Keyboard shortcuts
|
|
14
14
|
* - Volume control
|
|
15
15
|
* - Loop mode
|
|
16
|
+
*
|
|
17
|
+
* NOTE: This player does NOT support reactive cover effects.
|
|
18
|
+
* For reactive effects with streaming support, use `HybridSimplePlayer` instead.
|
|
19
|
+
*
|
|
20
|
+
* Use this player when you need:
|
|
21
|
+
* - Progressive waveform that shows the actual audio amplitude shape
|
|
22
|
+
* - Visualization of buffered/loaded ranges
|
|
23
|
+
* - Large file streaming without reactive effects
|
|
16
24
|
*/
|
|
17
25
|
|
|
18
26
|
import { useRef, useCallback } from 'react';
|
package/src/tools/index.ts
CHANGED
|
@@ -110,6 +110,18 @@ export {
|
|
|
110
110
|
mergePeaks,
|
|
111
111
|
resamplePeaks,
|
|
112
112
|
smoothPeaks,
|
|
113
|
+
// Hybrid Audio Player (HTML5 audio + Web Audio visualization)
|
|
114
|
+
HybridSimplePlayer,
|
|
115
|
+
HybridAudioPlayer,
|
|
116
|
+
HybridWaveform,
|
|
117
|
+
HybridAudioProvider,
|
|
118
|
+
useHybridAudioContext,
|
|
119
|
+
useHybridAudioState,
|
|
120
|
+
useHybridAudioControls,
|
|
121
|
+
useHybridAudioLevels,
|
|
122
|
+
useHybridWebAudio,
|
|
123
|
+
useHybridAudio,
|
|
124
|
+
useHybridAudioAnalysis,
|
|
113
125
|
} from './AudioPlayer';
|
|
114
126
|
export type {
|
|
115
127
|
SimpleAudioPlayerProps,
|
|
@@ -136,6 +148,16 @@ export type {
|
|
|
136
148
|
LoadedRange,
|
|
137
149
|
ProgressiveAudioState,
|
|
138
150
|
ProgressiveAudioControls,
|
|
151
|
+
// Hybrid types
|
|
152
|
+
HybridSimplePlayerProps,
|
|
153
|
+
HybridAudioPlayerProps,
|
|
154
|
+
HybridWaveformProps,
|
|
155
|
+
HybridAudioProviderProps,
|
|
156
|
+
HybridAudioContextValue,
|
|
157
|
+
UseHybridAudioOptions,
|
|
158
|
+
HybridAudioState,
|
|
159
|
+
HybridAudioControls,
|
|
160
|
+
HybridWebAudioAPI,
|
|
139
161
|
} from './AudioPlayer';
|
|
140
162
|
|
|
141
163
|
// Export ImageViewer
|