@djangocfg/ui-nextjs 2.1.64 → 2.1.66
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +9 -6
- package/src/blocks/SplitHero/SplitHeroMedia.tsx +2 -1
- package/src/tools/AudioPlayer/AudioEqualizer.tsx +235 -0
- package/src/tools/AudioPlayer/AudioPlayer.tsx +223 -0
- package/src/tools/AudioPlayer/AudioReactiveCover.tsx +389 -0
- package/src/tools/AudioPlayer/AudioShortcutsPopover.tsx +95 -0
- package/src/tools/AudioPlayer/README.md +301 -0
- package/src/tools/AudioPlayer/SimpleAudioPlayer.tsx +275 -0
- package/src/tools/AudioPlayer/VisualizationToggle.tsx +68 -0
- package/src/tools/AudioPlayer/context.tsx +426 -0
- package/src/tools/AudioPlayer/effects/index.ts +412 -0
- package/src/tools/AudioPlayer/index.ts +84 -0
- package/src/tools/AudioPlayer/types.ts +162 -0
- package/src/tools/AudioPlayer/useAudioHotkeys.ts +142 -0
- package/src/tools/AudioPlayer/useAudioVisualization.tsx +195 -0
- package/src/tools/ImageViewer/ImageViewer.tsx +416 -0
- package/src/tools/ImageViewer/README.md +161 -0
- package/src/tools/ImageViewer/index.ts +16 -0
- package/src/tools/VideoPlayer/README.md +196 -187
- package/src/tools/VideoPlayer/VideoErrorFallback.tsx +174 -0
- package/src/tools/VideoPlayer/VideoPlayer.tsx +189 -218
- package/src/tools/VideoPlayer/VideoPlayerContext.tsx +125 -0
- package/src/tools/VideoPlayer/index.ts +59 -7
- package/src/tools/VideoPlayer/providers/NativeProvider.tsx +206 -0
- package/src/tools/VideoPlayer/providers/StreamProvider.tsx +311 -0
- package/src/tools/VideoPlayer/providers/VidstackProvider.tsx +254 -0
- package/src/tools/VideoPlayer/providers/index.ts +8 -0
- package/src/tools/VideoPlayer/types.ts +320 -71
- package/src/tools/index.ts +82 -4
- package/src/tools/VideoPlayer/NativePlayer.tsx +0 -141
|
@@ -0,0 +1,426 @@
|
|
|
1
|
+
'use client';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* AudioContext - Shared audio state and controls
|
|
5
|
+
*
|
|
6
|
+
* Provides centralized audio state management using WaveSurfer.js
|
|
7
|
+
* All child components can access playback state and controls
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import {
|
|
11
|
+
createContext,
|
|
12
|
+
useContext,
|
|
13
|
+
useRef,
|
|
14
|
+
useMemo,
|
|
15
|
+
useCallback,
|
|
16
|
+
useState,
|
|
17
|
+
useEffect,
|
|
18
|
+
type ReactNode,
|
|
19
|
+
} from 'react';
|
|
20
|
+
import { useWavesurfer } from '@wavesurfer/react';
|
|
21
|
+
import type { AudioContextState, AudioSource, WaveformOptions } from './types';
|
|
22
|
+
import type { AudioLevels } from './effects';
|
|
23
|
+
|
|
24
|
+
// =============================================================================
|
|
25
|
+
// AUDIO ANALYSIS HOOK (moved here to persist across variant changes)
|
|
26
|
+
// =============================================================================
|
|
27
|
+
|
|
28
|
+
/**
 * Analyses the playing media element with the Web Audio API and returns
 * smoothed per-band levels (bass / mid / high / overall), each in [0, 1].
 *
 * Lives at the provider level so the analyser graph survives UI variant
 * changes in child components.
 *
 * @param audioElement - media element produced by WaveSurfer (null until ready)
 * @param isPlaying    - drives the requestAnimationFrame sampling loop
 */
function useAudioAnalysis(audioElement: HTMLMediaElement | null, isPlaying: boolean): AudioLevels {
  const [levels, setLevels] = useState<AudioLevels>({ bass: 0, mid: 0, high: 0, overall: 0 });

  // Web Audio graph lives in refs: it must persist across renders and must
  // never be rebuilt for the same element (createMediaElementSource may only
  // be called once per media element).
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaElementAudioSourceNode | null>(null);
  const animationRef = useRef<number | null>(null);
  // Tracks which element the source node is wired to, to enforce the
  // one-source-per-element rule above.
  const connectedElementRef = useRef<HTMLMediaElement | null>(null);

  // Stops the sampling loop only; the audio graph itself is kept alive.
  const cleanup = useCallback(() => {
    if (animationRef.current) {
      cancelAnimationFrame(animationRef.current);
      animationRef.current = null;
    }
  }, []);

  // Initialize Web Audio - persists for the lifetime of the provider
  useEffect(() => {
    if (!audioElement) {
      cleanup();
      return;
    }

    // Already connected to this element - skip
    if (connectedElementRef.current === audioElement && audioContextRef.current) {
      return;
    }

    const initAudio = () => {
      try {
        if (!audioContextRef.current) {
          // webkitAudioContext fallback for older Safari.
          const AudioContextClass = window.AudioContext || (window as unknown as { webkitAudioContext: typeof AudioContext }).webkitAudioContext;
          audioContextRef.current = new AudioContextClass();
        }

        const audioContext = audioContextRef.current;

        if (!analyserRef.current) {
          analyserRef.current = audioContext.createAnalyser();
          // 256-point FFT -> 128 frequency bins: cheap and plenty for 4 bands.
          analyserRef.current.fftSize = 256;
          analyserRef.current.smoothingTimeConstant = 0.85;
        }

        // Only create source node if not already connected
        if (connectedElementRef.current !== audioElement) {
          if (sourceRef.current) {
            try { sourceRef.current.disconnect(); } catch { /* ignore */ }
          }

          // Routing the element through the analyser detaches its default
          // output, so we must reconnect to destination or playback goes silent.
          sourceRef.current = audioContext.createMediaElementSource(audioElement);
          sourceRef.current.connect(analyserRef.current);
          analyserRef.current.connect(audioContext.destination);
          connectedElementRef.current = audioElement;
        }
      } catch (error) {
        // Analysis is a cosmetic feature; never break playback over it
        // (e.g. CORS-restricted media throws here).
        console.warn('AudioContext: Could not connect audio analysis', error);
      }
    };

    // Deferred to the first 'play' so AudioContext creation/resume happens
    // inside a user gesture (browser autoplay policy).
    const handlePlay = () => {
      initAudio();
      if (audioContextRef.current?.state === 'suspended') {
        audioContextRef.current.resume();
      }
    };

    audioElement.addEventListener('play', handlePlay);
    if (!audioElement.paused) {
      handlePlay();
    }

    return () => {
      audioElement.removeEventListener('play', handlePlay);
    };
  }, [audioElement, cleanup]);

  // Animation loop - runs continuously when playing
  useEffect(() => {
    if (!isPlaying || !analyserRef.current) {
      cleanup();
      // Smooth fade out (single decay step on pause; snaps bass to 0 below
      // threshold so reactive visuals settle instead of flickering)
      setLevels(prev => ({
        bass: prev.bass * 0.95 < 0.01 ? 0 : prev.bass * 0.95,
        mid: prev.mid * 0.95,
        high: prev.high * 0.95,
        overall: prev.overall * 0.95,
      }));
      return;
    }

    const analyser = analyserRef.current;
    // Reused across frames; getByteFrequencyData fills it in place.
    const dataArray = new Uint8Array(analyser.frequencyBinCount);

    const animate = () => {
      analyser.getByteFrequencyData(dataArray);
      const binCount = dataArray.length;

      // Bass (0-15%)
      const bassEnd = Math.floor(binCount * 0.15);
      let bassSum = 0;
      for (let i = 0; i < bassEnd; i++) bassSum += dataArray[i];
      const bass = bassSum / bassEnd / 255;

      // Mids (15-50%)
      const midStart = bassEnd;
      const midEnd = Math.floor(binCount * 0.5);
      let midSum = 0;
      for (let i = midStart; i < midEnd; i++) midSum += dataArray[i];
      const mid = midSum / (midEnd - midStart) / 255;

      // Highs (50-100%)
      const highStart = midEnd;
      let highSum = 0;
      for (let i = highStart; i < binCount; i++) highSum += dataArray[i];
      const high = highSum / (binCount - highStart) / 255;

      // Overall
      let totalSum = 0;
      for (let i = 0; i < binCount; i++) totalSum += dataArray[i];
      const overall = totalSum / binCount / 255;

      // Smooth with lerp (70% previous / 30% new) to tame frame-to-frame jitter
      setLevels(prev => ({
        bass: prev.bass * 0.7 + bass * 0.3,
        mid: prev.mid * 0.7 + mid * 0.3,
        high: prev.high * 0.7 + high * 0.3,
        overall: prev.overall * 0.7 + overall * 0.3,
      }));

      animationRef.current = requestAnimationFrame(animate);
    };

    animationRef.current = requestAnimationFrame(animate);
    return cleanup;
  }, [isPlaying, cleanup]);

  return levels;
}
|
|
166
|
+
|
|
167
|
+
// =============================================================================
// CONTEXT
// =============================================================================

// Named AudioPlayerContext to avoid conflict with browser's AudioContext.
// Null until a provider mounts; useAudio() throws on the null case.
const AudioPlayerContext = createContext<AudioContextState | null>(null);

// =============================================================================
// PROVIDER PROPS
// =============================================================================

interface AudioProviderProps {
  /** Audio source (source.uri is handed to WaveSurfer as the URL) */
  source: AudioSource;
  /** Auto-play when loaded (forwarded to WaveSurfer's `autoplay` option) */
  autoPlay?: boolean;
  /** WaveSurfer options; unset fields fall back to theme defaults */
  waveformOptions?: WaveformOptions;
  /** Container ref for waveform rendering (WaveSurfer mounts its canvas here) */
  containerRef: React.RefObject<HTMLDivElement | null>;
  /** Children components */
  children: ReactNode;
}
|
|
190
|
+
|
|
191
|
+
// =============================================================================
|
|
192
|
+
// PROVIDER
|
|
193
|
+
// =============================================================================
|
|
194
|
+
|
|
195
|
+
export function AudioProvider({
|
|
196
|
+
source,
|
|
197
|
+
autoPlay = false,
|
|
198
|
+
waveformOptions = {},
|
|
199
|
+
containerRef,
|
|
200
|
+
children,
|
|
201
|
+
}: AudioProviderProps) {
|
|
202
|
+
// Memoize WaveSurfer options with theme-aware colors
|
|
203
|
+
const options = useMemo(
|
|
204
|
+
() => ({
|
|
205
|
+
container: containerRef,
|
|
206
|
+
url: source.uri,
|
|
207
|
+
// Theme-aware colors using HSL
|
|
208
|
+
waveColor: waveformOptions.waveColor || 'hsl(217 91% 60% / 0.3)',
|
|
209
|
+
progressColor: waveformOptions.progressColor || 'hsl(217 91% 60%)',
|
|
210
|
+
cursorColor: waveformOptions.cursorColor || 'hsl(217 91% 60%)',
|
|
211
|
+
cursorWidth: waveformOptions.cursorWidth ?? 2,
|
|
212
|
+
height: waveformOptions.height ?? 64,
|
|
213
|
+
barWidth: waveformOptions.barWidth ?? 3,
|
|
214
|
+
barRadius: waveformOptions.barRadius ?? 3,
|
|
215
|
+
barGap: waveformOptions.barGap ?? 2,
|
|
216
|
+
normalize: true,
|
|
217
|
+
interact: true,
|
|
218
|
+
hideScrollbar: true,
|
|
219
|
+
autoplay: autoPlay,
|
|
220
|
+
}),
|
|
221
|
+
[source.uri, autoPlay, waveformOptions, containerRef]
|
|
222
|
+
);
|
|
223
|
+
|
|
224
|
+
// Use official wavesurfer-react hook
|
|
225
|
+
const { wavesurfer, isReady, isPlaying, currentTime } = useWavesurfer(options);
|
|
226
|
+
|
|
227
|
+
// Derived state
|
|
228
|
+
const duration = wavesurfer?.getDuration() ?? 0;
|
|
229
|
+
const volume = wavesurfer?.getVolume() ?? 1;
|
|
230
|
+
const isMuted = wavesurfer?.getMuted() ?? false;
|
|
231
|
+
|
|
232
|
+
// Get audio element for equalizer
|
|
233
|
+
const audioElement = useMemo(() => {
|
|
234
|
+
return wavesurfer?.getMediaElement() ?? null;
|
|
235
|
+
}, [wavesurfer]);
|
|
236
|
+
|
|
237
|
+
// Audio analysis for reactive effects (persists across variant changes)
|
|
238
|
+
const audioLevels = useAudioAnalysis(audioElement, isPlaying);
|
|
239
|
+
|
|
240
|
+
// Actions
|
|
241
|
+
const play = useCallback(async () => {
|
|
242
|
+
await wavesurfer?.play();
|
|
243
|
+
}, [wavesurfer]);
|
|
244
|
+
|
|
245
|
+
const pause = useCallback(() => {
|
|
246
|
+
wavesurfer?.pause();
|
|
247
|
+
}, [wavesurfer]);
|
|
248
|
+
|
|
249
|
+
const togglePlay = useCallback(() => {
|
|
250
|
+
wavesurfer?.playPause();
|
|
251
|
+
}, [wavesurfer]);
|
|
252
|
+
|
|
253
|
+
const seek = useCallback(
|
|
254
|
+
(time: number) => {
|
|
255
|
+
if (wavesurfer) {
|
|
256
|
+
const clampedTime = Math.max(0, Math.min(time, duration));
|
|
257
|
+
wavesurfer.setTime(clampedTime);
|
|
258
|
+
}
|
|
259
|
+
},
|
|
260
|
+
[wavesurfer, duration]
|
|
261
|
+
);
|
|
262
|
+
|
|
263
|
+
const seekTo = useCallback(
|
|
264
|
+
(progress: number) => {
|
|
265
|
+
if (wavesurfer) {
|
|
266
|
+
const clampedProgress = Math.max(0, Math.min(progress, 1));
|
|
267
|
+
wavesurfer.seekTo(clampedProgress);
|
|
268
|
+
}
|
|
269
|
+
},
|
|
270
|
+
[wavesurfer]
|
|
271
|
+
);
|
|
272
|
+
|
|
273
|
+
const skip = useCallback(
|
|
274
|
+
(seconds: number) => {
|
|
275
|
+
wavesurfer?.skip(seconds);
|
|
276
|
+
},
|
|
277
|
+
[wavesurfer]
|
|
278
|
+
);
|
|
279
|
+
|
|
280
|
+
const setVolume = useCallback(
|
|
281
|
+
(vol: number) => {
|
|
282
|
+
if (wavesurfer) {
|
|
283
|
+
wavesurfer.setVolume(Math.max(0, Math.min(vol, 1)));
|
|
284
|
+
}
|
|
285
|
+
},
|
|
286
|
+
[wavesurfer]
|
|
287
|
+
);
|
|
288
|
+
|
|
289
|
+
const toggleMute = useCallback(() => {
|
|
290
|
+
if (wavesurfer) {
|
|
291
|
+
wavesurfer.setMuted(!wavesurfer.getMuted());
|
|
292
|
+
}
|
|
293
|
+
}, [wavesurfer]);
|
|
294
|
+
|
|
295
|
+
const restart = useCallback(() => {
|
|
296
|
+
if (wavesurfer) {
|
|
297
|
+
wavesurfer.seekTo(0);
|
|
298
|
+
wavesurfer.play();
|
|
299
|
+
}
|
|
300
|
+
}, [wavesurfer]);
|
|
301
|
+
|
|
302
|
+
// Context value
|
|
303
|
+
const contextValue = useMemo<AudioContextState>(
|
|
304
|
+
() => ({
|
|
305
|
+
// Core instances
|
|
306
|
+
wavesurfer,
|
|
307
|
+
audioElement,
|
|
308
|
+
|
|
309
|
+
// Playback state
|
|
310
|
+
isReady,
|
|
311
|
+
isPlaying,
|
|
312
|
+
currentTime,
|
|
313
|
+
duration,
|
|
314
|
+
volume,
|
|
315
|
+
isMuted,
|
|
316
|
+
|
|
317
|
+
// Audio analysis
|
|
318
|
+
audioLevels,
|
|
319
|
+
|
|
320
|
+
// Actions
|
|
321
|
+
play,
|
|
322
|
+
pause,
|
|
323
|
+
togglePlay,
|
|
324
|
+
seek,
|
|
325
|
+
seekTo,
|
|
326
|
+
skip,
|
|
327
|
+
setVolume,
|
|
328
|
+
toggleMute,
|
|
329
|
+
restart,
|
|
330
|
+
}),
|
|
331
|
+
[
|
|
332
|
+
wavesurfer,
|
|
333
|
+
audioElement,
|
|
334
|
+
isReady,
|
|
335
|
+
isPlaying,
|
|
336
|
+
currentTime,
|
|
337
|
+
duration,
|
|
338
|
+
volume,
|
|
339
|
+
isMuted,
|
|
340
|
+
audioLevels,
|
|
341
|
+
play,
|
|
342
|
+
pause,
|
|
343
|
+
togglePlay,
|
|
344
|
+
seek,
|
|
345
|
+
seekTo,
|
|
346
|
+
skip,
|
|
347
|
+
setVolume,
|
|
348
|
+
toggleMute,
|
|
349
|
+
restart,
|
|
350
|
+
]
|
|
351
|
+
);
|
|
352
|
+
|
|
353
|
+
return (
|
|
354
|
+
<AudioPlayerContext.Provider value={contextValue}>
|
|
355
|
+
{children}
|
|
356
|
+
</AudioPlayerContext.Provider>
|
|
357
|
+
);
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
// =============================================================================
|
|
361
|
+
// HOOK
|
|
362
|
+
// =============================================================================
|
|
363
|
+
|
|
364
|
+
export function useAudio(): AudioContextState {
|
|
365
|
+
const context = useContext(AudioPlayerContext);
|
|
366
|
+
if (!context) {
|
|
367
|
+
throw new Error('useAudio must be used within an AudioProvider');
|
|
368
|
+
}
|
|
369
|
+
return context;
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
// =============================================================================
|
|
373
|
+
// SELECTIVE HOOKS - for performance optimization
|
|
374
|
+
// =============================================================================
|
|
375
|
+
|
|
376
|
+
/** Hook for playback controls only (no re-render on time updates) */
|
|
377
|
+
export function useAudioControls() {
|
|
378
|
+
const {
|
|
379
|
+
isReady,
|
|
380
|
+
play,
|
|
381
|
+
pause,
|
|
382
|
+
togglePlay,
|
|
383
|
+
skip,
|
|
384
|
+
restart,
|
|
385
|
+
setVolume,
|
|
386
|
+
toggleMute,
|
|
387
|
+
} = useAudio();
|
|
388
|
+
|
|
389
|
+
return {
|
|
390
|
+
isReady,
|
|
391
|
+
play,
|
|
392
|
+
pause,
|
|
393
|
+
togglePlay,
|
|
394
|
+
skip,
|
|
395
|
+
restart,
|
|
396
|
+
setVolume,
|
|
397
|
+
toggleMute,
|
|
398
|
+
};
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
/** Hook for playback state (read-only) */
|
|
402
|
+
export function useAudioState() {
|
|
403
|
+
const {
|
|
404
|
+
isReady,
|
|
405
|
+
isPlaying,
|
|
406
|
+
currentTime,
|
|
407
|
+
duration,
|
|
408
|
+
volume,
|
|
409
|
+
isMuted,
|
|
410
|
+
} = useAudio();
|
|
411
|
+
|
|
412
|
+
return {
|
|
413
|
+
isReady,
|
|
414
|
+
isPlaying,
|
|
415
|
+
currentTime,
|
|
416
|
+
duration,
|
|
417
|
+
volume,
|
|
418
|
+
isMuted,
|
|
419
|
+
};
|
|
420
|
+
}
|
|
421
|
+
|
|
422
|
+
/** Hook for audio element access (for equalizer and reactive effects) */
|
|
423
|
+
export function useAudioElement() {
|
|
424
|
+
const { audioElement, isPlaying, audioLevels } = useAudio();
|
|
425
|
+
return { audioElement, isPlaying, audioLevels };
|
|
426
|
+
}
|