@djangocfg/ui-nextjs 2.1.66 → 2.1.67
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +8 -6
- package/src/stores/index.ts +8 -0
- package/src/stores/mediaCache.ts +464 -0
- package/src/tools/AudioPlayer/@refactoring/00-PLAN.md +148 -0
- package/src/tools/AudioPlayer/@refactoring/01-TYPES.md +301 -0
- package/src/tools/AudioPlayer/@refactoring/02-HOOKS.md +281 -0
- package/src/tools/AudioPlayer/@refactoring/03-CONTEXT.md +328 -0
- package/src/tools/AudioPlayer/@refactoring/04-COMPONENTS.md +251 -0
- package/src/tools/AudioPlayer/@refactoring/05-EFFECTS.md +427 -0
- package/src/tools/AudioPlayer/@refactoring/06-UTILS-AND-INDEX.md +193 -0
- package/src/tools/AudioPlayer/@refactoring/07-EXECUTION-CHECKLIST.md +146 -0
- package/src/tools/AudioPlayer/README.md +35 -11
- package/src/tools/AudioPlayer/{AudioEqualizer.tsx → components/AudioEqualizer.tsx} +29 -64
- package/src/tools/AudioPlayer/{AudioPlayer.tsx → components/AudioPlayer.tsx} +22 -14
- package/src/tools/AudioPlayer/{AudioShortcutsPopover.tsx → components/AudioShortcutsPopover.tsx} +6 -2
- package/src/tools/AudioPlayer/components/ReactiveCover/AudioReactiveCover.tsx +147 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/effects/GlowEffect.tsx +110 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/effects/MeshEffect.tsx +58 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/effects/OrbsEffect.tsx +45 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/effects/SpotlightEffect.tsx +82 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/effects/index.ts +8 -0
- package/src/tools/AudioPlayer/components/ReactiveCover/index.ts +6 -0
- package/src/tools/AudioPlayer/{SimpleAudioPlayer.tsx → components/SimpleAudioPlayer.tsx} +12 -7
- package/src/tools/AudioPlayer/{VisualizationToggle.tsx → components/VisualizationToggle.tsx} +2 -6
- package/src/tools/AudioPlayer/components/index.ts +21 -0
- package/src/tools/AudioPlayer/context/AudioProvider.tsx +292 -0
- package/src/tools/AudioPlayer/context/index.ts +11 -0
- package/src/tools/AudioPlayer/context/selectors.ts +96 -0
- package/src/tools/AudioPlayer/hooks/index.ts +29 -0
- package/src/tools/AudioPlayer/hooks/useAudioAnalysis.ts +110 -0
- package/src/tools/AudioPlayer/{useAudioHotkeys.ts → hooks/useAudioHotkeys.ts} +11 -4
- package/src/tools/AudioPlayer/hooks/useSharedWebAudio.ts +106 -0
- package/src/tools/AudioPlayer/{useAudioVisualization.tsx → hooks/useVisualization.tsx} +11 -5
- package/src/tools/AudioPlayer/index.ts +104 -49
- package/src/tools/AudioPlayer/types/audio.ts +107 -0
- package/src/tools/AudioPlayer/{types.ts → types/components.ts} +20 -84
- package/src/tools/AudioPlayer/types/effects.ts +73 -0
- package/src/tools/AudioPlayer/types/index.ts +35 -0
- package/src/tools/AudioPlayer/utils/formatTime.ts +10 -0
- package/src/tools/AudioPlayer/utils/index.ts +5 -0
- package/src/tools/ImageViewer/@refactoring/00-PLAN.md +71 -0
- package/src/tools/ImageViewer/@refactoring/01-TYPES.md +121 -0
- package/src/tools/ImageViewer/@refactoring/02-UTILS.md +143 -0
- package/src/tools/ImageViewer/@refactoring/03-HOOKS.md +261 -0
- package/src/tools/ImageViewer/@refactoring/04-COMPONENTS.md +427 -0
- package/src/tools/ImageViewer/@refactoring/05-EXECUTION-CHECKLIST.md +126 -0
- package/src/tools/ImageViewer/README.md +16 -3
- package/src/tools/ImageViewer/components/ImageInfo.tsx +44 -0
- package/src/tools/ImageViewer/components/ImageToolbar.tsx +150 -0
- package/src/tools/ImageViewer/components/ImageViewer.tsx +235 -0
- package/src/tools/ImageViewer/components/index.ts +7 -0
- package/src/tools/ImageViewer/hooks/index.ts +9 -0
- package/src/tools/ImageViewer/hooks/useImageLoading.ts +153 -0
- package/src/tools/ImageViewer/hooks/useImageTransform.ts +101 -0
- package/src/tools/ImageViewer/index.ts +47 -3
- package/src/tools/ImageViewer/types.ts +75 -0
- package/src/tools/ImageViewer/utils/constants.ts +59 -0
- package/src/tools/ImageViewer/utils/index.ts +16 -0
- package/src/tools/ImageViewer/utils/lqip.ts +47 -0
- package/src/tools/VideoPlayer/@refactoring/00-PLAN.md +91 -0
- package/src/tools/VideoPlayer/@refactoring/01-TYPES.md +284 -0
- package/src/tools/VideoPlayer/@refactoring/02-UTILS.md +141 -0
- package/src/tools/VideoPlayer/@refactoring/03-HOOKS.md +178 -0
- package/src/tools/VideoPlayer/@refactoring/04-COMPONENTS.md +95 -0
- package/src/tools/VideoPlayer/@refactoring/05-EXECUTION-CHECKLIST.md +139 -0
- package/src/tools/VideoPlayer/README.md +26 -10
- package/src/tools/VideoPlayer/{VideoControls.tsx → components/VideoControls.tsx} +8 -9
- package/src/tools/VideoPlayer/{VideoErrorFallback.tsx → components/VideoErrorFallback.tsx} +2 -2
- package/src/tools/VideoPlayer/{VideoPlayer.tsx → components/VideoPlayer.tsx} +4 -5
- package/src/tools/VideoPlayer/components/index.ts +14 -0
- package/src/tools/VideoPlayer/context/VideoPlayerContext.tsx +52 -0
- package/src/tools/VideoPlayer/context/index.ts +8 -0
- package/src/tools/VideoPlayer/hooks/index.ts +9 -0
- package/src/tools/VideoPlayer/hooks/useVideoPositionCache.ts +109 -0
- package/src/tools/VideoPlayer/index.ts +29 -20
- package/src/tools/VideoPlayer/providers/StreamProvider.tsx +118 -28
- package/src/tools/VideoPlayer/providers/VidstackProvider.tsx +89 -11
- package/src/tools/VideoPlayer/types/index.ts +38 -0
- package/src/tools/VideoPlayer/types/player.ts +116 -0
- package/src/tools/VideoPlayer/types/provider.ts +93 -0
- package/src/tools/VideoPlayer/types/sources.ts +97 -0
- package/src/tools/VideoPlayer/utils/fileSource.ts +78 -0
- package/src/tools/VideoPlayer/utils/index.ts +11 -0
- package/src/tools/VideoPlayer/utils/resolvers.ts +75 -0
- package/src/tools/index.ts +10 -0
- package/src/tools/AudioPlayer/AudioReactiveCover.tsx +0 -389
- package/src/tools/AudioPlayer/context.tsx +0 -426
- package/src/tools/ImageViewer/ImageViewer.tsx +0 -416
- package/src/tools/VideoPlayer/VideoPlayerContext.tsx +0 -125
- package/src/tools/VideoPlayer/types.ts +0 -367
package/src/tools/AudioPlayer/context/AudioProvider.tsx
@@ -0,0 +1,292 @@
```tsx
'use client';

/**
 * AudioProvider - Shared audio state and controls
 *
 * Provides centralized audio state management using WaveSurfer.js
 * All child components can access playback state and controls
 */

import {
  createContext,
  useRef,
  useMemo,
  useCallback,
  useState,
  useEffect,
  type ReactNode,
} from 'react';
import { useWavesurfer } from '@wavesurfer/react';
import type { AudioContextState, AudioSource, WaveformOptions } from '../types';
import { useSharedWebAudio, useAudioAnalysis } from '../hooks';
import { useAudioCache } from '../../../stores/mediaCache';

// =============================================================================
// CONTEXT
// =============================================================================

// Named AudioPlayerContext to avoid conflict with browser's AudioContext
export const AudioPlayerContext = createContext<AudioContextState | null>(null);

// =============================================================================
// PROVIDER PROPS
// =============================================================================

interface AudioProviderProps {
  /** Audio source */
  source: AudioSource;
  /** Auto-play when loaded */
  autoPlay?: boolean;
  /** WaveSurfer options */
  waveformOptions?: WaveformOptions;
  /** Container ref for waveform rendering */
  containerRef: React.RefObject<HTMLDivElement | null>;
  /** Children components */
  children: ReactNode;
}

// =============================================================================
// PROVIDER
// =============================================================================

export function AudioProvider({
  source,
  autoPlay = false,
  waveformOptions = {},
  containerRef,
  children,
}: AudioProviderProps) {
  // Cache for playback position persistence
  const { saveAudioPosition, getAudioPosition } = useAudioCache();
  const lastSavedTimeRef = useRef<number>(0);

  // Memoize WaveSurfer options with theme-aware colors
  const options = useMemo(
    () => ({
      container: containerRef,
      url: source.uri,
      // Theme-aware colors using HSL
      waveColor: waveformOptions.waveColor || 'hsl(217 91% 60% / 0.3)',
      progressColor: waveformOptions.progressColor || 'hsl(217 91% 60%)',
      cursorColor: waveformOptions.cursorColor || 'hsl(217 91% 60%)',
      cursorWidth: waveformOptions.cursorWidth ?? 2,
      height: waveformOptions.height ?? 64,
      barWidth: waveformOptions.barWidth ?? 3,
      barRadius: waveformOptions.barRadius ?? 3,
      barGap: waveformOptions.barGap ?? 2,
      normalize: true,
      interact: true,
      hideScrollbar: true,
      autoplay: autoPlay,
    }),
    [source.uri, autoPlay, waveformOptions, containerRef]
  );

  // Use official wavesurfer-react hook
  const { wavesurfer, isReady, isPlaying, currentTime } = useWavesurfer(options);

  // Restore cached playback position when ready
  useEffect(() => {
    if (isReady && wavesurfer && source.uri) {
      const cachedPosition = getAudioPosition(source.uri);
      if (cachedPosition && cachedPosition > 0) {
        const duration = wavesurfer.getDuration();
        // Only restore if position is valid (not at the end)
        if (cachedPosition < duration - 1) {
          wavesurfer.setTime(cachedPosition);
        }
      }
    }
  }, [isReady, wavesurfer, source.uri, getAudioPosition]);

  // Save playback position periodically and on pause
  useEffect(() => {
    if (!source.uri) return;

    // Save position every 5 seconds during playback
    if (isPlaying && currentTime > 0) {
      const timeSinceLastSave = currentTime - lastSavedTimeRef.current;
      if (timeSinceLastSave >= 5 || timeSinceLastSave < 0) {
        saveAudioPosition(source.uri, currentTime);
        lastSavedTimeRef.current = currentTime;
      }
    }

    // Save position immediately when paused
    if (!isPlaying && currentTime > 0) {
      saveAudioPosition(source.uri, currentTime);
      lastSavedTimeRef.current = currentTime;
    }
  }, [isPlaying, currentTime, source.uri, saveAudioPosition]);

  // Derived state
  const duration = wavesurfer?.getDuration() ?? 0;
  const volume = wavesurfer?.getVolume() ?? 1;
  const isMuted = wavesurfer?.getMuted() ?? false;

  // Loop state
  const [isLooping, setIsLooping] = useState(false);

  // Handle loop: restart playback when audio finishes
  useEffect(() => {
    if (!wavesurfer) return;

    const handleFinish = () => {
      if (isLooping) {
        wavesurfer.seekTo(0);
        wavesurfer.play();
      }
    };

    wavesurfer.on('finish', handleFinish);
    return () => {
      wavesurfer.un('finish', handleFinish);
    };
  }, [wavesurfer, isLooping]);

  // Get audio element for equalizer
  const audioElement = useMemo(() => {
    return wavesurfer?.getMediaElement() ?? null;
  }, [wavesurfer]);

  // Shared Web Audio context for all analyzers (prevents duplicate source nodes)
  const sharedAudio = useSharedWebAudio(audioElement);

  // Audio analysis for reactive effects (uses shared context)
  const audioLevels = useAudioAnalysis(sharedAudio, isPlaying);

  // Actions
  const play = useCallback(async () => {
    await wavesurfer?.play();
  }, [wavesurfer]);

  const pause = useCallback(() => {
    wavesurfer?.pause();
  }, [wavesurfer]);

  const togglePlay = useCallback(() => {
    wavesurfer?.playPause();
  }, [wavesurfer]);

  const seek = useCallback(
    (time: number) => {
      if (wavesurfer) {
        const clampedTime = Math.max(0, Math.min(time, duration));
        wavesurfer.setTime(clampedTime);
      }
    },
    [wavesurfer, duration]
  );

  const seekTo = useCallback(
    (progress: number) => {
      if (wavesurfer) {
        const clampedProgress = Math.max(0, Math.min(progress, 1));
        wavesurfer.seekTo(clampedProgress);
      }
    },
    [wavesurfer]
  );

  const skip = useCallback(
    (seconds: number) => {
      wavesurfer?.skip(seconds);
    },
    [wavesurfer]
  );

  const setVolume = useCallback(
    (vol: number) => {
      if (wavesurfer) {
        wavesurfer.setVolume(Math.max(0, Math.min(vol, 1)));
      }
    },
    [wavesurfer]
  );

  const toggleMute = useCallback(() => {
    if (wavesurfer) {
      wavesurfer.setMuted(!wavesurfer.getMuted());
    }
  }, [wavesurfer]);

  const restart = useCallback(() => {
    if (wavesurfer) {
      wavesurfer.seekTo(0);
      wavesurfer.play();
    }
  }, [wavesurfer]);

  const toggleLoop = useCallback(() => {
    setIsLooping((prev) => !prev);
  }, []);

  const setLoop = useCallback((enabled: boolean) => {
    setIsLooping(enabled);
  }, []);

  // Context value
  const contextValue = useMemo<AudioContextState>(
    () => ({
      // Core instances
      wavesurfer,
      audioElement,
      sharedAudio,

      // Playback state
      isReady,
      isPlaying,
      currentTime,
      duration,
      volume,
      isMuted,
      isLooping,

      // Audio analysis
      audioLevels,

      // Actions
      play,
      pause,
      togglePlay,
      seek,
      seekTo,
      skip,
      setVolume,
      toggleMute,
      toggleLoop,
      setLoop,
      restart,
    }),
    [
      wavesurfer,
      audioElement,
      sharedAudio,
      isReady,
      isPlaying,
      currentTime,
      duration,
      volume,
      isMuted,
      isLooping,
      audioLevels,
      play,
      pause,
      togglePlay,
      seek,
      seekTo,
      skip,
      setVolume,
      toggleMute,
      toggleLoop,
      setLoop,
      restart,
    ]
  );

  return (
    <AudioPlayerContext.Provider value={contextValue}>
      {children}
    </AudioPlayerContext.Provider>
  );
}
```
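For orientation, a minimal consumer of `AudioProvider` might look like the sketch below. This is not part of the published diff: the top-level import path and the shape of `AudioSource` beyond `uri` are assumptions; the provider mounts the WaveSurfer waveform into the element behind `containerRef`.

```tsx
'use client';

// Illustrative sketch, not part of the diff. Assumes AudioProvider is re-exported
// from the package entry point and that `source` only needs a `uri` here.
import { useRef } from 'react';
import { AudioProvider } from '@djangocfg/ui-nextjs';

export function TrackPlayer({ uri }: { uri: string }) {
  // AudioProvider renders the WaveSurfer instance into this container.
  const containerRef = useRef<HTMLDivElement | null>(null);

  return (
    <AudioProvider source={{ uri }} containerRef={containerRef}>
      {/* Waveform render target */}
      <div ref={containerRef} />
      {/* Children here can read playback state via the selector hooks below */}
    </AudioProvider>
  );
}
```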
package/src/tools/AudioPlayer/context/index.ts
@@ -0,0 +1,11 @@
```ts
/**
 * AudioPlayer context - Public API
 *
 * Re-exports provider and selector hooks
 */

// Provider
export { AudioProvider, AudioPlayerContext } from './AudioProvider';

// Selector hooks
export { useAudio, useAudioControls, useAudioState, useAudioElement } from './selectors';
```
package/src/tools/AudioPlayer/context/selectors.ts
@@ -0,0 +1,96 @@
```ts
'use client';

/**
 * Context selectors - Performance-optimized hooks for accessing audio state
 *
 * These hooks provide selective access to context values,
 * helping to minimize unnecessary re-renders.
 */

import { useContext } from 'react';
import { AudioPlayerContext } from './AudioProvider';
import type { AudioContextState } from '../types';

// =============================================================================
// MAIN HOOK
// =============================================================================

/**
 * Access full audio context state
 * @throws Error if used outside AudioProvider
 */
export function useAudio(): AudioContextState {
  const context = useContext(AudioPlayerContext);
  if (!context) {
    throw new Error('useAudio must be used within an AudioProvider');
  }
  return context;
}

// =============================================================================
// SELECTIVE HOOKS - for performance optimization
// =============================================================================

/**
 * Hook for playback controls only (no re-render on time updates)
 */
export function useAudioControls() {
  const {
    isReady,
    play,
    pause,
    togglePlay,
    skip,
    restart,
    setVolume,
    toggleMute,
    toggleLoop,
    setLoop,
  } = useAudio();

  return {
    isReady,
    play,
    pause,
    togglePlay,
    skip,
    restart,
    setVolume,
    toggleMute,
    toggleLoop,
    setLoop,
  };
}

/**
 * Hook for playback state (read-only)
 */
export function useAudioState() {
  const {
    isReady,
    isPlaying,
    currentTime,
    duration,
    volume,
    isMuted,
    isLooping,
  } = useAudio();

  return {
    isReady,
    isPlaying,
    currentTime,
    duration,
    volume,
    isMuted,
    isLooping,
  };
}

/**
 * Hook for audio element access (for equalizer and reactive effects)
 */
export function useAudioElement() {
  const { audioElement, sharedAudio, isPlaying, audioLevels } = useAudio();
  return { audioElement, sharedAudio, isPlaying, audioLevels };
}
```
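A hedged usage sketch of the selector hooks (not part of the diff): a play/pause button rendered somewhere under `AudioProvider`. The relative import path is an assumption.

```tsx
'use client';

// Sketch only: a control rendered inside <AudioProvider>. Import path assumed.
import { useAudioControls, useAudioState } from './context';

export function PlayPauseButton() {
  const { togglePlay, isReady } = useAudioControls(); // controls-only selector
  const { isPlaying } = useAudioState();              // read-only playback state

  return (
    <button onClick={togglePlay} disabled={!isReady}>
      {isPlaying ? 'Pause' : 'Play'}
    </button>
  );
}
```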
package/src/tools/AudioPlayer/hooks/index.ts
@@ -0,0 +1,29 @@
```ts
/**
 * AudioPlayer hooks - re-exports all hooks
 */

// Internal hooks (used by provider)
export { useSharedWebAudio } from './useSharedWebAudio';
export { useAudioAnalysis } from './useAudioAnalysis';

// Public hooks
export { useAudioHotkeys, AUDIO_SHORTCUTS } from './useAudioHotkeys';
export type { AudioHotkeyOptions, ShortcutItem, ShortcutGroup } from './useAudioHotkeys';

export {
  useVisualization,
  useAudioVisualization, // backward compat alias
  VisualizationProvider,
  VARIANT_INFO,
  INTENSITY_INFO,
  COLOR_SCHEME_INFO,
} from './useVisualization';
export type {
  VisualizationSettings,
  VisualizationVariant,
  VisualizationIntensity,
  VisualizationColorScheme,
  UseVisualizationReturn,
  UseAudioVisualizationReturn,
  VisualizationProviderProps,
} from './useVisualization';
```
package/src/tools/AudioPlayer/hooks/useAudioAnalysis.ts
@@ -0,0 +1,110 @@
```ts
'use client';

/**
 * useAudioAnalysis - Real-time audio frequency analysis for reactive effects.
 *
 * Analyzes audio frequencies for bass, mid, high, and overall levels.
 * Uses shared Web Audio context to prevent duplicate source nodes.
 */

import { useState, useRef, useEffect, useCallback } from 'react';
import type { SharedWebAudioContext } from '../types';
import type { AudioLevels } from '../effects';

export function useAudioAnalysis(
  sharedAudio: SharedWebAudioContext,
  isPlaying: boolean
): AudioLevels {
  const [levels, setLevels] = useState<AudioLevels>({ bass: 0, mid: 0, high: 0, overall: 0 });
  const analyserRef = useRef<AnalyserNode | null>(null);
  const animationRef = useRef<number | null>(null);
  const dataArrayRef = useRef<Uint8Array | null>(null);

  const cleanup = useCallback(() => {
    if (animationRef.current) {
      cancelAnimationFrame(animationRef.current);
      animationRef.current = null;
    }
  }, []);

  // Create analyser when shared audio is ready
  useEffect(() => {
    if (!sharedAudio.sourceNode || analyserRef.current) return;

    const analyser = sharedAudio.createAnalyser({ fftSize: 256, smoothing: 0.85 });
    if (analyser) {
      analyserRef.current = analyser;
      dataArrayRef.current = new Uint8Array(analyser.frequencyBinCount);
    }

    return () => {
      if (analyserRef.current) {
        sharedAudio.disconnectAnalyser(analyserRef.current);
        analyserRef.current = null;
        dataArrayRef.current = null;
      }
    };
  }, [sharedAudio.sourceNode, sharedAudio.createAnalyser, sharedAudio.disconnectAnalyser]);

  // Animation loop
  useEffect(() => {
    if (!isPlaying || !analyserRef.current || !dataArrayRef.current) {
      cleanup();
      // Smooth fade out
      setLevels(prev => ({
        bass: prev.bass * 0.95 < 0.01 ? 0 : prev.bass * 0.95,
        mid: prev.mid * 0.95,
        high: prev.high * 0.95,
        overall: prev.overall * 0.95,
      }));
      return;
    }

    const analyser = analyserRef.current;
    const dataArray = dataArrayRef.current;

    const animate = () => {
      analyser.getByteFrequencyData(dataArray as Uint8Array<ArrayBuffer>);
      const binCount = dataArray.length;

      // Bass (0-15%)
      const bassEnd = Math.floor(binCount * 0.15);
      let bassSum = 0;
      for (let i = 0; i < bassEnd; i++) bassSum += dataArray[i];
      const bass = bassSum / bassEnd / 255;

      // Mids (15-50%)
      const midStart = bassEnd;
      const midEnd = Math.floor(binCount * 0.5);
      let midSum = 0;
      for (let i = midStart; i < midEnd; i++) midSum += dataArray[i];
      const mid = midSum / (midEnd - midStart) / 255;

      // Highs (50-100%)
      const highStart = midEnd;
      let highSum = 0;
      for (let i = highStart; i < binCount; i++) highSum += dataArray[i];
      const high = highSum / (binCount - highStart) / 255;

      // Overall
      let totalSum = 0;
      for (let i = 0; i < binCount; i++) totalSum += dataArray[i];
      const overall = totalSum / binCount / 255;

      // Smooth with lerp
      setLevels(prev => ({
        bass: prev.bass * 0.7 + bass * 0.3,
        mid: prev.mid * 0.7 + mid * 0.3,
        high: prev.high * 0.7 + high * 0.3,
        overall: prev.overall * 0.7 + overall * 0.3,
      }));

      animationRef.current = requestAnimationFrame(animate);
    };

    animationRef.current = requestAnimationFrame(animate);
    return cleanup;
  }, [isPlaying, cleanup]);

  return levels;
}
```
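The levels returned here feed the ReactiveCover effects. As a rough illustration (not part of the diff), a consumer could map `bass` to a CSS transform; the import path is assumed.

```tsx
'use client';

// Sketch only: consuming the analysis levels exposed through useAudioElement.
import { useAudioElement } from '../context';

export function BassGlow() {
  const { audioLevels } = useAudioElement();

  // bass is normalized to 0..1; scale the element between 1x and 1.2x.
  const scale = 1 + audioLevels.bass * 0.2;

  return (
    <div
      style={{
        transform: `scale(${scale})`,
        transition: 'transform 80ms linear',
      }}
    />
  );
}
```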
package/src/tools/AudioPlayer/{useAudioHotkeys.ts → hooks/useAudioHotkeys.ts}
```diff
@@ -1,14 +1,13 @@
 'use client';
 
 /**
- *
+ * useAudioHotkeys - Keyboard shortcuts for audio playback control.
  *
- * Provides keyboard shortcuts for audio playback control.
  * Uses useHotkey from @djangocfg/ui-nextjs.
  */
 
 import { useHotkey, useDeviceDetect } from '@djangocfg/ui-nextjs';
-import { useAudioControls, useAudioState } from '
+import { useAudioControls, useAudioState } from '../context';
 
 // =============================================================================
 // TYPES
@@ -30,7 +29,7 @@ export function useAudioHotkeys(options: AudioHotkeyOptions = {}) {
 export function useAudioHotkeys(options: AudioHotkeyOptions = {}) {
   const { enabled = true, skipDuration = 10, volumeStep = 0.1 } = options;
 
-  const { togglePlay, skip, setVolume, toggleMute, isReady } = useAudioControls();
+  const { togglePlay, skip, setVolume, toggleMute, toggleLoop, isReady } = useAudioControls();
   const { volume, duration } = useAudioState();
   const device = useDeviceDetect();
 
@@ -85,6 +84,13 @@ export function useAudioHotkeys(options: AudioHotkeyOptions = {}) {
     { enabled: enabled && isReady, description: 'Mute/Unmute' }
   );
 
+  // Loop/Repeat - L key (conflicts with skip forward, using Shift+L)
+  useHotkey(
+    'shift+l',
+    () => toggleLoop(),
+    { enabled: enabled && isReady, description: 'Toggle loop' }
+  );
+
   // Number keys 0-9 to seek to percentage
   useHotkey(
     ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'],
@@ -123,6 +129,7 @@ export const AUDIO_SHORTCUTS: ShortcutGroup[] = [
       { keys: ['Space'], label: 'Play/Pause' },
       { keys: ['←'], label: 'Skip 10s back' },
       { keys: ['→'], label: 'Skip 10s forward' },
+      { keys: ['⇧', 'L'], label: 'Toggle loop' },
     ],
   },
   {
```
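A small sketch of how the hook might be mounted (not part of the diff); the option values mirror the defaults destructured above, and the import path is assumed.

```tsx
'use client';

// Sketch only: enable the shortcuts (including the new Shift+L loop toggle)
// from a component rendered under AudioProvider. Import path assumed.
import { useAudioHotkeys } from './hooks';

export function PlayerHotkeys() {
  // The values shown are the hook's own defaults.
  useAudioHotkeys({ enabled: true, skipDuration: 10, volumeStep: 0.1 });
  return null;
}
```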
package/src/tools/AudioPlayer/hooks/useSharedWebAudio.ts
@@ -0,0 +1,106 @@
```ts
'use client';

/**
 * useSharedWebAudio - Manages a shared Web Audio context and source node.
 *
 * This prevents the "InvalidStateError" from creating multiple MediaElementSourceNodes
 * for the same audio element. All analyzers share the same source.
 */

import { useRef, useEffect, useCallback } from 'react';
import type { SharedWebAudioContext } from '../types';

export function useSharedWebAudio(audioElement: HTMLMediaElement | null): SharedWebAudioContext {
  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceRef = useRef<MediaElementAudioSourceNode | null>(null);
  const connectedElementRef = useRef<HTMLMediaElement | null>(null);
  const analyserNodesRef = useRef<Set<AnalyserNode>>(new Set());

  // Initialize Web Audio on first play
  useEffect(() => {
    if (!audioElement) return;

    // Already connected to this element
    if (connectedElementRef.current === audioElement && audioContextRef.current) {
      return;
    }

    const initAudio = () => {
      try {
        if (!audioContextRef.current) {
          const AudioContextClass = window.AudioContext ||
            (window as unknown as { webkitAudioContext: typeof AudioContext }).webkitAudioContext;
          audioContextRef.current = new AudioContextClass();
        }

        const audioContext = audioContextRef.current;

        // Only create source node once per audio element
        if (connectedElementRef.current !== audioElement) {
          if (sourceRef.current) {
            try { sourceRef.current.disconnect(); } catch { /* ignore */ }
          }

          sourceRef.current = audioContext.createMediaElementSource(audioElement);
          // Connect directly to destination (analysers will be inserted in between)
          sourceRef.current.connect(audioContext.destination);
          connectedElementRef.current = audioElement;
        }
      } catch (error) {
        console.warn('[SharedWebAudio] Could not initialize:', error);
      }
    };

    const handlePlay = () => {
      initAudio();
      if (audioContextRef.current?.state === 'suspended') {
        audioContextRef.current.resume();
      }
    };

    audioElement.addEventListener('play', handlePlay);
    if (!audioElement.paused) {
      handlePlay();
    }

    return () => {
      audioElement.removeEventListener('play', handlePlay);
    };
  }, [audioElement]);

  // Create an analyser connected to the shared source
  const createAnalyser = useCallback((options?: { fftSize?: number; smoothing?: number }): AnalyserNode | null => {
    if (!audioContextRef.current || !sourceRef.current) return null;

    try {
      const analyser = audioContextRef.current.createAnalyser();
      analyser.fftSize = options?.fftSize ?? 256;
      analyser.smoothingTimeConstant = options?.smoothing ?? 0.85;

      // Connect: source -> analyser -> destination
      sourceRef.current.connect(analyser);
      analyser.connect(audioContextRef.current.destination);

      analyserNodesRef.current.add(analyser);
      return analyser;
    } catch (error) {
      console.warn('[SharedWebAudio] Could not create analyser:', error);
      return null;
    }
  }, []);

  // Disconnect an analyser
  const disconnectAnalyser = useCallback((analyser: AnalyserNode) => {
    try {
      analyser.disconnect();
      analyserNodesRef.current.delete(analyser);
    } catch { /* ignore */ }
  }, []);

  return {
    audioContext: audioContextRef.current,
    sourceNode: sourceRef.current,
    createAnalyser,
    disconnectAnalyser,
  };
}
```
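To see why the shared graph matters, here is a rough sketch (not part of the diff) of an additional analyser built on the same source node, so no second `MediaElementSourceNode` is created for the element. The import path is assumed.

```tsx
'use client';

// Sketch only: an extra analyser attached to the shared Web Audio graph.
import { useEffect, useRef } from 'react';
import { useAudioElement } from '../context';

export function useFrequencyBins(fftSize = 512) {
  const { sharedAudio } = useAudioElement();
  const binsRef = useRef<Uint8Array | null>(null);

  useEffect(() => {
    if (!sharedAudio.sourceNode) return;

    // Reuses the shared source node instead of calling createMediaElementSource again.
    const analyser = sharedAudio.createAnalyser({ fftSize, smoothing: 0.8 });
    if (!analyser) return;

    binsRef.current = new Uint8Array(analyser.frequencyBinCount);
    return () => sharedAudio.disconnectAnalyser(analyser);
  }, [sharedAudio.sourceNode, sharedAudio.createAnalyser, sharedAudio.disconnectAnalyser, fftSize]);

  return binsRef;
}
```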