@djangocfg/ui-nextjs 2.1.69 → 2.1.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -4
- package/src/stores/mediaCache.ts +2 -2
- package/src/tools/AudioPlayer/@refactoring2/ISSUE_ANALYSIS.md +187 -0
- package/src/tools/AudioPlayer/@refactoring2/PLAN.md +372 -0
- package/src/tools/AudioPlayer/hooks/useAudioHotkeys.ts +3 -2
- package/src/tools/AudioPlayer/index.ts +27 -0
- package/src/tools/AudioPlayer/progressive/ProgressiveAudioPlayer.tsx +295 -0
- package/src/tools/AudioPlayer/progressive/WaveformCanvas.tsx +381 -0
- package/src/tools/AudioPlayer/progressive/index.ts +40 -0
- package/src/tools/AudioPlayer/progressive/peaks.ts +234 -0
- package/src/tools/AudioPlayer/progressive/types.ts +179 -0
- package/src/tools/AudioPlayer/progressive/useAudioElement.ts +289 -0
- package/src/tools/AudioPlayer/progressive/useProgressiveWaveform.ts +267 -0
- package/src/tools/VideoPlayer/providers/StreamProvider.tsx +12 -2
- package/src/tools/VideoPlayer/providers/VidstackProvider.tsx +6 -2
- package/src/tools/index.ts +16 -0
package/src/tools/AudioPlayer/progressive/peaks.ts
@@ -0,0 +1,234 @@
+/**
+ * Audio Peaks Extraction Utilities
+ *
+ * Extracts waveform peaks from AudioBuffer for visualization.
+ */
+
+// =============================================================================
+// TYPES
+// =============================================================================
+
+interface ExtractPeaksOptions {
+  /** Number of peaks to extract */
+  length: number;
+  /** Channel to use (0 = left, 1 = right, -1 = mix) */
+  channel?: number;
+  /** Normalize peaks to 0-1 range */
+  normalize?: boolean;
+}
+
+// =============================================================================
+// PEAK EXTRACTION
+// =============================================================================
+
+/**
+ * Extract peaks from an AudioBuffer
+ *
+ * @param audioBuffer - Decoded audio buffer
+ * @param options - Extraction options
+ * @returns Array of normalized peak values (0-1)
+ */
+export function extractPeaks(
+  audioBuffer: AudioBuffer,
+  options: ExtractPeaksOptions
+): number[] {
+  const { length, channel = -1, normalize = true } = options;
+
+  // Get channel data
+  let data: Float32Array;
+  if (channel === -1) {
+    // Mix all channels
+    data = mixChannels(audioBuffer);
+  } else {
+    data = audioBuffer.getChannelData(Math.min(channel, audioBuffer.numberOfChannels - 1));
+  }
+
+  const peaks = new Array<number>(length);
+  const samplesPerPeak = Math.floor(data.length / length);
+
+  let maxPeak = 0;
+
+  for (let i = 0; i < length; i++) {
+    const start = i * samplesPerPeak;
+    const end = Math.min(start + samplesPerPeak, data.length);
+
+    let max = 0;
+    for (let j = start; j < end; j++) {
+      const abs = Math.abs(data[j]);
+      if (abs > max) max = abs;
+    }
+
+    peaks[i] = max;
+    if (max > maxPeak) maxPeak = max;
+  }
+
+  // Normalize to 0-1
+  if (normalize && maxPeak > 0) {
+    for (let i = 0; i < length; i++) {
+      peaks[i] = peaks[i] / maxPeak;
+    }
+  }
+
+  return peaks;
+}
+
+/**
+ * Mix all channels into mono
+ */
+function mixChannels(audioBuffer: AudioBuffer): Float32Array {
+  const numChannels = audioBuffer.numberOfChannels;
+  const length = audioBuffer.length;
+  const mixed = new Float32Array(length);
+
+  for (let ch = 0; ch < numChannels; ch++) {
+    const channelData = audioBuffer.getChannelData(ch);
+    for (let i = 0; i < length; i++) {
+      mixed[i] += channelData[i];
+    }
+  }
+
+  // Average
+  const divisor = numChannels;
+  for (let i = 0; i < length; i++) {
+    mixed[i] /= divisor;
+  }
+
+  return mixed;
+}
+
+// =============================================================================
+// PEAK MERGING
+// =============================================================================
+
+/**
+ * Merge two peaks arrays (for progressive loading)
+ *
+ * @param existing - Existing peaks array
+ * @param newPeaks - New peaks to merge
+ * @param startIndex - Start index for new peaks
+ * @returns Merged peaks array
+ */
+export function mergePeaks(
+  existing: number[],
+  newPeaks: number[],
+  startIndex: number
+): number[] {
+  const result = [...existing];
+
+  // Extend if needed
+  const neededLength = startIndex + newPeaks.length;
+  while (result.length < neededLength) {
+    result.push(0);
+  }
+
+  // Merge
+  for (let i = 0; i < newPeaks.length; i++) {
+    result[startIndex + i] = newPeaks[i];
+  }
+
+  return result;
+}
+
+// =============================================================================
+// PEAK RESAMPLING
+// =============================================================================
+
+/**
+ * Resample peaks to a different length
+ *
+ * @param peaks - Source peaks
+ * @param targetLength - Target number of peaks
+ * @returns Resampled peaks
+ */
+export function resamplePeaks(peaks: number[], targetLength: number): number[] {
+  if (peaks.length === 0) return new Array(targetLength).fill(0);
+  if (peaks.length === targetLength) return [...peaks];
+
+  const result = new Array<number>(targetLength);
+  const ratio = peaks.length / targetLength;
+
+  for (let i = 0; i < targetLength; i++) {
+    const start = Math.floor(i * ratio);
+    const end = Math.min(Math.ceil((i + 1) * ratio), peaks.length);
+
+    let max = 0;
+    for (let j = start; j < end; j++) {
+      if (peaks[j] > max) max = peaks[j];
+    }
+    result[i] = max;
+  }
+
+  return result;
+}
+
+// =============================================================================
+// PEAK SMOOTHING
+// =============================================================================
+
+/**
+ * Smooth peaks using moving average
+ *
+ * @param peaks - Source peaks
+ * @param windowSize - Smoothing window size
+ * @returns Smoothed peaks
+ */
+export function smoothPeaks(peaks: number[], windowSize: number = 3): number[] {
+  if (windowSize < 2 || peaks.length < windowSize) return [...peaks];
+
+  const result = new Array<number>(peaks.length);
+  const halfWindow = Math.floor(windowSize / 2);
+
+  for (let i = 0; i < peaks.length; i++) {
+    const start = Math.max(0, i - halfWindow);
+    const end = Math.min(peaks.length, i + halfWindow + 1);
+
+    let sum = 0;
+    for (let j = start; j < end; j++) {
+      sum += peaks[j];
+    }
+    result[i] = sum / (end - start);
+  }
+
+  return result;
+}
+
+// =============================================================================
+// PEAK CALCULATIONS
+// =============================================================================
+
+/**
+ * Calculate peaks per second based on width and duration
+ */
+export function calculatePeaksCount(
+  width: number,
+  barWidth: number,
+  barGap: number
+): number {
+  const totalBarSpace = barWidth + barGap;
+  return Math.floor(width / totalBarSpace);
+}
+
+/**
+ * Calculate which peak index corresponds to a time
+ */
+export function timeToIndex(
+  time: number,
+  duration: number,
+  peaksCount: number
+): number {
+  if (duration <= 0) return 0;
+  const progress = time / duration;
+  return Math.floor(progress * peaksCount);
+}
+
+/**
+ * Calculate time from peak index
+ */
+export function indexToTime(
+  index: number,
+  peaksCount: number,
+  duration: number
+): number {
+  if (peaksCount <= 0) return 0;
+  return (index / peaksCount) * duration;
+}
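The helpers above are pure array transforms over a decoded AudioBuffer. A minimal usage sketch (not part of the published diff; the fetch/decode wiring, canvas width, and relative import path are assumptions for illustration) showing how they might combine to build bars for a fixed-width waveform:

import { extractPeaks, calculatePeaksCount, resamplePeaks, smoothPeaks } from './peaks';

// Decode a whole file and derive peaks sized to a canvas (illustrative only).
async function buildWaveform(url: string, canvasWidth: number): Promise<number[]> {
  const ctx = new AudioContext();
  const buffer = await ctx.decodeAudioData(await (await fetch(url)).arrayBuffer());

  // One peak per bar: 2px bars with a 1px gap.
  const barCount = calculatePeaksCount(canvasWidth, 2, 1);

  // Oversample, then resample down and smooth for a steadier visual.
  const raw = extractPeaks(buffer, { length: barCount * 4, channel: -1, normalize: true });
  return smoothPeaks(resamplePeaks(raw, barCount), 3);
}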
package/src/tools/AudioPlayer/progressive/types.ts
@@ -0,0 +1,179 @@
+/**
+ * Progressive Audio Player Types
+ */
+
+// =============================================================================
+// WAVEFORM TYPES
+// =============================================================================
+
+export interface WaveformData {
+  /** Normalized peaks array (values 0-1) */
+  peaks: number[];
+  /** Duration in seconds */
+  duration: number;
+  /** Sample rate of source audio */
+  sampleRate: number;
+}
+
+export interface LoadedRange {
+  /** Start position (0-1) */
+  start: number;
+  /** End position (0-1) */
+  end: number;
+}
+
+export interface WaveformLoadingState {
+  /** Loading progress (0-100) */
+  percent: number;
+  /** Loaded byte ranges */
+  loadedRanges: LoadedRange[];
+  /** Is currently loading */
+  isLoading: boolean;
+  /** Error if any */
+  error: Error | null;
+}
+
+// =============================================================================
+// AUDIO ELEMENT TYPES
+// =============================================================================
+
+export interface AudioState {
+  /** Is audio playing */
+  isPlaying: boolean;
+  /** Current playback time in seconds */
+  currentTime: number;
+  /** Total duration in seconds */
+  duration: number;
+  /** Volume level (0-1) */
+  volume: number;
+  /** Is muted */
+  isMuted: boolean;
+  /** Is looping */
+  isLooping: boolean;
+  /** Is audio ready to play */
+  isReady: boolean;
+  /** Buffered time ranges */
+  buffered: TimeRanges | null;
+  /** Error if any */
+  error: Error | null;
+}
+
+export interface AudioControls {
+  play: () => Promise<void>;
+  pause: () => void;
+  togglePlay: () => void;
+  seek: (time: number) => void;
+  seekTo: (progress: number) => void;
+  skip: (seconds: number) => void;
+  setVolume: (volume: number) => void;
+  toggleMute: () => void;
+  setLoop: (enabled: boolean) => void;
+  toggleLoop: () => void;
+}
+
+// =============================================================================
+// CANVAS TYPES
+// =============================================================================
+
+export interface WaveformStyle {
+  /** Color for waveform bars */
+  waveColor?: string;
+  /** Color for played portion */
+  progressColor?: string;
+  /** Color for loading/unloaded area */
+  loadingColor?: string;
+  /** Cursor color */
+  cursorColor?: string;
+  /** Bar width in pixels */
+  barWidth?: number;
+  /** Gap between bars in pixels */
+  barGap?: number;
+  /** Bar border radius */
+  barRadius?: number;
+  /** Canvas height */
+  height?: number;
+}
+
+export interface WaveformInteraction {
+  /** Called when user seeks */
+  onSeek?: (time: number) => void;
+  /** Called on hover (preview) */
+  onHover?: (time: number | null) => void;
+  /** Is interaction enabled */
+  interactive?: boolean;
+}
+
+// =============================================================================
+// COMPONENT PROPS
+// =============================================================================
+
+export interface ProgressiveAudioPlayerProps {
+  /** Audio source URL */
+  src: string;
+  /** Track title */
+  title?: string;
+  /** Artist name */
+  artist?: string;
+  /** Cover art URL */
+  coverArt?: string;
+
+  /** Show waveform visualization */
+  showWaveform?: boolean;
+  /** Show playback controls */
+  showControls?: boolean;
+  /** Show time display */
+  showTimer?: boolean;
+  /** Show volume control */
+  showVolume?: boolean;
+  /** Show loop button */
+  showLoop?: boolean;
+
+  /** Auto-play on load */
+  autoPlay?: boolean;
+
+  /** Waveform styling */
+  waveformStyle?: WaveformStyle;
+
+  /** Additional class name */
+  className?: string;
+
+  /** Callbacks */
+  onPlay?: () => void;
+  onPause?: () => void;
+  onEnded?: () => void;
+  onTimeUpdate?: (time: number) => void;
+  onError?: (error: Error) => void;
+}
+
+// =============================================================================
+// DECODER TYPES
+// =============================================================================
+
+export interface ChunkInfo {
+  /** Chunk index */
+  index: number;
+  /** Start byte */
+  start: number;
+  /** End byte */
+  end: number;
+  /** Chunk data */
+  data: ArrayBuffer;
+}
+
+export interface DecodeResult {
+  /** Extracted peaks for this chunk */
+  peaks: number[];
+  /** Chunk duration in seconds */
+  duration: number;
+  /** Sample rate */
+  sampleRate: number;
+}
+
+export interface DecoderOptions {
+  /** Target number of peaks per second of audio */
+  peaksPerSecond?: number;
+  /** Chunk size for fetching (bytes) */
+  chunkSize?: number;
+  /** Number of parallel fetches */
+  parallelFetches?: number;
+}
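ProgressiveAudioPlayerProps is the public surface of the new player component (ProgressiveAudioPlayer.tsx, also added in this release). A hypothetical JSX usage sketch, assuming the component is re-exported from the package root under that name:

import { ProgressiveAudioPlayer } from '@djangocfg/ui-nextjs';

export function EpisodePlayer() {
  return (
    <ProgressiveAudioPlayer
      src="https://example.com/episode.mp3"
      title="Episode 1"
      artist="Example Artist"
      showWaveform
      showControls
      waveformStyle={{ barWidth: 2, barGap: 1, height: 64 }}
      onEnded={() => console.log('playback finished')}
    />
  );
}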
package/src/tools/AudioPlayer/progressive/useAudioElement.ts
@@ -0,0 +1,289 @@
+'use client';
+
+/**
+ * useAudioElement - Hook for managing HTML5 Audio element
+ *
+ * Provides reactive state and controls for audio playback.
+ * Uses native browser Range request handling for streaming.
+ */
+
+import { useRef, useState, useCallback, useEffect } from 'react';
+import type { AudioState, AudioControls } from './types';
+
+// =============================================================================
+// TYPES
+// =============================================================================
+
+interface UseAudioElementOptions {
+  /** Audio source URL */
+  src: string;
+  /** Auto-play when loaded */
+  autoPlay?: boolean;
+  /** Initial volume (0-1) */
+  initialVolume?: number;
+  /** Loop playback */
+  loop?: boolean;
+  /** Callbacks */
+  onPlay?: () => void;
+  onPause?: () => void;
+  onEnded?: () => void;
+  onTimeUpdate?: (time: number) => void;
+  onDurationChange?: (duration: number) => void;
+  onError?: (error: Error) => void;
+}
+
+interface UseAudioElementReturn extends AudioState, AudioControls {
+  /** Ref to audio element */
+  audioRef: React.RefObject<HTMLAudioElement>;
+}
+
+// =============================================================================
+// HOOK
+// =============================================================================
+
+export function useAudioElement(options: UseAudioElementOptions): UseAudioElementReturn {
+  const {
+    src,
+    autoPlay = false,
+    initialVolume = 1,
+    loop = false,
+    onPlay,
+    onPause,
+    onEnded,
+    onTimeUpdate,
+    onDurationChange,
+    onError,
+  } = options;
+
+  const audioRef = useRef<HTMLAudioElement>(null);
+
+  // State
+  const [isPlaying, setIsPlaying] = useState(false);
+  const [currentTime, setCurrentTime] = useState(0);
+  const [duration, setDuration] = useState(0);
+  const [volume, setVolumeState] = useState(initialVolume);
+  const [isMuted, setIsMuted] = useState(false);
+  const [isLooping, setIsLooping] = useState(loop);
+  const [isReady, setIsReady] = useState(false);
+  const [buffered, setBuffered] = useState<TimeRanges | null>(null);
+  const [error, setError] = useState<Error | null>(null);
+
+  // ==========================================================================
+  // CONTROLS
+  // ==========================================================================
+
+  const play = useCallback(async () => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    try {
+      await audio.play();
+    } catch (err) {
+      const error = err instanceof Error ? err : new Error('Playback failed');
+      setError(error);
+      onError?.(error);
+    }
+  }, [onError]);
+
+  const pause = useCallback(() => {
+    audioRef.current?.pause();
+  }, []);
+
+  const togglePlay = useCallback(() => {
+    if (isPlaying) {
+      pause();
+    } else {
+      play();
+    }
+  }, [isPlaying, play, pause]);
+
+  const seek = useCallback((time: number) => {
+    const audio = audioRef.current;
+    if (!audio || !isFinite(time)) return;
+
+    const clampedTime = Math.max(0, Math.min(time, duration));
+    audio.currentTime = clampedTime;
+    setCurrentTime(clampedTime);
+  }, [duration]);
+
+  const seekTo = useCallback((progress: number) => {
+    const clampedProgress = Math.max(0, Math.min(progress, 1));
+    seek(duration * clampedProgress);
+  }, [duration, seek]);
+
+  const skip = useCallback((seconds: number) => {
+    seek(currentTime + seconds);
+  }, [currentTime, seek]);
+
+  const setVolume = useCallback((vol: number) => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    const clampedVol = Math.max(0, Math.min(vol, 1));
+    audio.volume = clampedVol;
+    setVolumeState(clampedVol);
+
+    if (clampedVol > 0 && isMuted) {
+      audio.muted = false;
+      setIsMuted(false);
+    }
+  }, [isMuted]);
+
+  const toggleMute = useCallback(() => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    audio.muted = !audio.muted;
+    setIsMuted(audio.muted);
+  }, []);
+
+  const setLoop = useCallback((enabled: boolean) => {
+    const audio = audioRef.current;
+    if (audio) {
+      audio.loop = enabled;
+    }
+    setIsLooping(enabled);
+  }, []);
+
+  const toggleLoop = useCallback(() => {
+    setLoop(!isLooping);
+  }, [isLooping, setLoop]);
+
+  // ==========================================================================
+  // EVENT HANDLERS
+  // ==========================================================================
+
+  useEffect(() => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    // Set initial properties
+    audio.volume = initialVolume;
+    audio.loop = loop;
+
+    const handleLoadedMetadata = () => {
+      setDuration(audio.duration);
+      setIsReady(true);
+      onDurationChange?.(audio.duration);
+    };
+
+    const handleCanPlay = () => {
+      setIsReady(true);
+      if (autoPlay) {
+        audio.play().catch(() => {});
+      }
+    };
+
+    const handlePlay = () => {
+      setIsPlaying(true);
+      onPlay?.();
+    };
+
+    const handlePause = () => {
+      setIsPlaying(false);
+      onPause?.();
+    };
+
+    const handleEnded = () => {
+      setIsPlaying(false);
+      if (!isLooping) {
+        onEnded?.();
+      }
+    };
+
+    const handleTimeUpdate = () => {
+      setCurrentTime(audio.currentTime);
+      onTimeUpdate?.(audio.currentTime);
+    };
+
+    const handleProgress = () => {
+      setBuffered(audio.buffered);
+    };
+
+    const handleError = () => {
+      const err = new Error(
+        audio.error?.message || 'Audio loading failed'
+      );
+      setError(err);
+      setIsReady(false);
+      onError?.(err);
+    };
+
+    const handleVolumeChange = () => {
+      setVolumeState(audio.volume);
+      setIsMuted(audio.muted);
+    };
+
+    // Add listeners
+    audio.addEventListener('loadedmetadata', handleLoadedMetadata);
+    audio.addEventListener('canplay', handleCanPlay);
+    audio.addEventListener('play', handlePlay);
+    audio.addEventListener('pause', handlePause);
+    audio.addEventListener('ended', handleEnded);
+    audio.addEventListener('timeupdate', handleTimeUpdate);
+    audio.addEventListener('progress', handleProgress);
+    audio.addEventListener('error', handleError);
+    audio.addEventListener('volumechange', handleVolumeChange);
+
+    return () => {
+      audio.removeEventListener('loadedmetadata', handleLoadedMetadata);
+      audio.removeEventListener('canplay', handleCanPlay);
+      audio.removeEventListener('play', handlePlay);
+      audio.removeEventListener('pause', handlePause);
+      audio.removeEventListener('ended', handleEnded);
+      audio.removeEventListener('timeupdate', handleTimeUpdate);
+      audio.removeEventListener('progress', handleProgress);
+      audio.removeEventListener('error', handleError);
+      audio.removeEventListener('volumechange', handleVolumeChange);
+    };
+  }, [
+    autoPlay,
+    initialVolume,
+    loop,
+    isLooping,
+    onPlay,
+    onPause,
+    onEnded,
+    onTimeUpdate,
+    onDurationChange,
+    onError,
+  ]);
+
+  // Update src
+  useEffect(() => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    setIsReady(false);
+    setError(null);
+    setCurrentTime(0);
+    setDuration(0);
+    audio.src = src;
+    audio.load();
+  }, [src]);
+
+  return {
+    audioRef,
+    // State
+    isPlaying,
+    currentTime,
+    duration,
+    volume,
+    isMuted,
+    isLooping,
+    isReady,
+    buffered,
+    error,
+    // Controls
+    play,
+    pause,
+    togglePlay,
+    seek,
+    seekTo,
+    skip,
+    setVolume,
+    toggleMute,
+    setLoop,
+    toggleLoop,
+  };
+}
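The hook manages state and event listeners only; rendering the audio element and attaching the returned ref is left to the caller. A rough wiring sketch (not from the diff; the component name and relative import path are illustrative):

import { useAudioElement } from './useAudioElement';

export function MiniPlayer({ src }: { src: string }) {
  const audio = useAudioElement({ src, initialVolume: 0.8 });

  return (
    <div>
      {/* The hook attaches all listeners to this element via audioRef */}
      <audio ref={audio.audioRef} preload="metadata" />
      <button onClick={audio.togglePlay} disabled={!audio.isReady}>
        {audio.isPlaying ? 'Pause' : 'Play'}
      </button>
      <span>{Math.floor(audio.currentTime)} / {Math.floor(audio.duration)} s</span>
    </div>
  );
}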