audio-channel-queue 1.8.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core.js +4 -3
- package/dist/errors.js +27 -17
- package/dist/events.js +21 -14
- package/dist/index.d.ts +5 -5
- package/dist/index.js +3 -2
- package/dist/info.js +8 -7
- package/dist/pause.d.ts +24 -12
- package/dist/pause.js +93 -41
- package/dist/types.d.ts +12 -3
- package/dist/types.js +6 -1
- package/dist/utils.js +4 -3
- package/dist/volume.js +37 -15
- package/package.json +8 -3
- package/src/core.ts +59 -42
- package/src/errors.ts +504 -480
- package/src/events.ts +36 -27
- package/src/index.ts +47 -43
- package/src/info.ts +23 -22
- package/src/pause.ts +168 -85
- package/src/types.ts +12 -2
- package/src/utils.ts +7 -7
- package/src/volume.ts +47 -30
package/src/pause.ts
CHANGED

@@ -2,20 +2,19 @@
  * @fileoverview Pause and resume management functions for the audio-channel-queue package
  */

-import {
+import {
+  ExtendedAudioQueueChannel,
+  AudioInfo,
+  FadeType,
+  FadeConfig,
+  ChannelFadeState
+} from './types';
 import { audioChannels } from './info';
 import { getAudioInfoFromElement } from './utils';
 import { emitAudioPause, emitAudioResume } from './events';
-import { transitionVolume } from './volume';
+import { transitionVolume, getFadeConfig } from './volume';
+

-/**
- * Predefined fade configurations for different transition types
- */
-const FADE_CONFIGS: Record<FadeType, FadeConfig> = {
-  [FadeType.Linear]: { duration: 800, pauseCurve: EasingType.Linear, resumeCurve: EasingType.Linear },
-  [FadeType.Gentle]: { duration: 800, pauseCurve: EasingType.EaseOut, resumeCurve: EasingType.EaseIn },
-  [FadeType.Dramatic]: { duration: 800, pauseCurve: EasingType.EaseIn, resumeCurve: EasingType.EaseOut }
-};

 /**
  * Gets the current volume for a channel, accounting for synchronous state

@@ -24,7 +23,7 @@ const FADE_CONFIGS: Record<FadeType, FadeConfig> = {
  */
 const getChannelVolumeSync = (channelNumber: number): number => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-  return channel?.volume
+  return channel?.volume ?? 1.0;
 };

 /**

@@ -46,46 +45,77 @@ const setChannelVolumeSync = (channelNumber: number, volume: number): void => {
  * Pauses the currently playing audio in a specific channel with smooth volume fade
  * @param fadeType - Type of fade transition to apply
  * @param channelNumber - The channel number to pause (defaults to 0)
+ * @param duration - Optional custom fade duration in milliseconds (uses fadeType default if not provided)
  * @returns Promise that resolves when the pause and fade are complete
  * @example
  * ```typescript
  * await pauseWithFade(FadeType.Gentle, 0); // Pause with gentle fade out over 800ms
- * await pauseWithFade(FadeType.Dramatic, 1); // Pause with dramatic fade out over
- * await pauseWithFade(FadeType.Linear, 2); // Linear pause with
+ * await pauseWithFade(FadeType.Dramatic, 1, 1500); // Pause with dramatic fade out over 1.5s
+ * await pauseWithFade(FadeType.Linear, 2, 500); // Linear pause with custom 500ms fade
  * ```
  */
-export const pauseWithFade = async (
+export const pauseWithFade = async (
+  fadeType: FadeType = FadeType.Gentle,
+  channelNumber: number = 0,
+  duration?: number
+): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (!channel || channel.queue.length === 0) return;
-
+
   const currentAudio: HTMLAudioElement = channel.queue[0];
-
+
   // Don't pause if already paused or ended
   if (currentAudio.paused || currentAudio.ended) return;
-
-  const config: FadeConfig =
-  const
-
-  //
+
+  const config: FadeConfig = getFadeConfig(fadeType);
+  const effectiveDuration: number = duration ?? config.duration;
+
+  // Race condition fix: Use existing fadeState originalVolume if already transitioning,
+  // otherwise capture current volume
+  let originalVolume: number;
+  if (channel.fadeState?.isTransitioning) {
+    // We're already in any kind of transition (pause or resume), preserve original volume
+    originalVolume = channel.fadeState.originalVolume;
+  } else {
+    // First fade or no transition in progress, capture current volume
+    // But ensure we don't capture a volume of 0 during a transition
+    const currentVolume = getChannelVolumeSync(channelNumber);
+    originalVolume = currentVolume > 0 ? currentVolume : channel.fadeState?.originalVolume ?? 1.0;
+  }
+
+  // Store fade state for resumeWithFade to use (including custom duration)
   channel.fadeState = {
-
+    customDuration: duration,
     fadeType,
-    isPaused: true
+    isPaused: true,
+    isTransitioning: true,
+    originalVolume
   };
-
-  if (
+
+  if (effectiveDuration === 0) {
     // Instant pause
     await pauseChannel(channelNumber);
+    // Reset volume to original for resume (synchronously to avoid state issues)
+    setChannelVolumeSync(channelNumber, originalVolume);
+    // Mark transition as complete for instant pause
+    if (channel.fadeState) {
+      channel.fadeState.isTransitioning = false;
+    }
     return;
   }
-
+
   // Fade to 0 with pause curve, then pause
-  await transitionVolume(channelNumber, 0,
+  await transitionVolume(channelNumber, 0, effectiveDuration, config.pauseCurve);
   await pauseChannel(channelNumber);
-
+
   // Reset volume to original for resume (synchronously to avoid state issues)
   setChannelVolumeSync(channelNumber, originalVolume);
+
+  // Mark transition as complete
+  if (channel.fadeState) {
+    channel.fadeState.isTransitioning = false;
+  }
 };

 /**

@@ -93,102 +123,144 @@ export const pauseWithFade = async (fadeType: FadeType = FadeType.Gentle, channe
  * Uses the complementary fade curve automatically based on the pause fade type, or allows override
  * @param fadeType - Optional fade type to override the stored fade type from pause
  * @param channelNumber - The channel number to resume (defaults to 0)
+ * @param duration - Optional custom fade duration in milliseconds (uses stored or fadeType default if not provided)
  * @returns Promise that resolves when the resume and fade are complete
  * @example
  * ```typescript
  * await resumeWithFade(); // Resume with automatically paired fade curve from pause
  * await resumeWithFade(FadeType.Dramatic, 0); // Override with dramatic fade
- * await resumeWithFade(FadeType.Linear); // Override with linear fade
+ * await resumeWithFade(FadeType.Linear, 0, 1000); // Override with linear fade over 1 second
  * ```
  */
-export const resumeWithFade = async (
+export const resumeWithFade = async (
+  fadeType?: FadeType,
+  channelNumber: number = 0,
+  duration?: number
+): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (!channel || channel.queue.length === 0) return;
-
+
   const fadeState: ChannelFadeState | undefined = channel.fadeState;
-  if (!fadeState
+  if (!fadeState?.isPaused) {
     // Fall back to regular resume if no fade state
     await resumeChannel(channelNumber);
     return;
   }
-
+
   // Use provided fadeType or fall back to stored fadeType from pause
-  const effectiveFadeType: FadeType = fadeType
-  const config: FadeConfig =
-
-
+  const effectiveFadeType: FadeType = fadeType ?? fadeState.fadeType;
+  const config: FadeConfig = getFadeConfig(effectiveFadeType);
+
+  // Determine effective duration: custom parameter > stored custom > fadeType default
+  let effectiveDuration: number;
+  if (duration !== undefined) {
+    effectiveDuration = duration;
+  } else if (fadeState.customDuration !== undefined) {
+    effectiveDuration = fadeState.customDuration;
+  } else {
+    effectiveDuration = config.duration;
+  }
+
+  if (effectiveDuration === 0) {
     // Instant resume
+    const targetVolume = fadeState.originalVolume > 0 ? fadeState.originalVolume : 1.0;
+    setChannelVolumeSync(channelNumber, targetVolume);
     await resumeChannel(channelNumber);
     fadeState.isPaused = false;
+    fadeState.isTransitioning = false;
     return;
   }
-
+
+  // Race condition fix: Ensure we have a valid original volume to restore to
+  const targetVolume = fadeState.originalVolume > 0 ? fadeState.originalVolume : 1.0;
+
+  // Mark as transitioning to prevent volume capture during rapid toggles
+  fadeState.isTransitioning = true;
+
   // Set volume to 0, resume, then fade to original with resume curve
   setChannelVolumeSync(channelNumber, 0);
   await resumeChannel(channelNumber);
-
-
+
+  // Use the stored original volume, not current volume, to prevent race conditions
+  await transitionVolume(channelNumber, targetVolume, effectiveDuration, config.resumeCurve);
+
   fadeState.isPaused = false;
+  fadeState.isTransitioning = false;
 };

 /**
  * Toggles pause/resume state for a specific channel with integrated fade
  * @param fadeType - Type of fade transition to apply when pausing
  * @param channelNumber - The channel number to toggle (defaults to 0)
+ * @param duration - Optional custom fade duration in milliseconds (uses fadeType default if not provided)
  * @returns Promise that resolves when the toggle and fade are complete
  * @example
  * ```typescript
  * await togglePauseWithFade(FadeType.Gentle, 0); // Toggle with gentle fade
+ * await togglePauseWithFade(FadeType.Dramatic, 0, 500); // Toggle with custom 500ms fade
  * ```
  */
-export const togglePauseWithFade = async (
+export const togglePauseWithFade = async (
+  fadeType: FadeType = FadeType.Gentle,
+  channelNumber: number = 0,
+  duration?: number
+): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (!channel || channel.queue.length === 0) return;
-
+
   const currentAudio: HTMLAudioElement = channel.queue[0];
-
+
   if (currentAudio.paused) {
-    await resumeWithFade(undefined, channelNumber);
+    await resumeWithFade(undefined, channelNumber, duration);
   } else {
-    await pauseWithFade(fadeType, channelNumber);
+    await pauseWithFade(fadeType, channelNumber, duration);
   }
 };

 /**
  * Pauses all currently playing audio across all channels with smooth volume fade
  * @param fadeType - Type of fade transition to apply to all channels
+ * @param duration - Optional custom fade duration in milliseconds (uses fadeType default if not provided)
  * @returns Promise that resolves when all channels are paused and faded
  * @example
  * ```typescript
- * await pauseAllWithFade(
+ * await pauseAllWithFade(FadeType.Dramatic); // Pause everything with dramatic fade
+ * await pauseAllWithFade(FadeType.Gentle, 1200); // Pause all channels with custom 1.2s fade
  * ```
  */
-export const pauseAllWithFade = async (
+export const pauseAllWithFade = async (
+  fadeType: FadeType = FadeType.Gentle,
+  duration?: number
+): Promise<void> => {
   const pausePromises: Promise<void>[] = [];
-
+
   audioChannels.forEach((_channel: ExtendedAudioQueueChannel, index: number) => {
-    pausePromises.push(pauseWithFade(fadeType, index));
+    pausePromises.push(pauseWithFade(fadeType, index, duration));
   });
-
+
   await Promise.all(pausePromises);
 };

 /**
  * Resumes all currently paused audio across all channels with smooth volume fade
- * Uses automatically paired fade curves based on each channel's pause fade type
+ * Uses automatically paired fade curves based on each channel's pause fade type, or allows override
+ * @param fadeType - Optional fade type to override stored fade types for all channels
+ * @param duration - Optional custom fade duration in milliseconds (uses stored or fadeType default if not provided)
  * @returns Promise that resolves when all channels are resumed and faded
  * @example
  * ```typescript
  * await resumeAllWithFade(); // Resume everything with paired fade curves
+ * await resumeAllWithFade(FadeType.Gentle, 800); // Override all channels with gentle fade over 800ms
+ * await resumeAllWithFade(undefined, 600); // Use stored fade types with custom 600ms duration
  * ```
  */
-export const resumeAllWithFade = async (): Promise<void> => {
+export const resumeAllWithFade = async (fadeType?: FadeType, duration?: number): Promise<void> => {
   const resumePromises: Promise<void>[] = [];

   audioChannels.forEach((_channel: ExtendedAudioQueueChannel, index: number) => {
-    resumePromises.push(resumeWithFade(
+    resumePromises.push(resumeWithFade(fadeType, index, duration));
   });

   await Promise.all(resumePromises);

@@ -199,17 +271,22 @@ export const resumeAllWithFade = async (): Promise<void> => {
  * If any channels are playing, all will be paused with fade
  * If all channels are paused, all will be resumed with fade
  * @param fadeType - Type of fade transition to apply when pausing
+ * @param duration - Optional custom fade duration in milliseconds (uses fadeType default if not provided)
  * @returns Promise that resolves when all toggles and fades are complete
  * @example
  * ```typescript
- * await togglePauseAllWithFade(
+ * await togglePauseAllWithFade(FadeType.Gentle); // Global toggle with gentle fade
+ * await togglePauseAllWithFade(FadeType.Dramatic, 600); // Global toggle with custom 600ms fade
  * ```
  */
-export const togglePauseAllWithFade = async (
+export const togglePauseAllWithFade = async (
+  fadeType: FadeType = FadeType.Gentle,
+  duration?: number
+): Promise<void> => {
   let hasPlayingChannel: boolean = false;
-
+
   // Check if any channel is currently playing
-  for (let i = 0; i < audioChannels.length; i++) {
+  for (let i: number = 0; i < audioChannels.length; i++) {
     const channel: ExtendedAudioQueueChannel = audioChannels[i];
     if (channel && channel.queue.length > 0) {
       const currentAudio: HTMLAudioElement = channel.queue[0];

@@ -219,13 +296,13 @@ export const togglePauseAllWithFade = async (fadeType: FadeType = FadeType.Gentl
       }
     }
   }
-
+
   // If any channel is playing, pause all with fade
   // If no channels are playing, resume all with fade
   if (hasPlayingChannel) {
-    await pauseAllWithFade(fadeType);
+    await pauseAllWithFade(fadeType, duration);
   } else {
-    await resumeAllWithFade();
+    await resumeAllWithFade(fadeType, duration);
   }
 };

@@ -241,15 +318,19 @@ export const togglePauseAllWithFade = async (fadeType: FadeType = FadeType.Gentl
  */
 export const pauseChannel = async (channelNumber: number = 0): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (channel && channel.queue.length > 0) {
     const currentAudio: HTMLAudioElement = channel.queue[0];
-
+
     if (!currentAudio.paused && !currentAudio.ended) {
       currentAudio.pause();
       channel.isPaused = true;
-
-      const audioInfo: AudioInfo | null = getAudioInfoFromElement(
+
+      const audioInfo: AudioInfo | null = getAudioInfoFromElement(
+        currentAudio,
+        channelNumber,
+        audioChannels
+      );
       if (audioInfo) {
         emitAudioPause(channelNumber, audioInfo, audioChannels);
       }

@@ -269,16 +350,20 @@ export const pauseChannel = async (channelNumber: number = 0): Promise<void> =>
  */
 export const resumeChannel = async (channelNumber: number = 0): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (channel && channel.queue.length > 0) {
     const currentAudio: HTMLAudioElement = channel.queue[0];
-
+
     // Only resume if both the channel is marked as paused AND the audio element is actually paused AND not ended
     if (channel.isPaused && currentAudio.paused && !currentAudio.ended) {
       await currentAudio.play();
       channel.isPaused = false;
-
-      const audioInfo: AudioInfo | null = getAudioInfoFromElement(
+
+      const audioInfo: AudioInfo | null = getAudioInfoFromElement(
+        currentAudio,
+        channelNumber,
+        audioChannels
+      );
       if (audioInfo) {
         emitAudioResume(channelNumber, audioInfo, audioChannels);
       }

@@ -297,10 +382,10 @@ export const resumeChannel = async (channelNumber: number = 0): Promise<void> =>
  */
 export const togglePauseChannel = async (channelNumber: number = 0): Promise<void> => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-
+
   if (channel && channel.queue.length > 0) {
     const currentAudio: HTMLAudioElement = channel.queue[0];
-
+
     if (currentAudio.paused) {
       await resumeChannel(channelNumber);
     } else {

@@ -319,11 +404,11 @@ export const togglePauseChannel = async (channelNumber: number = 0): Promise<voi
  */
 export const pauseAllChannels = async (): Promise<void> => {
   const pausePromises: Promise<void>[] = [];
-
+
   audioChannels.forEach((_channel: ExtendedAudioQueueChannel, index: number) => {
     pausePromises.push(pauseChannel(index));
   });
-
+
   await Promise.all(pausePromises);
 };

@@ -337,11 +422,11 @@ export const pauseAllChannels = async (): Promise<void> => {
  */
 export const resumeAllChannels = async (): Promise<void> => {
   const resumePromises: Promise<void>[] = [];
-
+
   audioChannels.forEach((_channel: ExtendedAudioQueueChannel, index: number) => {
     resumePromises.push(resumeChannel(index));
   });
-
+
   await Promise.all(resumePromises);
 };

@@ -357,7 +442,7 @@ export const resumeAllChannels = async (): Promise<void> => {
  */
 export const isChannelPaused = (channelNumber: number = 0): boolean => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
-  return channel?.isPaused
+  return channel?.isPaused ?? false;
 };

 /**

@@ -372,9 +457,7 @@ export const isChannelPaused = (channelNumber: number = 0): boolean => {
  * ```
  */
 export const getAllChannelsPauseState = (): boolean[] => {
-  return audioChannels.map((channel: ExtendedAudioQueueChannel) =>
-    channel?.isPaused || false
-  );
+  return audioChannels.map((channel: ExtendedAudioQueueChannel) => channel?.isPaused ?? false);
 };

 /**

@@ -389,9 +472,9 @@ export const getAllChannelsPauseState = (): boolean[] => {
  */
 export const togglePauseAllChannels = async (): Promise<void> => {
   let hasPlayingChannel: boolean = false;
-
+
   // Check if any channel is currently playing
-  for (let i = 0; i < audioChannels.length; i++) {
+  for (let i: number = 0; i < audioChannels.length; i++) {
     const channel: ExtendedAudioQueueChannel = audioChannels[i];
     if (channel && channel.queue.length > 0) {
       const currentAudio: HTMLAudioElement = channel.queue[0];

@@ -401,7 +484,7 @@ export const togglePauseAllChannels = async (): Promise<void> => {
       }
     }
   }
-
+
   // If any channel is playing, pause all channels
   // If no channels are playing, resume all channels
   if (hasPlayingChannel) {

@@ -409,4 +492,4 @@ export const togglePauseAllChannels = async (): Promise<void> => {
   } else {
     await resumeAllChannels();
   }
-};
+};
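The visible API change in pause.ts is the optional `duration` parameter on the fade helpers, which overrides the fade type's 800ms default and is remembered in `fadeState.customDuration` so a later resume reuses it. A minimal usage sketch follows, assuming these functions and the `FadeType` enum are re-exported from the package entry point (index.ts is in the changed-file list); the channel numbers and millisecond values are illustrative only.

```typescript
// Sketch of the optional `duration` parameter added in 1.9.0 (values are illustrative)
import {
  pauseWithFade,
  resumeWithFade,
  togglePauseAllWithFade,
  FadeType
} from 'audio-channel-queue';

const duckForVoiceover = async (): Promise<void> => {
  // Pause channel 0 with the gentle curve, overriding the 800ms default with 250ms
  await pauseWithFade(FadeType.Gentle, 0, 250);

  // ...play a voiceover on another channel...

  // Omitting fadeType and duration reuses the fade state stored at pause time,
  // including the custom 250ms duration (fadeState.customDuration)
  await resumeWithFade(undefined, 0);
};

// Global toggle across every channel with a dramatic curve and a custom 600ms fade
const toggleEverything = (): Promise<void> => togglePauseAllWithFade(FadeType.Dramatic, 600);
```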
package/src/types.ts
CHANGED

@@ -2,6 +2,12 @@
  * @fileoverview Type definitions for the audio-channel-queue package
  */

+/**
+ * Symbol used as a key for global (channel-wide) progress callbacks
+ * This avoids the need for `null as any` type assertions
+ */
+export const GLOBAL_PROGRESS_KEY: unique symbol = Symbol('global-progress-callbacks');
+
 /**
  * Array of HTMLAudioElement objects representing an audio queue
  */

@@ -10,7 +16,7 @@ export type AudioQueue = HTMLAudioElement[];
 /**
  * Basic audio queue channel structure
  */
-export
+export interface AudioQueueChannel {
   queue: AudioQueue;
 }

@@ -228,7 +234,7 @@ export interface ExtendedAudioQueueChannel {
   audioStartCallbacks: Set<AudioStartCallback>;
   fadeState?: ChannelFadeState;
   isPaused?: boolean;
-  progressCallbacks: Map<HTMLAudioElement |
+  progressCallbacks: Map<HTMLAudioElement | typeof GLOBAL_PROGRESS_KEY, Set<ProgressCallback>>;
   queue: HTMLAudioElement[];
   queueChangeCallbacks: Set<QueueChangeCallback>;
   retryConfig?: RetryConfig;

@@ -277,4 +283,8 @@ export interface ChannelFadeState {
   fadeType: FadeType;
   /** Whether the channel is currently paused due to fade */
   isPaused: boolean;
+  /** Custom duration in milliseconds if specified (overrides fade type default) */
+  customDuration?: number;
+  /** Whether the channel is currently transitioning (during any fade operation) to prevent capturing intermediate volumes during rapid pause/resume toggles */
+  isTransitioning?: boolean;
 }
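types.ts introduces `GLOBAL_PROGRESS_KEY`, a unique symbol that replaces the former `null as any` key in each channel's `progressCallbacks` map. The sketch below shows how internal code could register a channel-wide callback with the new key; `registerGlobalProgressCallback` is a hypothetical helper, not part of the package API, and it assumes `ProgressCallback` is exported from types.ts as the map type suggests.

```typescript
import { GLOBAL_PROGRESS_KEY } from './types';
import type { ExtendedAudioQueueChannel, ProgressCallback } from './types';

// Hypothetical helper: register a channel-wide progress callback.
// The unique symbol is a type-safe Map key alongside HTMLAudioElement keys,
// so no `null as any` assertion is needed.
const registerGlobalProgressCallback = (
  channel: ExtendedAudioQueueChannel,
  callback: ProgressCallback
): void => {
  const callbacks = channel.progressCallbacks.get(GLOBAL_PROGRESS_KEY) ?? new Set<ProgressCallback>();
  callbacks.add(callback);
  channel.progressCallbacks.set(GLOBAL_PROGRESS_KEY, callbacks);
};
```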
package/src/utils.ts
CHANGED

@@ -40,14 +40,14 @@ export const extractFileName = (url: string): string => {
  * const audioElement = new Audio('song.mp3');
  * const info = getAudioInfoFromElement(audioElement);
  * console.log(info?.progress); // Current progress as decimal (0-1)
- *
+ *
  * // With channel context for remainingInQueue
  * const infoWithQueue = getAudioInfoFromElement(audioElement, 0, audioChannels);
  * console.log(infoWithQueue?.remainingInQueue); // Number of items left in queue
  * ```
  */
 export const getAudioInfoFromElement = (
-  audio: HTMLAudioElement,
+  audio: HTMLAudioElement,
   channelNumber?: number,
   audioChannels?: ExtendedAudioQueueChannel[]
 ): AudioInfo | null => {

@@ -60,7 +60,7 @@ export const getAudioInfoFromElement = (

   // Calculate remainingInQueue if channel context is provided
   let remainingInQueue: number = 0;
-  if (channelNumber !== undefined && audioChannels
+  if (channelNumber !== undefined && audioChannels?.[channelNumber]) {
     const channel = audioChannels[channelNumber];
     remainingInQueue = Math.max(0, channel.queue.length - 1); // Exclude current playing audio
   }

@@ -91,7 +91,7 @@ export const getAudioInfoFromElement = (
  * ```
  */
 export const createQueueSnapshot = (
-  channelNumber: number,
+  channelNumber: number,
   audioChannels: ExtendedAudioQueueChannel[]
 ): QueueSnapshot | null => {
   const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];

@@ -109,10 +109,10 @@ export const createQueueSnapshot = (
   return {
     channelNumber,
     currentIndex: 0, // Current playing is always index 0 in our queue structure
-    isPaused: channel.isPaused
+    isPaused: channel.isPaused ?? false,
     items,
     totalItems: channel.queue.length,
-    volume: channel.volume
+    volume: channel.volume ?? 1.0
   };
 };

@@ -131,4 +131,4 @@ export const createQueueSnapshot = (
 export const cleanWebpackFilename = (fileName: string): string => {
   // Remove webpack hash pattern: filename.hash.ext → filename.ext
   return fileName.replace(/\.[a-f0-9]{8,}\./i, '.');
-};
+};
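The utils.ts edits are mostly about defaults: `channel.isPaused ?? false` and `channel.volume ?? 1.0` replace the previous bare fallbacks. The distinction matters because nullish coalescing only falls back on `null`/`undefined`, so a channel deliberately set to volume 0 keeps that value in the snapshot. A standalone illustration (not package code) follows.

```typescript
// Standalone illustration of `??` vs `||` for the snapshot defaults
const mutedVolume: number | undefined = 0;

const withLogicalOr = mutedVolume || 1.0; // 1.0 — an explicit 0 is treated as "missing"
const withNullish = mutedVolume ?? 1.0;   // 0   — only null/undefined fall back to 1.0

console.log(withLogicalOr, withNullish);
```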