audio-channel-queue 1.12.0 → 1.12.1-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -15
- package/dist/core.js +3 -0
- package/dist/errors.js +0 -40
- package/dist/index.d.ts +3 -2
- package/dist/index.js +18 -2
- package/dist/types.d.ts +37 -0
- package/dist/volume.d.ts +40 -0
- package/dist/volume.js +182 -16
- package/dist/web-audio.d.ts +156 -0
- package/dist/web-audio.js +327 -0
- package/package.json +5 -2
- package/src/core.ts +11 -1
- package/src/errors.ts +1 -49
- package/src/index.ts +22 -1
- package/src/types.ts +40 -0
- package/src/volume.ts +214 -14
- package/src/web-audio.ts +331 -0
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* @fileoverview Web Audio API support for enhanced volume control on iOS and other platforms
|
|
4
|
+
*/
|
|
5
|
+
// TypeScript-emitted async/await helper: drives a generator, funnelling each
// yielded value through the supplied Promise constructor P (defaults to the
// global Promise). Reuses a previously installed helper when one exists.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Normalize non-Promise yields so every step can be awaited uniformly.
    function adopt(value) {
        return value instanceof P ? value : new P(function (resolve) { resolve(value); });
    }
    return new (P || (P = Promise))(function (resolve, reject) {
        function onFulfilled(value) {
            try {
                step(generator.next(value));
            }
            catch (e) {
                reject(e);
            }
        }
        function onRejected(value) {
            try {
                step(generator["throw"](value));
            }
            catch (e) {
                reject(e);
            }
        }
        // Advance the generator until it completes, awaiting each yield.
        function step(result) {
            if (result.done) {
                resolve(result.value);
            }
            else {
                adopt(result.value).then(onFulfilled, onRejected);
            }
        }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
14
|
+
// CommonJS interop marker emitted by the TypeScript compiler: flags this
// module as a transpiled ES module so import helpers treat it correctly.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-initialize every public binding to undefined; the real values are
// assigned immediately after each definition later in this file.
exports.cleanupWebAudioNodes = exports.resumeAudioContext = exports.getWebAudioVolume = exports.setWebAudioVolume = exports.createWebAudioNodes = exports.getAudioContext = exports.getWebAudioConfig = exports.setWebAudioConfig = exports.getWebAudioSupport = exports.shouldUseWebAudio = exports.isWebAudioSupported = exports.isIOSDevice = void 0;
|
|
16
|
+
/**
 * Module-level Web Audio API configuration. Mutated via setWebAudioConfig
 * and consulted by shouldUseWebAudio / getWebAudioSupport.
 */
let webAudioConfig = {
    autoDetectIOS: true,
    enabled: true,
    forceWebAudio: false
};
|
|
24
|
+
/**
 * Detects whether the current device is iOS (iPhone/iPad/iPod), including
 * modern iPads that identify as "Macintosh" but expose multi-touch.
 * @returns True if the device is iOS, false otherwise
 * @example
 * ```typescript
 * if (isIOSDevice()) {
 *   console.log('Running on iOS device');
 * }
 * ```
 */
const isIOSDevice = () => {
    // No navigator at all (e.g. plain Node) -> definitely not iOS.
    if (typeof navigator === 'undefined')
        return false;
    // Prefer the User-Agent Client Hints API when the browser provides it.
    const nav = navigator;
    if ('userAgentData' in navigator && nav.userAgentData) {
        return nav.userAgentData.platform === 'iOS';
    }
    // Legacy path: inspect the user-agent string.
    const ua = navigator.userAgent || '';
    if (/iPad|iPhone|iPod/.test(ua)) {
        return true;
    }
    // iPadOS 13+ reports as "Macintosh"; distinguish via touch support.
    return /Macintosh/.test(ua) && 'maxTouchPoints' in navigator && navigator.maxTouchPoints > 1;
};
|
|
49
|
+
// Public export (CommonJS).
exports.isIOSDevice = isIOSDevice;
|
|
50
|
+
/**
 * Checks if Web Audio API is available in the current environment.
 *
 * In browsers this looks for `AudioContext` (or the webkit-prefixed
 * fallback); in Node.js test environments it checks whether the
 * constructors have been installed on the `global` object.
 * @returns True if Web Audio API is supported, false otherwise
 * @example
 * ```typescript
 * if (isWebAudioSupported()) {
 *   console.log('Web Audio API is available');
 * }
 * ```
 */
const isWebAudioSupported = () => {
    if (typeof window === 'undefined') {
        // Fix: the original dereferenced `global` unguarded (and shadowed the
        // standard `globalThis` name), which throws a ReferenceError in
        // window-less environments without a `global` binding (e.g. web
        // workers, some bundler sandboxes). Treat those as unsupported.
        if (typeof global === 'undefined') {
            return false;
        }
        // In Node.js (tests), check if Web Audio API globals are available.
        const nodeGlobal = global;
        return (typeof nodeGlobal.AudioContext !== 'undefined' ||
            typeof nodeGlobal.webkitAudioContext !== 'undefined');
    }
    const windowWithWebkit = window;
    return (typeof AudioContext !== 'undefined' ||
        typeof windowWithWebkit.webkitAudioContext !== 'undefined');
};
|
71
|
+
// Public export (CommonJS).
exports.isWebAudioSupported = isWebAudioSupported;
|
|
72
|
+
/**
 * Decides whether Web Audio API should drive volume control, combining the
 * module configuration with environment and device detection.
 * @returns True if Web Audio API should be used, false otherwise
 * @example
 * ```typescript
 * if (shouldUseWebAudio()) {
 *   // Use Web Audio API for volume control
 * }
 * ```
 */
const shouldUseWebAudio = () => {
    // Hard requirements: feature enabled AND the API actually available.
    const usable = webAudioConfig.enabled && (0, exports.isWebAudioSupported)();
    if (!usable)
        return false;
    // Explicit override wins; otherwise fall back to iOS auto-detection.
    if (webAudioConfig.forceWebAudio)
        return true;
    return webAudioConfig.autoDetectIOS && (0, exports.isIOSDevice)();
};
exports.shouldUseWebAudio = shouldUseWebAudio;
|
|
94
|
+
/**
 * Reports Web Audio API support status plus a human-readable reason.
 * @returns Object with `available`, `isIOS`, `reason` and `usingWebAudio`
 * @example
 * ```typescript
 * const support = getWebAudioSupport();
 * console.log(`Using Web Audio: ${support.usingWebAudio}`);
 * console.log(`Reason: ${support.reason}`);
 * ```
 */
const getWebAudioSupport = () => {
    const available = (0, exports.isWebAudioSupported)();
    const isIOS = (0, exports.isIOSDevice)();
    const usingWebAudio = (0, exports.shouldUseWebAudio)();
    // Derive the explanation in the same precedence order the decision uses.
    let reason;
    if (!webAudioConfig.enabled) {
        reason = 'Web Audio API disabled in configuration';
    }
    else if (!available) {
        reason = 'Web Audio API not supported in this environment';
    }
    else if (webAudioConfig.forceWebAudio) {
        reason = 'Web Audio API forced via configuration';
    }
    else if (isIOS && webAudioConfig.autoDetectIOS) {
        reason = 'iOS device detected - using Web Audio API for volume control';
    }
    else {
        reason = 'Using standard HTMLAudioElement volume control';
    }
    return { available, isIOS, reason, usingWebAudio };
};
exports.getWebAudioSupport = getWebAudioSupport;
|
|
132
|
+
/**
 * Merges partial configuration into the module's Web Audio settings.
 * @param config - Partial configuration; unspecified keys keep their values
 * @example
 * ```typescript
 * // Force Web Audio API usage on all devices
 * setWebAudioConfig({ forceWebAudio: true });
 *
 * // Disable Web Audio API entirely
 * setWebAudioConfig({ enabled: false });
 * ```
 */
const setWebAudioConfig = (config) => {
    // Shallow-merge onto a fresh object so the previous config object is
    // never mutated in place.
    webAudioConfig = Object.assign({}, webAudioConfig, config);
};
exports.setWebAudioConfig = setWebAudioConfig;
|
|
148
|
+
/**
 * Returns a defensive shallow copy of the current Web Audio configuration,
 * so callers cannot mutate the module's internal state.
 * @returns Snapshot of the active configuration
 * @example
 * ```typescript
 * const config = getWebAudioConfig();
 * console.log(`Web Audio enabled: ${config.enabled}`);
 * ```
 */
const getWebAudioConfig = () => {
    const snapshot = Object.assign({}, webAudioConfig);
    return snapshot;
};
exports.getWebAudioConfig = getWebAudioConfig;
|
|
161
|
+
/**
 * Creates an AudioContext (or webkit-prefixed equivalent) for Web Audio use.
 *
 * NOTE(review): despite the original "creates or gets" phrasing, every call
 * constructs a NEW context - callers appear expected to cache the result;
 * confirm against call sites before relying on reuse.
 * @returns AudioContext instance, or null when unsupported or in Node
 * @example
 * ```typescript
 * const context = getAudioContext();
 * if (context) {
 *   console.log('Audio context created successfully');
 * }
 * ```
 */
const getAudioContext = () => {
    if (!(0, exports.isWebAudioSupported)())
        return null;
    try {
        if (typeof window === 'undefined') {
            // Node/test environment: signal "no real context" so tests can mock.
            return null;
        }
        // Prefer the standard constructor, falling back to the webkit prefix.
        const windowWithWebkit = window;
        const ContextCtor = window.AudioContext || windowWithWebkit.webkitAudioContext;
        return new ContextCtor();
    }
    catch (error) {
        // eslint-disable-next-line no-console
        console.warn('Failed to create AudioContext:', error);
        return null;
    }
};
exports.getAudioContext = getAudioContext;
|
|
192
|
+
/**
 * Wires an HTMLAudioElement into the Web Audio graph:
 * source -> gain -> destination.
 * @param audioElement - The HTML audio element to route
 * @param audioContext - The AudioContext that owns the new nodes
 * @returns Node set ({ gainNode, sourceNode }) or null if creation fails
 * @example
 * ```typescript
 * const audio = new Audio('song.mp3');
 * const context = getAudioContext();
 * if (context) {
 *   const nodes = createWebAudioNodes(audio, context);
 *   if (nodes) {
 *     nodes.gainNode.gain.value = 0.5; // Set volume to 50%
 *   }
 * }
 * ```
 */
const createWebAudioNodes = (audioElement, audioContext) => {
    try {
        const sourceNode = audioContext.createMediaElementSource(audioElement);
        const gainNode = audioContext.createGain();
        // Route the element through the gain stage out to the speakers.
        sourceNode.connect(gainNode);
        gainNode.connect(audioContext.destination);
        return { gainNode, sourceNode };
    }
    catch (error) {
        // eslint-disable-next-line no-console
        console.warn('Failed to create Web Audio nodes:', error);
        return null;
    }
};
|
|
230
|
+
// Public export (CommonJS).
exports.createWebAudioNodes = createWebAudioNodes;
|
|
231
|
+
/**
 * Applies a volume level (clamped to [0, 1]) to a gain node, optionally
 * ramping linearly over a transition period.
 * @param gainNode - The gain node to set volume on
 * @param volume - Volume level (0-1); out-of-range values are clamped
 * @param transitionDuration - Optional ramp time in milliseconds; omitted or
 *   non-positive means an instant change
 * @example
 * ```typescript
 * const nodes = createWebAudioNodes(audio, context);
 * if (nodes) {
 *   setWebAudioVolume(nodes.gainNode, 0.5); // Set to 50% volume
 *   setWebAudioVolume(nodes.gainNode, 0.2, 300); // Fade to 20% over 300ms
 * }
 * ```
 */
const setWebAudioVolume = (gainNode, volume, transitionDuration) => {
    const target = Math.max(0, Math.min(1, volume));
    const now = gainNode.context.currentTime;
    // Always drop pending automation before scheduling the new value.
    gainNode.gain.cancelScheduledValues(now);
    if (transitionDuration && transitionDuration > 0) {
        // Anchor at the current value, then ramp to the target over the
        // requested duration using Web Audio's built-in scheduling.
        gainNode.gain.setValueAtTime(gainNode.gain.value, now);
        gainNode.gain.linearRampToValueAtTime(target, now + transitionDuration / 1000);
    }
    else {
        // Instant change.
        gainNode.gain.setValueAtTime(target, now);
    }
};
|
|
260
|
+
// Public export (CommonJS).
exports.setWebAudioVolume = setWebAudioVolume;
|
|
261
|
+
/**
 * Reads the current gain (volume) value from a Web Audio gain node.
 * @param gainNode - The gain node to inspect
 * @returns Current volume level (0-1)
 * @example
 * ```typescript
 * const nodes = createWebAudioNodes(audio, context);
 * if (nodes) {
 *   const volume = getWebAudioVolume(nodes.gainNode);
 *   console.log(`Current volume: ${volume * 100}%`);
 * }
 * ```
 */
const getWebAudioVolume = (gainNode) => gainNode.gain.value;
|
|
277
|
+
// Public export (CommonJS).
exports.getWebAudioVolume = getWebAudioVolume;
|
|
278
|
+
/**
 * Resumes an AudioContext that is in the 'suspended' state (required by
 * browser autoplay policies). Resume failures are logged and swallowed so
 * playback setup can continue.
 * @param audioContext - The AudioContext to resume
 * @returns Promise that resolves once the resume attempt has finished
 * @example
 * ```typescript
 * const context = getAudioContext();
 * if (context) {
 *   await resumeAudioContext(context);
 * }
 * ```
 */
const resumeAudioContext = (audioContext) => __awaiter(void 0, void 0, void 0, function* () {
    // Nothing to do unless the context is actually suspended.
    if (audioContext.state !== 'suspended') {
        return;
    }
    try {
        yield audioContext.resume();
    }
    catch (error) {
        // eslint-disable-next-line no-console
        console.warn('Failed to resume AudioContext:', error);
        // Intentionally not rethrown - resume failure is non-fatal here.
    }
});
exports.resumeAudioContext = resumeAudioContext;
|
|
303
|
+
/**
 * Disconnects the source and gain nodes of a Web Audio node set. Disconnect
 * errors are logged but never thrown, so teardown is always safe to call.
 * @param nodes - The Web Audio API node set to clean up
 * @example
 * ```typescript
 * const nodes = createWebAudioNodes(audio, context);
 * if (nodes) {
 *   // Use nodes...
 *   cleanupWebAudioNodes(nodes); // Clean up when done
 * }
 * ```
 */
const cleanupWebAudioNodes = (nodes) => {
    try {
        const { sourceNode, gainNode } = nodes;
        sourceNode.disconnect();
        gainNode.disconnect();
    }
    catch (error) {
        // Best-effort cleanup: log and continue.
        // eslint-disable-next-line no-console
        console.warn('Error during Web Audio cleanup:', error);
    }
};
|
|
327
|
+
// Public export (CommonJS).
exports.cleanupWebAudioNodes = cleanupWebAudioNodes;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "audio-channel-queue",
|
|
3
|
-
"version": "1.12.0",
|
|
3
|
+
"version": "1.12.1-beta.2",
|
|
4
4
|
"description": "Allows you to queue audio files to different playback channels.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -10,7 +10,7 @@
|
|
|
10
10
|
],
|
|
11
11
|
"scripts": {
|
|
12
12
|
"build": "tsc",
|
|
13
|
-
"prepare": "
|
|
13
|
+
"prepare": "husky",
|
|
14
14
|
"test": "jest",
|
|
15
15
|
"test:watch": "jest --watch",
|
|
16
16
|
"test:coverage": "jest --coverage",
|
|
@@ -26,6 +26,7 @@
|
|
|
26
26
|
"publish:minor": "npm version minor && npm run publish:release",
|
|
27
27
|
"publish:major": "npm version major && npm run publish:release",
|
|
28
28
|
"publish:release": "npm run validate && npm run build && npm publish",
|
|
29
|
+
"publish:beta": "npm version prerelease --preid=beta && npm publish --tag beta",
|
|
29
30
|
"publish:dry": "npm run validate && npm run build && npm publish --dry-run"
|
|
30
31
|
},
|
|
31
32
|
"repository": {
|
|
@@ -49,8 +50,10 @@
|
|
|
49
50
|
},
|
|
50
51
|
"devDependencies": {
|
|
51
52
|
"@types/jest": "^29.5.13",
|
|
53
|
+
"husky": "^9.1.7",
|
|
52
54
|
"jest": "^29.7.0",
|
|
53
55
|
"jest-environment-jsdom": "^29.7.0",
|
|
56
|
+
"lint-staged": "^16.2.7",
|
|
54
57
|
"rimraf": "^6.0.1",
|
|
55
58
|
"ts-jest": "^29.2.5",
|
|
56
59
|
"typescript": "^5.6.2",
|
package/src/core.ts
CHANGED
|
@@ -12,7 +12,13 @@ import {
|
|
|
12
12
|
setupProgressTracking,
|
|
13
13
|
cleanupProgressTracking
|
|
14
14
|
} from './events';
|
|
15
|
-
import {
|
|
15
|
+
import {
|
|
16
|
+
applyVolumeDucking,
|
|
17
|
+
restoreVolumeLevels,
|
|
18
|
+
cancelVolumeTransition,
|
|
19
|
+
initializeWebAudioForAudio,
|
|
20
|
+
cleanupWebAudioForAudio
|
|
21
|
+
} from './volume';
|
|
16
22
|
import { setupAudioErrorHandling, handleAudioError } from './errors';
|
|
17
23
|
|
|
18
24
|
/**
|
|
@@ -283,6 +289,9 @@ export const queueAudio = async (
|
|
|
283
289
|
await handleAudioError(audio, channelNumber, validatedUrl, error);
|
|
284
290
|
});
|
|
285
291
|
|
|
292
|
+
// Initialize Web Audio API support if needed
|
|
293
|
+
await initializeWebAudioForAudio(audio, channelNumber);
|
|
294
|
+
|
|
286
295
|
// Apply options if provided
|
|
287
296
|
if (options) {
|
|
288
297
|
if (typeof options.loop === 'boolean') {
|
|
@@ -440,6 +449,7 @@ export const playAudioQueue = async (channelNumber: number): Promise<void> => {
|
|
|
440
449
|
// For non-looping audio, remove from queue and play next
|
|
441
450
|
currentAudio.pause();
|
|
442
451
|
cleanupProgressTracking(currentAudio, channelNumber, audioChannels);
|
|
452
|
+
cleanupWebAudioForAudio(currentAudio, channelNumber);
|
|
443
453
|
channel.queue.shift();
|
|
444
454
|
channel.isPaused = false; // Reset pause state
|
|
445
455
|
|
package/src/errors.ts
CHANGED
|
@@ -34,8 +34,6 @@ let globalErrorRecovery: ErrorRecoveryOptions = {
|
|
|
34
34
|
|
|
35
35
|
const retryAttempts: WeakMap<HTMLAudioElement, number> = new WeakMap();
|
|
36
36
|
|
|
37
|
-
const loadTimeouts: WeakMap<HTMLAudioElement, number> = new WeakMap();
|
|
38
|
-
|
|
39
37
|
/**
|
|
40
38
|
* Subscribes to audio error events for a specific channel
|
|
41
39
|
* @param channelNumber - The channel number to listen to (defaults to 0)
|
|
@@ -310,42 +308,8 @@ export const setupAudioErrorHandling = (
|
|
|
310
308
|
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
311
309
|
if (!channel) return;
|
|
312
310
|
|
|
313
|
-
// Set up loading timeout with test environment compatibility
|
|
314
|
-
let timeoutId: number;
|
|
315
|
-
if (typeof setTimeout !== 'undefined') {
|
|
316
|
-
timeoutId = setTimeout(() => {
|
|
317
|
-
if (audio.networkState === HTMLMediaElement.NETWORK_LOADING) {
|
|
318
|
-
const timeoutError = new Error(
|
|
319
|
-
`Audio loading timeout after ${globalRetryConfig.timeoutMs}ms`
|
|
320
|
-
);
|
|
321
|
-
handleAudioError(audio, channelNumber, originalUrl, timeoutError);
|
|
322
|
-
}
|
|
323
|
-
}, globalRetryConfig.timeoutMs) as unknown as number;
|
|
324
|
-
|
|
325
|
-
loadTimeouts.set(audio, timeoutId);
|
|
326
|
-
}
|
|
327
|
-
|
|
328
|
-
// Clear timeout when metadata loads successfully
|
|
329
|
-
const handleLoadSuccess = (): void => {
|
|
330
|
-
if (typeof setTimeout !== 'undefined') {
|
|
331
|
-
const timeoutId = loadTimeouts.get(audio);
|
|
332
|
-
if (timeoutId) {
|
|
333
|
-
clearTimeout(timeoutId);
|
|
334
|
-
loadTimeouts.delete(audio);
|
|
335
|
-
}
|
|
336
|
-
}
|
|
337
|
-
};
|
|
338
|
-
|
|
339
311
|
// Handle various error events
|
|
340
312
|
const handleError = (_event: Event): void => {
|
|
341
|
-
if (typeof setTimeout !== 'undefined') {
|
|
342
|
-
const timeoutId = loadTimeouts.get(audio);
|
|
343
|
-
if (timeoutId) {
|
|
344
|
-
clearTimeout(timeoutId);
|
|
345
|
-
loadTimeouts.delete(audio);
|
|
346
|
-
}
|
|
347
|
-
}
|
|
348
|
-
|
|
349
313
|
const error = new Error(`Audio loading failed: ${audio.error?.message || 'Unknown error'}`);
|
|
350
314
|
handleAudioError(audio, channelNumber, originalUrl, error);
|
|
351
315
|
};
|
|
@@ -364,8 +328,6 @@ export const setupAudioErrorHandling = (
|
|
|
364
328
|
audio.addEventListener('error', handleError);
|
|
365
329
|
audio.addEventListener('abort', handleAbort);
|
|
366
330
|
audio.addEventListener('stalled', handleStall);
|
|
367
|
-
audio.addEventListener('loadedmetadata', handleLoadSuccess);
|
|
368
|
-
audio.addEventListener('canplay', handleLoadSuccess);
|
|
369
331
|
|
|
370
332
|
// Custom play error handling
|
|
371
333
|
if (onError) {
|
|
@@ -423,7 +385,7 @@ export const handleAudioError = async (
|
|
|
423
385
|
currentAttempts < retryConfig.maxRetries &&
|
|
424
386
|
globalErrorRecovery.autoRetry
|
|
425
387
|
) {
|
|
426
|
-
const delay = retryConfig.exponentialBackoff
|
|
388
|
+
const delay: number = retryConfig.exponentialBackoff
|
|
427
389
|
? retryConfig.baseDelay * Math.pow(2, currentAttempts)
|
|
428
390
|
: retryConfig.baseDelay;
|
|
429
391
|
|
|
@@ -481,21 +443,11 @@ export const createProtectedAudioElement = async (
|
|
|
481
443
|
const audio = new Audio();
|
|
482
444
|
|
|
483
445
|
return new Promise((resolve, reject) => {
|
|
484
|
-
const cleanup = (): void => {
|
|
485
|
-
const timeoutId = loadTimeouts.get(audio);
|
|
486
|
-
if (timeoutId) {
|
|
487
|
-
clearTimeout(timeoutId);
|
|
488
|
-
loadTimeouts.delete(audio);
|
|
489
|
-
}
|
|
490
|
-
};
|
|
491
|
-
|
|
492
446
|
const handleSuccess = (): void => {
|
|
493
|
-
cleanup();
|
|
494
447
|
resolve(audio);
|
|
495
448
|
};
|
|
496
449
|
|
|
497
450
|
const handleError = (error: Error): void => {
|
|
498
|
-
cleanup();
|
|
499
451
|
reject(error);
|
|
500
452
|
};
|
|
501
453
|
|
package/src/index.ts
CHANGED
|
@@ -66,12 +66,30 @@ export {
|
|
|
66
66
|
getAllChannelsVolume,
|
|
67
67
|
getChannelVolume,
|
|
68
68
|
getFadeConfig,
|
|
69
|
+
getGlobalVolume,
|
|
69
70
|
setAllChannelsVolume,
|
|
70
71
|
setChannelVolume,
|
|
72
|
+
setGlobalVolume,
|
|
71
73
|
setVolumeDucking,
|
|
72
74
|
transitionVolume
|
|
73
75
|
} from './volume';
|
|
74
76
|
|
|
77
|
+
// Web Audio API support functions
|
|
78
|
+
export {
|
|
79
|
+
cleanupWebAudioNodes,
|
|
80
|
+
createWebAudioNodes,
|
|
81
|
+
getAudioContext,
|
|
82
|
+
getWebAudioConfig,
|
|
83
|
+
getWebAudioSupport,
|
|
84
|
+
getWebAudioVolume,
|
|
85
|
+
isIOSDevice,
|
|
86
|
+
isWebAudioSupported,
|
|
87
|
+
resumeAudioContext,
|
|
88
|
+
setWebAudioConfig,
|
|
89
|
+
setWebAudioVolume,
|
|
90
|
+
shouldUseWebAudio
|
|
91
|
+
} from './web-audio';
|
|
92
|
+
|
|
75
93
|
// Audio information and progress tracking functions
|
|
76
94
|
export {
|
|
77
95
|
getAllChannelsInfo,
|
|
@@ -122,12 +140,15 @@ export type {
|
|
|
122
140
|
FadeConfig,
|
|
123
141
|
ProgressCallback,
|
|
124
142
|
QueueChangeCallback,
|
|
143
|
+
QueueConfig,
|
|
125
144
|
QueueItem,
|
|
126
145
|
QueueManipulationResult,
|
|
127
146
|
QueueSnapshot,
|
|
128
147
|
RetryConfig,
|
|
129
148
|
VolumeConfig,
|
|
130
|
-
|
|
149
|
+
WebAudioConfig,
|
|
150
|
+
WebAudioNodeSet,
|
|
151
|
+
WebAudioSupport
|
|
131
152
|
} from './types';
|
|
132
153
|
|
|
133
154
|
// Enums and constants
|
package/src/types.ts
CHANGED
|
@@ -285,6 +285,42 @@ export interface ErrorRecoveryOptions {
|
|
|
285
285
|
*/
|
|
286
286
|
export type AudioErrorCallback = (errorInfo: AudioErrorInfo) => void;
|
|
287
287
|
|
|
288
|
+
/**
|
|
289
|
+
* Web Audio API configuration options
|
|
290
|
+
*/
|
|
291
|
+
export interface WebAudioConfig {
|
|
292
|
+
/** Whether to automatically use Web Audio API on iOS devices */
|
|
293
|
+
autoDetectIOS: boolean;
|
|
294
|
+
/** Whether Web Audio API support is enabled */
|
|
295
|
+
enabled: boolean;
|
|
296
|
+
/** Whether to force Web Audio API usage on all devices */
|
|
297
|
+
forceWebAudio: boolean;
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
/**
|
|
301
|
+
* Web Audio API support information
|
|
302
|
+
*/
|
|
303
|
+
export interface WebAudioSupport {
|
|
304
|
+
/** Whether Web Audio API is available in the current environment */
|
|
305
|
+
available: boolean;
|
|
306
|
+
/** Whether the current device is iOS */
|
|
307
|
+
isIOS: boolean;
|
|
308
|
+
/** Whether Web Audio API is currently being used */
|
|
309
|
+
usingWebAudio: boolean;
|
|
310
|
+
/** Reason for current Web Audio API usage state */
|
|
311
|
+
reason: string;
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
/**
|
|
315
|
+
* Web Audio API node set for audio element control
|
|
316
|
+
*/
|
|
317
|
+
export interface WebAudioNodeSet {
|
|
318
|
+
/** Gain node for volume control */
|
|
319
|
+
gainNode: GainNode;
|
|
320
|
+
/** Media element source node */
|
|
321
|
+
sourceNode: MediaElementAudioSourceNode;
|
|
322
|
+
}
|
|
323
|
+
|
|
288
324
|
/**
|
|
289
325
|
* Extended audio channel with comprehensive queue management, callback support, and state tracking
|
|
290
326
|
*/
|
|
@@ -317,6 +353,10 @@ export interface ExtendedAudioQueueChannel {
|
|
|
317
353
|
retryConfig?: RetryConfig;
|
|
318
354
|
/** Current volume level for the channel (0-1) */
|
|
319
355
|
volume: number;
|
|
356
|
+
/** Web Audio API context for this channel */
|
|
357
|
+
webAudioContext?: AudioContext;
|
|
358
|
+
/** Map of Web Audio API nodes for each audio element */
|
|
359
|
+
webAudioNodes?: Map<HTMLAudioElement, WebAudioNodeSet>;
|
|
320
360
|
}
|
|
321
361
|
|
|
322
362
|
/**
|