audio-channel-queue 1.12.0 → 1.12.1-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core.js +3 -0
- package/dist/errors.js +0 -40
- package/dist/index.d.ts +2 -1
- package/dist/index.js +16 -2
- package/dist/types.d.ts +37 -0
- package/dist/volume.d.ts +15 -0
- package/dist/volume.js +130 -15
- package/dist/web-audio.d.ts +156 -0
- package/dist/web-audio.js +327 -0
- package/package.json +1 -1
- package/src/core.ts +11 -1
- package/src/errors.ts +1 -49
- package/src/index.ts +20 -1
- package/src/types.ts +40 -0
- package/src/volume.ts +157 -13
- package/src/web-audio.ts +331 -0
package/src/volume.ts
CHANGED
|
@@ -12,6 +12,14 @@ import {
|
|
|
12
12
|
MAX_CHANNELS
|
|
13
13
|
} from './types';
|
|
14
14
|
import { audioChannels } from './info';
|
|
15
|
+
import {
|
|
16
|
+
shouldUseWebAudio,
|
|
17
|
+
getAudioContext,
|
|
18
|
+
createWebAudioNodes,
|
|
19
|
+
setWebAudioVolume,
|
|
20
|
+
resumeAudioContext,
|
|
21
|
+
cleanupWebAudioNodes
|
|
22
|
+
} from './web-audio';
|
|
15
23
|
|
|
16
24
|
// Store active volume transitions to handle interruptions
|
|
17
25
|
const activeTransitions: Map<number, number> = new Map();
|
|
@@ -132,7 +140,7 @@ export const transitionVolume = async (
|
|
|
132
140
|
const easingFn = easingFunctions[easing];
|
|
133
141
|
|
|
134
142
|
return new Promise<void>((resolve) => {
|
|
135
|
-
const updateVolume = (): void => {
|
|
143
|
+
const updateVolume = async (): Promise<void> => {
|
|
136
144
|
const elapsed: number = performance.now() - startTime;
|
|
137
145
|
const progress: number = Math.min(elapsed / duration, 1);
|
|
138
146
|
const easedProgress: number = easingFn(progress);
|
|
@@ -143,7 +151,7 @@ export const transitionVolume = async (
|
|
|
143
151
|
// Apply volume to both channel config and current audio
|
|
144
152
|
channel.volume = clampedVolume;
|
|
145
153
|
if (channel.queue.length > 0) {
|
|
146
|
-
channel.queue[0]
|
|
154
|
+
await setVolumeForAudio(channel.queue[0], clampedVolume, channelNumber);
|
|
147
155
|
}
|
|
148
156
|
|
|
149
157
|
if (progress >= 1) {
|
|
@@ -154,12 +162,12 @@ export const transitionVolume = async (
|
|
|
154
162
|
} else {
|
|
155
163
|
// Use requestAnimationFrame in browser, setTimeout in tests
|
|
156
164
|
if (typeof requestAnimationFrame !== 'undefined') {
|
|
157
|
-
const rafId = requestAnimationFrame(updateVolume);
|
|
165
|
+
const rafId = requestAnimationFrame(() => updateVolume());
|
|
158
166
|
activeTransitions.set(channelNumber, rafId as unknown as number);
|
|
159
167
|
timerTypes.set(channelNumber, TimerType.RequestAnimationFrame);
|
|
160
168
|
} else {
|
|
161
169
|
// In test environment, use shorter intervals
|
|
162
|
-
const timeoutId = setTimeout(updateVolume, 1);
|
|
170
|
+
const timeoutId = setTimeout(() => updateVolume(), 1);
|
|
163
171
|
activeTransitions.set(channelNumber, timeoutId as unknown as number);
|
|
164
172
|
timerTypes.set(channelNumber, TimerType.Timeout);
|
|
165
173
|
}
|
|
@@ -172,6 +180,7 @@ export const transitionVolume = async (
|
|
|
172
180
|
|
|
173
181
|
/**
|
|
174
182
|
* Sets the volume for a specific channel with optional smooth transition
|
|
183
|
+
* Automatically uses Web Audio API on iOS devices for enhanced volume control
|
|
175
184
|
* @param channelNumber - The channel number to set volume for
|
|
176
185
|
* @param volume - Volume level (0-1)
|
|
177
186
|
* @param transitionDuration - Optional transition duration in milliseconds
|
|
@@ -217,16 +226,22 @@ export const setChannelVolume = async (
|
|
|
217
226
|
return;
|
|
218
227
|
}
|
|
219
228
|
|
|
229
|
+
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
230
|
+
|
|
231
|
+
// Initialize Web Audio API if needed and supported
|
|
232
|
+
if (shouldUseWebAudio() && !channel.webAudioContext) {
|
|
233
|
+
await initializeWebAudioForChannel(channelNumber);
|
|
234
|
+
}
|
|
235
|
+
|
|
220
236
|
if (transitionDuration && transitionDuration > 0) {
|
|
221
237
|
// Smooth transition
|
|
222
238
|
await transitionVolume(channelNumber, clampedVolume, transitionDuration, easing);
|
|
223
239
|
} else {
|
|
224
240
|
// Instant change (backward compatibility)
|
|
225
|
-
|
|
226
|
-
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
241
|
+
channel.volume = clampedVolume;
|
|
227
242
|
if (channel.queue.length > 0) {
|
|
228
243
|
const currentAudio: HTMLAudioElement = channel.queue[0];
|
|
229
|
-
currentAudio
|
|
244
|
+
await setVolumeForAudio(currentAudio, clampedVolume, channelNumber);
|
|
230
245
|
}
|
|
231
246
|
}
|
|
232
247
|
};
|
|
@@ -364,14 +379,14 @@ export const applyVolumeDucking = async (activeChannelNumber: number): Promise<v
|
|
|
364
379
|
// Only change audio volume, preserve channel.volume as desired volume
|
|
365
380
|
const currentAudio: HTMLAudioElement = channel.queue[0];
|
|
366
381
|
transitionPromises.push(
|
|
367
|
-
transitionAudioVolume(currentAudio, config.priorityVolume, duration, easing)
|
|
382
|
+
transitionAudioVolume(currentAudio, config.priorityVolume, duration, easing, channelNumber)
|
|
368
383
|
);
|
|
369
384
|
} else {
|
|
370
385
|
// This is a background channel - duck it
|
|
371
386
|
// Only change audio volume, preserve channel.volume as desired volume
|
|
372
387
|
const currentAudio: HTMLAudioElement = channel.queue[0];
|
|
373
388
|
transitionPromises.push(
|
|
374
|
-
transitionAudioVolume(currentAudio, config.duckingVolume, duration, easing)
|
|
389
|
+
transitionAudioVolume(currentAudio, config.duckingVolume, duration, easing, channelNumber)
|
|
375
390
|
);
|
|
376
391
|
}
|
|
377
392
|
});
|
|
@@ -414,7 +429,9 @@ export const restoreVolumeLevels = async (stoppedChannelNumber: number): Promise
|
|
|
414
429
|
|
|
415
430
|
// Only transition the audio element volume, keep channel.volume as the desired volume
|
|
416
431
|
const currentAudio: HTMLAudioElement = channel.queue[0];
|
|
417
|
-
transitionPromises.push(
|
|
432
|
+
transitionPromises.push(
|
|
433
|
+
transitionAudioVolume(currentAudio, targetVolume, duration, easing, channelNumber)
|
|
434
|
+
);
|
|
418
435
|
});
|
|
419
436
|
|
|
420
437
|
// Wait for all transitions to complete
|
|
@@ -424,10 +441,12 @@ export const restoreVolumeLevels = async (stoppedChannelNumber: number): Promise
|
|
|
424
441
|
/**
|
|
425
442
|
* Transitions only the audio element volume without affecting channel.volume
|
|
426
443
|
* This is used for ducking/restoration where channel.volume represents desired volume
|
|
444
|
+
* Uses Web Audio API when available for enhanced volume control
|
|
427
445
|
* @param audio - The audio element to transition
|
|
428
446
|
* @param targetVolume - Target volume level (0-1)
|
|
429
447
|
* @param duration - Transition duration in milliseconds
|
|
430
448
|
* @param easing - Easing function type
|
|
449
|
+
* @param channelNumber - The channel number this audio belongs to (for Web Audio API)
|
|
431
450
|
* @returns Promise that resolves when transition completes
|
|
432
451
|
* @internal
|
|
433
452
|
*/
|
|
@@ -435,8 +454,25 @@ const transitionAudioVolume = async (
|
|
|
435
454
|
audio: HTMLAudioElement,
|
|
436
455
|
targetVolume: number,
|
|
437
456
|
duration: number = 250,
|
|
438
|
-
easing: EasingType = EasingType.EaseOut
|
|
457
|
+
easing: EasingType = EasingType.EaseOut,
|
|
458
|
+
channelNumber?: number
|
|
439
459
|
): Promise<void> => {
|
|
460
|
+
// Try to use Web Audio API if available and channel number is provided
|
|
461
|
+
if (channelNumber !== undefined) {
|
|
462
|
+
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
463
|
+
if (channel?.webAudioContext && channel.webAudioNodes) {
|
|
464
|
+
const nodes = channel.webAudioNodes.get(audio);
|
|
465
|
+
if (nodes) {
|
|
466
|
+
// Use Web Audio API for smooth transitions
|
|
467
|
+
setWebAudioVolume(nodes.gainNode, targetVolume, duration);
|
|
468
|
+
// Also update the audio element's volume property for consistency
|
|
469
|
+
audio.volume = targetVolume;
|
|
470
|
+
return;
|
|
471
|
+
}
|
|
472
|
+
}
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
// Fallback to standard HTMLAudioElement volume control with manual transition
|
|
440
476
|
const startVolume: number = audio.volume;
|
|
441
477
|
const volumeDelta: number = targetVolume - startVolume;
|
|
442
478
|
|
|
@@ -447,7 +483,7 @@ const transitionAudioVolume = async (
|
|
|
447
483
|
|
|
448
484
|
// Handle zero or negative duration - instant change
|
|
449
485
|
if (duration <= 0) {
|
|
450
|
-
audio.volume =
|
|
486
|
+
audio.volume = targetVolume;
|
|
451
487
|
return Promise.resolve();
|
|
452
488
|
}
|
|
453
489
|
|
|
@@ -473,7 +509,8 @@ const transitionAudioVolume = async (
|
|
|
473
509
|
if (typeof requestAnimationFrame !== 'undefined') {
|
|
474
510
|
requestAnimationFrame(updateVolume);
|
|
475
511
|
} else {
|
|
476
|
-
|
|
512
|
+
// In test environment, use longer intervals to prevent stack overflow
|
|
513
|
+
setTimeout(updateVolume, 16);
|
|
477
514
|
}
|
|
478
515
|
}
|
|
479
516
|
};
|
|
@@ -521,3 +558,110 @@ export const cancelAllVolumeTransitions = (): void => {
|
|
|
521
558
|
cancelVolumeTransition(channelNumber);
|
|
522
559
|
});
|
|
523
560
|
};
|
|
561
|
+
|
|
562
|
+
/**
|
|
563
|
+
* Initializes Web Audio API for a specific channel
|
|
564
|
+
* @param channelNumber - The channel number to initialize Web Audio for
|
|
565
|
+
* @internal
|
|
566
|
+
*/
|
|
567
|
+
/**
 * Lazily attaches a Web Audio pipeline to a channel: obtains an
 * AudioContext, resumes it (autoplay policy), and builds a source->gain
 * node pair for every audio element already in the channel's queue.
 *
 * No-op when the channel does not exist or is already initialized.
 *
 * @param channelNumber - The channel number to initialize Web Audio for
 * @throws Error when the AudioContext or any node set cannot be created.
 *         NOTE(review): callers such as setChannelVolume await this without
 *         a catch, so a failure propagates to them — confirm intended.
 * @internal
 */
const initializeWebAudioForChannel = async (channelNumber: number): Promise<void> => {
  const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
  if (!channel || channel.webAudioContext) return;

  const audioContext = getAudioContext();
  if (!audioContext) {
    throw new Error('AudioContext creation failed');
  }

  // Resume audio context if needed (for autoplay policy)
  await resumeAudioContext(audioContext);

  channel.webAudioContext = audioContext;
  channel.webAudioNodes = new Map();

  // Initialize Web Audio nodes for existing audio elements
  for (const audio of channel.queue) {
    const nodes = createWebAudioNodes(audio, audioContext);
    if (!nodes) {
      // Channel is left with webAudioContext set and a partial node map here;
      // NOTE(review): confirm partial-initialization state is acceptable.
      throw new Error('Node creation failed');
    }
    channel.webAudioNodes.set(audio, nodes);
    // Set initial volume to match channel volume
    nodes.gainNode.gain.value = channel.volume;
  }
};
|
|
593
|
+
|
|
594
|
+
/**
|
|
595
|
+
* Sets volume for an audio element using the appropriate method (Web Audio API or standard)
|
|
596
|
+
* @param audio - The audio element to set volume for
|
|
597
|
+
* @param volume - Volume level (0-1)
|
|
598
|
+
* @param channelNumber - The channel number this audio belongs to
|
|
599
|
+
* @param transitionDuration - Optional transition duration in milliseconds
|
|
600
|
+
* @internal
|
|
601
|
+
*/
|
|
602
|
+
const setVolumeForAudio = async (
|
|
603
|
+
audio: HTMLAudioElement,
|
|
604
|
+
volume: number,
|
|
605
|
+
channelNumber: number,
|
|
606
|
+
transitionDuration?: number
|
|
607
|
+
): Promise<void> => {
|
|
608
|
+
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
609
|
+
|
|
610
|
+
// Use Web Audio API if available and initialized
|
|
611
|
+
if (channel?.webAudioContext && channel.webAudioNodes) {
|
|
612
|
+
const nodes = channel.webAudioNodes.get(audio);
|
|
613
|
+
if (nodes) {
|
|
614
|
+
setWebAudioVolume(nodes.gainNode, volume, transitionDuration);
|
|
615
|
+
return;
|
|
616
|
+
}
|
|
617
|
+
}
|
|
618
|
+
|
|
619
|
+
// Fallback to standard HTMLAudioElement volume control
|
|
620
|
+
audio.volume = volume;
|
|
621
|
+
};
|
|
622
|
+
|
|
623
|
+
/**
|
|
624
|
+
* Initializes Web Audio API nodes for a new audio element
|
|
625
|
+
* @param audio - The audio element to initialize nodes for
|
|
626
|
+
* @param channelNumber - The channel number this audio belongs to
|
|
627
|
+
* @internal
|
|
628
|
+
*/
|
|
629
|
+
export const initializeWebAudioForAudio = async (
|
|
630
|
+
audio: HTMLAudioElement,
|
|
631
|
+
channelNumber: number
|
|
632
|
+
): Promise<void> => {
|
|
633
|
+
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
634
|
+
if (!channel) return;
|
|
635
|
+
|
|
636
|
+
// Initialize Web Audio API for the channel if needed
|
|
637
|
+
if (shouldUseWebAudio() && !channel.webAudioContext) {
|
|
638
|
+
await initializeWebAudioForChannel(channelNumber);
|
|
639
|
+
}
|
|
640
|
+
|
|
641
|
+
// Create nodes for this specific audio element
|
|
642
|
+
if (channel.webAudioContext && channel.webAudioNodes && !channel.webAudioNodes.has(audio)) {
|
|
643
|
+
const nodes = createWebAudioNodes(audio, channel.webAudioContext);
|
|
644
|
+
if (nodes) {
|
|
645
|
+
channel.webAudioNodes.set(audio, nodes);
|
|
646
|
+
// Set initial volume to match channel volume
|
|
647
|
+
nodes.gainNode.gain.value = channel.volume;
|
|
648
|
+
}
|
|
649
|
+
}
|
|
650
|
+
};
|
|
651
|
+
|
|
652
|
+
/**
|
|
653
|
+
* Cleans up Web Audio API nodes for an audio element
|
|
654
|
+
* @param audio - The audio element to clean up nodes for
|
|
655
|
+
* @param channelNumber - The channel number this audio belongs to
|
|
656
|
+
* @internal
|
|
657
|
+
*/
|
|
658
|
+
export const cleanupWebAudioForAudio = (audio: HTMLAudioElement, channelNumber: number): void => {
|
|
659
|
+
const channel: ExtendedAudioQueueChannel = audioChannels[channelNumber];
|
|
660
|
+
if (!channel?.webAudioNodes) return;
|
|
661
|
+
|
|
662
|
+
const nodes = channel.webAudioNodes.get(audio);
|
|
663
|
+
if (nodes) {
|
|
664
|
+
cleanupWebAudioNodes(nodes);
|
|
665
|
+
channel.webAudioNodes.delete(audio);
|
|
666
|
+
}
|
|
667
|
+
};
|
package/src/web-audio.ts
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Web Audio API support for enhanced volume control on iOS and other platforms
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { WebAudioConfig, WebAudioSupport, WebAudioNodeSet } from './types';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Global Web Audio API configuration
|
|
9
|
+
*/
|
|
10
|
+
/**
 * Global Web Audio API configuration.
 * Module-level mutable state: read by shouldUseWebAudio() and
 * getWebAudioSupport(), replaced wholesale by setWebAudioConfig().
 */
let webAudioConfig: WebAudioConfig = {
  autoDetectIOS: true,   // use Web Audio automatically on detected iOS devices
  enabled: true,         // master switch for the Web Audio path
  forceWebAudio: false   // when true, use Web Audio on every platform
};
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Detects if the current device is iOS
|
|
18
|
+
* @returns True if the device is iOS, false otherwise
|
|
19
|
+
* @example
|
|
20
|
+
* ```typescript
|
|
21
|
+
* if (isIOSDevice()) {
|
|
22
|
+
* console.log('Running on iOS device');
|
|
23
|
+
* }
|
|
24
|
+
* ```
|
|
25
|
+
*/
|
|
26
|
+
export const isIOSDevice = (): boolean => {
|
|
27
|
+
if (typeof navigator === 'undefined') return false;
|
|
28
|
+
|
|
29
|
+
// Modern approach using User-Agent Client Hints API
|
|
30
|
+
const navWithUA = navigator as unknown as { userAgentData?: { platform: string } };
|
|
31
|
+
if ('userAgentData' in navigator && navWithUA.userAgentData) {
|
|
32
|
+
return navWithUA.userAgentData.platform === 'iOS';
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// Fallback to userAgent string parsing
|
|
36
|
+
const userAgent = navigator.userAgent || '';
|
|
37
|
+
const isIOS = /iPad|iPhone|iPod/.test(userAgent);
|
|
38
|
+
|
|
39
|
+
// Additional check for modern iPads that report as Mac
|
|
40
|
+
const isMacWithTouch =
|
|
41
|
+
/Macintosh/.test(userAgent) && 'maxTouchPoints' in navigator && navigator.maxTouchPoints > 1;
|
|
42
|
+
|
|
43
|
+
return isIOS || isMacWithTouch;
|
|
44
|
+
};
|
|
45
|
+
|
|
46
|
+
/**
|
|
47
|
+
* Checks if Web Audio API is available in the current environment
|
|
48
|
+
* @returns True if Web Audio API is supported, false otherwise
|
|
49
|
+
* @example
|
|
50
|
+
* ```typescript
|
|
51
|
+
* if (isWebAudioSupported()) {
|
|
52
|
+
* console.log('Web Audio API is available');
|
|
53
|
+
* }
|
|
54
|
+
* ```
|
|
55
|
+
*/
|
|
56
|
+
export const isWebAudioSupported = (): boolean => {
|
|
57
|
+
if (typeof window === 'undefined') {
|
|
58
|
+
// In Node.js environment (tests), check if Web Audio API globals are available
|
|
59
|
+
const globalThis = global as unknown as {
|
|
60
|
+
AudioContext?: unknown;
|
|
61
|
+
webkitAudioContext?: unknown;
|
|
62
|
+
};
|
|
63
|
+
return (
|
|
64
|
+
typeof globalThis.AudioContext !== 'undefined' ||
|
|
65
|
+
typeof globalThis.webkitAudioContext !== 'undefined'
|
|
66
|
+
);
|
|
67
|
+
}
|
|
68
|
+
const windowWithWebkit = window as unknown as { webkitAudioContext?: unknown };
|
|
69
|
+
return (
|
|
70
|
+
typeof AudioContext !== 'undefined' ||
|
|
71
|
+
typeof windowWithWebkit.webkitAudioContext !== 'undefined'
|
|
72
|
+
);
|
|
73
|
+
};
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Determines if Web Audio API should be used based on configuration and device detection
|
|
77
|
+
* @returns True if Web Audio API should be used, false otherwise
|
|
78
|
+
* @example
|
|
79
|
+
* ```typescript
|
|
80
|
+
* if (shouldUseWebAudio()) {
|
|
81
|
+
* // Use Web Audio API for volume control
|
|
82
|
+
* }
|
|
83
|
+
* ```
|
|
84
|
+
*/
|
|
85
|
+
export const shouldUseWebAudio = (): boolean => {
|
|
86
|
+
if (!webAudioConfig.enabled) return false;
|
|
87
|
+
if (!isWebAudioSupported()) return false;
|
|
88
|
+
if (webAudioConfig.forceWebAudio) return true;
|
|
89
|
+
if (webAudioConfig.autoDetectIOS && isIOSDevice()) return true;
|
|
90
|
+
return false;
|
|
91
|
+
};
|
|
92
|
+
|
|
93
|
+
/**
|
|
94
|
+
* Gets information about Web Audio API support and usage
|
|
95
|
+
* @returns Object containing Web Audio API support information
|
|
96
|
+
* @example
|
|
97
|
+
* ```typescript
|
|
98
|
+
* const support = getWebAudioSupport();
|
|
99
|
+
* console.log(`Using Web Audio: ${support.usingWebAudio}`);
|
|
100
|
+
* console.log(`Reason: ${support.reason}`);
|
|
101
|
+
* ```
|
|
102
|
+
*/
|
|
103
|
+
export const getWebAudioSupport = (): WebAudioSupport => {
|
|
104
|
+
const available = isWebAudioSupported();
|
|
105
|
+
const isIOS = isIOSDevice();
|
|
106
|
+
const usingWebAudio = shouldUseWebAudio();
|
|
107
|
+
|
|
108
|
+
let reason = '';
|
|
109
|
+
if (!webAudioConfig.enabled) {
|
|
110
|
+
reason = 'Web Audio API disabled in configuration';
|
|
111
|
+
} else if (!available) {
|
|
112
|
+
reason = 'Web Audio API not supported in this environment';
|
|
113
|
+
} else if (webAudioConfig.forceWebAudio) {
|
|
114
|
+
reason = 'Web Audio API forced via configuration';
|
|
115
|
+
} else if (isIOS && webAudioConfig.autoDetectIOS) {
|
|
116
|
+
reason = 'iOS device detected - using Web Audio API for volume control';
|
|
117
|
+
} else {
|
|
118
|
+
reason = 'Using standard HTMLAudioElement volume control';
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
return {
|
|
122
|
+
available,
|
|
123
|
+
isIOS,
|
|
124
|
+
reason,
|
|
125
|
+
usingWebAudio
|
|
126
|
+
};
|
|
127
|
+
};
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Configures Web Audio API usage
|
|
131
|
+
* @param config - Configuration options for Web Audio API
|
|
132
|
+
* @example
|
|
133
|
+
* ```typescript
|
|
134
|
+
* // Force Web Audio API usage on all devices
|
|
135
|
+
* setWebAudioConfig({ forceWebAudio: true });
|
|
136
|
+
*
|
|
137
|
+
* // Disable Web Audio API entirely
|
|
138
|
+
* setWebAudioConfig({ enabled: false });
|
|
139
|
+
* ```
|
|
140
|
+
*/
|
|
141
|
+
export const setWebAudioConfig = (config: Partial<WebAudioConfig>): void => {
|
|
142
|
+
webAudioConfig = { ...webAudioConfig, ...config };
|
|
143
|
+
};
|
|
144
|
+
|
|
145
|
+
/**
|
|
146
|
+
* Gets the current Web Audio API configuration
|
|
147
|
+
* @returns Current Web Audio API configuration
|
|
148
|
+
* @example
|
|
149
|
+
* ```typescript
|
|
150
|
+
* const config = getWebAudioConfig();
|
|
151
|
+
* console.log(`Web Audio enabled: ${config.enabled}`);
|
|
152
|
+
* ```
|
|
153
|
+
*/
|
|
154
|
+
export const getWebAudioConfig = (): WebAudioConfig => {
|
|
155
|
+
return { ...webAudioConfig };
|
|
156
|
+
};
|
|
157
|
+
|
|
158
|
+
/**
|
|
159
|
+
* Creates or gets an AudioContext for Web Audio API operations
|
|
160
|
+
* @returns AudioContext instance or null if not supported
|
|
161
|
+
* @example
|
|
162
|
+
* ```typescript
|
|
163
|
+
* const context = getAudioContext();
|
|
164
|
+
* if (context) {
|
|
165
|
+
* console.log('Audio context created successfully');
|
|
166
|
+
* }
|
|
167
|
+
* ```
|
|
168
|
+
*/
|
|
169
|
+
export const getAudioContext = (): AudioContext | null => {
|
|
170
|
+
if (!isWebAudioSupported()) return null;
|
|
171
|
+
|
|
172
|
+
try {
|
|
173
|
+
// In Node.js environment (tests), return null to allow mocking
|
|
174
|
+
if (typeof window === 'undefined') {
|
|
175
|
+
return null;
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
// Use existing AudioContext or create new one
|
|
179
|
+
const windowWithWebkit = window as unknown as { webkitAudioContext?: typeof AudioContext };
|
|
180
|
+
const AudioContextClass = window.AudioContext || windowWithWebkit.webkitAudioContext;
|
|
181
|
+
return new AudioContextClass();
|
|
182
|
+
} catch (error) {
|
|
183
|
+
// eslint-disable-next-line no-console
|
|
184
|
+
console.warn('Failed to create AudioContext:', error);
|
|
185
|
+
return null;
|
|
186
|
+
}
|
|
187
|
+
};
|
|
188
|
+
|
|
189
|
+
/**
|
|
190
|
+
* Creates Web Audio API nodes for an audio element
|
|
191
|
+
* @param audioElement - The HTML audio element to create nodes for
|
|
192
|
+
* @param audioContext - The AudioContext to use
|
|
193
|
+
* @returns Web Audio API node set or null if creation fails
|
|
194
|
+
* @example
|
|
195
|
+
* ```typescript
|
|
196
|
+
* const audio = new Audio('song.mp3');
|
|
197
|
+
* const context = getAudioContext();
|
|
198
|
+
* if (context) {
|
|
199
|
+
* const nodes = createWebAudioNodes(audio, context);
|
|
200
|
+
* if (nodes) {
|
|
201
|
+
* nodes.gainNode.gain.value = 0.5; // Set volume to 50%
|
|
202
|
+
* }
|
|
203
|
+
* }
|
|
204
|
+
* ```
|
|
205
|
+
*/
|
|
206
|
+
export const createWebAudioNodes = (
|
|
207
|
+
audioElement: HTMLAudioElement,
|
|
208
|
+
audioContext: AudioContext
|
|
209
|
+
): WebAudioNodeSet | null => {
|
|
210
|
+
try {
|
|
211
|
+
// Create media element source node
|
|
212
|
+
const sourceNode = audioContext.createMediaElementSource(audioElement);
|
|
213
|
+
|
|
214
|
+
// Create gain node for volume control
|
|
215
|
+
const gainNode = audioContext.createGain();
|
|
216
|
+
|
|
217
|
+
// Connect source to gain node
|
|
218
|
+
sourceNode.connect(gainNode);
|
|
219
|
+
|
|
220
|
+
// Connect gain node to destination (speakers)
|
|
221
|
+
gainNode.connect(audioContext.destination);
|
|
222
|
+
|
|
223
|
+
return {
|
|
224
|
+
gainNode,
|
|
225
|
+
sourceNode
|
|
226
|
+
};
|
|
227
|
+
} catch (error) {
|
|
228
|
+
// eslint-disable-next-line no-console
|
|
229
|
+
console.warn('Failed to create Web Audio nodes:', error);
|
|
230
|
+
return null;
|
|
231
|
+
}
|
|
232
|
+
};
|
|
233
|
+
|
|
234
|
+
/**
|
|
235
|
+
* Sets volume using Web Audio API gain node
|
|
236
|
+
* @param gainNode - The gain node to set volume on
|
|
237
|
+
* @param volume - Volume level (0-1)
|
|
238
|
+
* @param transitionDuration - Optional transition duration in milliseconds
|
|
239
|
+
* @example
|
|
240
|
+
* ```typescript
|
|
241
|
+
* const nodes = createWebAudioNodes(audio, context);
|
|
242
|
+
* if (nodes) {
|
|
243
|
+
* setWebAudioVolume(nodes.gainNode, 0.5); // Set to 50% volume
|
|
244
|
+
* setWebAudioVolume(nodes.gainNode, 0.2, 300); // Fade to 20% over 300ms
|
|
245
|
+
* }
|
|
246
|
+
* ```
|
|
247
|
+
*/
|
|
248
|
+
export const setWebAudioVolume = (
|
|
249
|
+
gainNode: GainNode,
|
|
250
|
+
volume: number,
|
|
251
|
+
transitionDuration?: number
|
|
252
|
+
): void => {
|
|
253
|
+
const clampedVolume = Math.max(0, Math.min(1, volume));
|
|
254
|
+
const currentTime = gainNode.context.currentTime;
|
|
255
|
+
|
|
256
|
+
if (transitionDuration && transitionDuration > 0) {
|
|
257
|
+
// Smooth transition using Web Audio API's built-in scheduling
|
|
258
|
+
gainNode.gain.cancelScheduledValues(currentTime);
|
|
259
|
+
gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime);
|
|
260
|
+
gainNode.gain.linearRampToValueAtTime(clampedVolume, currentTime + transitionDuration / 1000);
|
|
261
|
+
} else {
|
|
262
|
+
// Instant change
|
|
263
|
+
gainNode.gain.cancelScheduledValues(currentTime);
|
|
264
|
+
gainNode.gain.setValueAtTime(clampedVolume, currentTime);
|
|
265
|
+
}
|
|
266
|
+
};
|
|
267
|
+
|
|
268
|
+
/**
|
|
269
|
+
* Gets the current volume from a Web Audio API gain node
|
|
270
|
+
* @param gainNode - The gain node to get volume from
|
|
271
|
+
* @returns Current volume level (0-1)
|
|
272
|
+
* @example
|
|
273
|
+
* ```typescript
|
|
274
|
+
* const nodes = createWebAudioNodes(audio, context);
|
|
275
|
+
* if (nodes) {
|
|
276
|
+
* const volume = getWebAudioVolume(nodes.gainNode);
|
|
277
|
+
* console.log(`Current volume: ${volume * 100}%`);
|
|
278
|
+
* }
|
|
279
|
+
* ```
|
|
280
|
+
*/
|
|
281
|
+
export const getWebAudioVolume = (gainNode: GainNode): number => {
|
|
282
|
+
return gainNode.gain.value;
|
|
283
|
+
};
|
|
284
|
+
|
|
285
|
+
/**
|
|
286
|
+
* Resumes an AudioContext if it's in suspended state (required for autoplay policy)
|
|
287
|
+
* @param audioContext - The AudioContext to resume
|
|
288
|
+
* @returns Promise that resolves when context is resumed
|
|
289
|
+
* @example
|
|
290
|
+
* ```typescript
|
|
291
|
+
* const context = getAudioContext();
|
|
292
|
+
* if (context) {
|
|
293
|
+
* await resumeAudioContext(context);
|
|
294
|
+
* }
|
|
295
|
+
* ```
|
|
296
|
+
*/
|
|
297
|
+
export const resumeAudioContext = async (audioContext: AudioContext): Promise<void> => {
|
|
298
|
+
if (audioContext.state === 'suspended') {
|
|
299
|
+
try {
|
|
300
|
+
await audioContext.resume();
|
|
301
|
+
} catch (error) {
|
|
302
|
+
// eslint-disable-next-line no-console
|
|
303
|
+
console.warn('Failed to resume AudioContext:', error);
|
|
304
|
+
// Don't throw - handle gracefully and continue
|
|
305
|
+
}
|
|
306
|
+
}
|
|
307
|
+
};
|
|
308
|
+
|
|
309
|
+
/**
|
|
310
|
+
* Cleans up Web Audio API nodes and connections
|
|
311
|
+
* @param nodes - The Web Audio API node set to clean up
|
|
312
|
+
* @example
|
|
313
|
+
* ```typescript
|
|
314
|
+
* const nodes = createWebAudioNodes(audio, context);
|
|
315
|
+
* if (nodes) {
|
|
316
|
+
* // Use nodes...
|
|
317
|
+
* cleanupWebAudioNodes(nodes); // Clean up when done
|
|
318
|
+
* }
|
|
319
|
+
* ```
|
|
320
|
+
*/
|
|
321
|
+
export const cleanupWebAudioNodes = (nodes: WebAudioNodeSet): void => {
|
|
322
|
+
try {
|
|
323
|
+
// Disconnect all nodes
|
|
324
|
+
nodes.sourceNode.disconnect();
|
|
325
|
+
nodes.gainNode.disconnect();
|
|
326
|
+
} catch (error) {
|
|
327
|
+
// Ignore errors during cleanup
|
|
328
|
+
// eslint-disable-next-line no-console
|
|
329
|
+
console.warn('Error during Web Audio cleanup:', error);
|
|
330
|
+
}
|
|
331
|
+
};
|