audio-channel-queue 1.11.0 → 1.12.1-beta.0
This diff reflects the content of package versions that have been publicly released to a supported registry. It is provided for informational purposes only and shows the changes between those versions as they appear in the registry.
- package/README.md +48 -25
- package/dist/core.js +6 -3
- package/dist/errors.d.ts +4 -4
- package/dist/errors.js +12 -51
- package/dist/index.d.ts +4 -3
- package/dist/index.js +19 -5
- package/dist/info.d.ts +20 -15
- package/dist/info.js +20 -15
- package/dist/types.d.ts +103 -24
- package/dist/types.js +17 -4
- package/dist/volume.d.ts +15 -14
- package/dist/volume.js +131 -33
- package/dist/web-audio.d.ts +156 -0
- package/dist/web-audio.js +327 -0
- package/package.json +1 -1
- package/src/core.ts +14 -4
- package/src/errors.ts +15 -64
- package/src/index.ts +31 -6
- package/src/info.ts +20 -15
- package/src/types.ts +107 -24
- package/src/volume.ts +158 -36
- package/src/web-audio.ts +331 -0
package/dist/web-audio.d.ts
ADDED
@@ -0,0 +1,156 @@
+/**
+ * @fileoverview Web Audio API support for enhanced volume control on iOS and other platforms
+ */
+import { WebAudioConfig, WebAudioSupport, WebAudioNodeSet } from './types';
+/**
+ * Detects if the current device is iOS
+ * @returns True if the device is iOS, false otherwise
+ * @example
+ * ```typescript
+ * if (isIOSDevice()) {
+ *   console.log('Running on iOS device');
+ * }
+ * ```
+ */
+export declare const isIOSDevice: () => boolean;
+/**
+ * Checks if Web Audio API is available in the current environment
+ * @returns True if Web Audio API is supported, false otherwise
+ * @example
+ * ```typescript
+ * if (isWebAudioSupported()) {
+ *   console.log('Web Audio API is available');
+ * }
+ * ```
+ */
+export declare const isWebAudioSupported: () => boolean;
+/**
+ * Determines if Web Audio API should be used based on configuration and device detection
+ * @returns True if Web Audio API should be used, false otherwise
+ * @example
+ * ```typescript
+ * if (shouldUseWebAudio()) {
+ *   // Use Web Audio API for volume control
+ * }
+ * ```
+ */
+export declare const shouldUseWebAudio: () => boolean;
+/**
+ * Gets information about Web Audio API support and usage
+ * @returns Object containing Web Audio API support information
+ * @example
+ * ```typescript
+ * const support = getWebAudioSupport();
+ * console.log(`Using Web Audio: ${support.usingWebAudio}`);
+ * console.log(`Reason: ${support.reason}`);
+ * ```
+ */
+export declare const getWebAudioSupport: () => WebAudioSupport;
+/**
+ * Configures Web Audio API usage
+ * @param config - Configuration options for Web Audio API
+ * @example
+ * ```typescript
+ * // Force Web Audio API usage on all devices
+ * setWebAudioConfig({ forceWebAudio: true });
+ *
+ * // Disable Web Audio API entirely
+ * setWebAudioConfig({ enabled: false });
+ * ```
+ */
+export declare const setWebAudioConfig: (config: Partial<WebAudioConfig>) => void;
+/**
+ * Gets the current Web Audio API configuration
+ * @returns Current Web Audio API configuration
+ * @example
+ * ```typescript
+ * const config = getWebAudioConfig();
+ * console.log(`Web Audio enabled: ${config.enabled}`);
+ * ```
+ */
+export declare const getWebAudioConfig: () => WebAudioConfig;
+/**
+ * Creates or gets an AudioContext for Web Audio API operations
+ * @returns AudioContext instance or null if not supported
+ * @example
+ * ```typescript
+ * const context = getAudioContext();
+ * if (context) {
+ *   console.log('Audio context created successfully');
+ * }
+ * ```
+ */
+export declare const getAudioContext: () => AudioContext | null;
+/**
+ * Creates Web Audio API nodes for an audio element
+ * @param audioElement - The HTML audio element to create nodes for
+ * @param audioContext - The AudioContext to use
+ * @returns Web Audio API node set or null if creation fails
+ * @example
+ * ```typescript
+ * const audio = new Audio('song.mp3');
+ * const context = getAudioContext();
+ * if (context) {
+ *   const nodes = createWebAudioNodes(audio, context);
+ *   if (nodes) {
+ *     nodes.gainNode.gain.value = 0.5; // Set volume to 50%
+ *   }
+ * }
+ * ```
+ */
+export declare const createWebAudioNodes: (audioElement: HTMLAudioElement, audioContext: AudioContext) => WebAudioNodeSet | null;
+/**
+ * Sets volume using Web Audio API gain node
+ * @param gainNode - The gain node to set volume on
+ * @param volume - Volume level (0-1)
+ * @param transitionDuration - Optional transition duration in milliseconds
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   setWebAudioVolume(nodes.gainNode, 0.5); // Set to 50% volume
+ *   setWebAudioVolume(nodes.gainNode, 0.2, 300); // Fade to 20% over 300ms
+ * }
+ * ```
+ */
+export declare const setWebAudioVolume: (gainNode: GainNode, volume: number, transitionDuration?: number) => void;
+/**
+ * Gets the current volume from a Web Audio API gain node
+ * @param gainNode - The gain node to get volume from
+ * @returns Current volume level (0-1)
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   const volume = getWebAudioVolume(nodes.gainNode);
+ *   console.log(`Current volume: ${volume * 100}%`);
+ * }
+ * ```
+ */
+export declare const getWebAudioVolume: (gainNode: GainNode) => number;
+/**
+ * Resumes an AudioContext if it's in suspended state (required for autoplay policy)
+ * @param audioContext - The AudioContext to resume
+ * @returns Promise that resolves when context is resumed
+ * @example
+ * ```typescript
+ * const context = getAudioContext();
+ * if (context) {
+ *   await resumeAudioContext(context);
+ * }
+ * ```
+ */
+export declare const resumeAudioContext: (audioContext: AudioContext) => Promise<void>;
+/**
+ * Cleans up Web Audio API nodes and connections
+ * @param nodes - The Web Audio API node set to clean up
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   // Use nodes...
+ *   cleanupWebAudioNodes(nodes); // Clean up when done
+ * }
+ * ```
+ */
+export declare const cleanupWebAudioNodes: (nodes: WebAudioNodeSet) => void;
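Taken together, these declarations form a small per-element pipeline: check support, create an AudioContext, wrap the element in a source → gain → destination chain, then drive volume through the gain node. Below is a minimal sketch of that flow using only the functions declared above; the './web-audio' import path mirrors the package's internal modules, and the click handler and 'song.mp3' URL are illustrative assumptions, not part of the package.

```typescript
import {
  shouldUseWebAudio,
  getAudioContext,
  createWebAudioNodes,
  setWebAudioVolume,
  resumeAudioContext,
  cleanupWebAudioNodes
} from './web-audio';

const audio = new Audio('song.mp3'); // hypothetical asset URL

if (shouldUseWebAudio()) {
  const context = getAudioContext();
  const nodes = context ? createWebAudioNodes(audio, context) : null;

  // Browsers keep a fresh AudioContext suspended until a user gesture.
  document.addEventListener(
    'click',
    () => { if (context) void resumeAudioContext(context); },
    { once: true }
  );

  if (nodes) {
    setWebAudioVolume(nodes.gainNode, 0.2, 300); // fade to 20% over 300 ms

    // ...later, once the element is no longer needed:
    cleanupWebAudioNodes(nodes);
  }
}
```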
package/dist/web-audio.js
ADDED
@@ -0,0 +1,327 @@
+"use strict";
+/**
+ * @fileoverview Web Audio API support for enhanced volume control on iOS and other platforms
+ */
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.cleanupWebAudioNodes = exports.resumeAudioContext = exports.getWebAudioVolume = exports.setWebAudioVolume = exports.createWebAudioNodes = exports.getAudioContext = exports.getWebAudioConfig = exports.setWebAudioConfig = exports.getWebAudioSupport = exports.shouldUseWebAudio = exports.isWebAudioSupported = exports.isIOSDevice = void 0;
+/**
+ * Global Web Audio API configuration
+ */
+let webAudioConfig = {
+    autoDetectIOS: true,
+    enabled: true,
+    forceWebAudio: false
+};
+/**
+ * Detects if the current device is iOS
+ * @returns True if the device is iOS, false otherwise
+ * @example
+ * ```typescript
+ * if (isIOSDevice()) {
+ *   console.log('Running on iOS device');
+ * }
+ * ```
+ */
+const isIOSDevice = () => {
+    if (typeof navigator === 'undefined')
+        return false;
+    // Modern approach using User-Agent Client Hints API
+    const navWithUA = navigator;
+    if ('userAgentData' in navigator && navWithUA.userAgentData) {
+        return navWithUA.userAgentData.platform === 'iOS';
+    }
+    // Fallback to userAgent string parsing
+    const userAgent = navigator.userAgent || '';
+    const isIOS = /iPad|iPhone|iPod/.test(userAgent);
+    // Additional check for modern iPads that report as Mac
+    const isMacWithTouch = /Macintosh/.test(userAgent) && 'maxTouchPoints' in navigator && navigator.maxTouchPoints > 1;
+    return isIOS || isMacWithTouch;
+};
+exports.isIOSDevice = isIOSDevice;
+/**
+ * Checks if Web Audio API is available in the current environment
+ * @returns True if Web Audio API is supported, false otherwise
+ * @example
+ * ```typescript
+ * if (isWebAudioSupported()) {
+ *   console.log('Web Audio API is available');
+ * }
+ * ```
+ */
+const isWebAudioSupported = () => {
+    if (typeof window === 'undefined') {
+        // In Node.js environment (tests), check if Web Audio API globals are available
+        const globalThis = global;
+        return (typeof globalThis.AudioContext !== 'undefined' ||
+            typeof globalThis.webkitAudioContext !== 'undefined');
+    }
+    const windowWithWebkit = window;
+    return (typeof AudioContext !== 'undefined' ||
+        typeof windowWithWebkit.webkitAudioContext !== 'undefined');
+};
+exports.isWebAudioSupported = isWebAudioSupported;
+/**
+ * Determines if Web Audio API should be used based on configuration and device detection
+ * @returns True if Web Audio API should be used, false otherwise
+ * @example
+ * ```typescript
+ * if (shouldUseWebAudio()) {
+ *   // Use Web Audio API for volume control
+ * }
+ * ```
+ */
+const shouldUseWebAudio = () => {
+    if (!webAudioConfig.enabled)
+        return false;
+    if (!(0, exports.isWebAudioSupported)())
+        return false;
+    if (webAudioConfig.forceWebAudio)
+        return true;
+    if (webAudioConfig.autoDetectIOS && (0, exports.isIOSDevice)())
+        return true;
+    return false;
+};
+exports.shouldUseWebAudio = shouldUseWebAudio;
+/**
+ * Gets information about Web Audio API support and usage
+ * @returns Object containing Web Audio API support information
+ * @example
+ * ```typescript
+ * const support = getWebAudioSupport();
+ * console.log(`Using Web Audio: ${support.usingWebAudio}`);
+ * console.log(`Reason: ${support.reason}`);
+ * ```
+ */
+const getWebAudioSupport = () => {
+    const available = (0, exports.isWebAudioSupported)();
+    const isIOS = (0, exports.isIOSDevice)();
+    const usingWebAudio = (0, exports.shouldUseWebAudio)();
+    let reason = '';
+    if (!webAudioConfig.enabled) {
+        reason = 'Web Audio API disabled in configuration';
+    }
+    else if (!available) {
+        reason = 'Web Audio API not supported in this environment';
+    }
+    else if (webAudioConfig.forceWebAudio) {
+        reason = 'Web Audio API forced via configuration';
+    }
+    else if (isIOS && webAudioConfig.autoDetectIOS) {
+        reason = 'iOS device detected - using Web Audio API for volume control';
+    }
+    else {
+        reason = 'Using standard HTMLAudioElement volume control';
+    }
+    return {
+        available,
+        isIOS,
+        reason,
+        usingWebAudio
+    };
+};
+exports.getWebAudioSupport = getWebAudioSupport;
+/**
+ * Configures Web Audio API usage
+ * @param config - Configuration options for Web Audio API
+ * @example
+ * ```typescript
+ * // Force Web Audio API usage on all devices
+ * setWebAudioConfig({ forceWebAudio: true });
+ *
+ * // Disable Web Audio API entirely
+ * setWebAudioConfig({ enabled: false });
+ * ```
+ */
+const setWebAudioConfig = (config) => {
+    webAudioConfig = Object.assign(Object.assign({}, webAudioConfig), config);
+};
+exports.setWebAudioConfig = setWebAudioConfig;
+/**
+ * Gets the current Web Audio API configuration
+ * @returns Current Web Audio API configuration
+ * @example
+ * ```typescript
+ * const config = getWebAudioConfig();
+ * console.log(`Web Audio enabled: ${config.enabled}`);
+ * ```
+ */
+const getWebAudioConfig = () => {
+    return Object.assign({}, webAudioConfig);
+};
+exports.getWebAudioConfig = getWebAudioConfig;
+/**
+ * Creates or gets an AudioContext for Web Audio API operations
+ * @returns AudioContext instance or null if not supported
+ * @example
+ * ```typescript
+ * const context = getAudioContext();
+ * if (context) {
+ *   console.log('Audio context created successfully');
+ * }
+ * ```
+ */
+const getAudioContext = () => {
+    if (!(0, exports.isWebAudioSupported)())
+        return null;
+    try {
+        // In Node.js environment (tests), return null to allow mocking
+        if (typeof window === 'undefined') {
+            return null;
+        }
+        // Use existing AudioContext or create new one
+        const windowWithWebkit = window;
+        const AudioContextClass = window.AudioContext || windowWithWebkit.webkitAudioContext;
+        return new AudioContextClass();
+    }
+    catch (error) {
+        // eslint-disable-next-line no-console
+        console.warn('Failed to create AudioContext:', error);
+        return null;
+    }
+};
+exports.getAudioContext = getAudioContext;
+/**
+ * Creates Web Audio API nodes for an audio element
+ * @param audioElement - The HTML audio element to create nodes for
+ * @param audioContext - The AudioContext to use
+ * @returns Web Audio API node set or null if creation fails
+ * @example
+ * ```typescript
+ * const audio = new Audio('song.mp3');
+ * const context = getAudioContext();
+ * if (context) {
+ *   const nodes = createWebAudioNodes(audio, context);
+ *   if (nodes) {
+ *     nodes.gainNode.gain.value = 0.5; // Set volume to 50%
+ *   }
+ * }
+ * ```
+ */
+const createWebAudioNodes = (audioElement, audioContext) => {
+    try {
+        // Create media element source node
+        const sourceNode = audioContext.createMediaElementSource(audioElement);
+        // Create gain node for volume control
+        const gainNode = audioContext.createGain();
+        // Connect source to gain node
+        sourceNode.connect(gainNode);
+        // Connect gain node to destination (speakers)
+        gainNode.connect(audioContext.destination);
+        return {
+            gainNode,
+            sourceNode
+        };
+    }
+    catch (error) {
+        // eslint-disable-next-line no-console
+        console.warn('Failed to create Web Audio nodes:', error);
+        return null;
+    }
+};
+exports.createWebAudioNodes = createWebAudioNodes;
+/**
+ * Sets volume using Web Audio API gain node
+ * @param gainNode - The gain node to set volume on
+ * @param volume - Volume level (0-1)
+ * @param transitionDuration - Optional transition duration in milliseconds
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   setWebAudioVolume(nodes.gainNode, 0.5); // Set to 50% volume
+ *   setWebAudioVolume(nodes.gainNode, 0.2, 300); // Fade to 20% over 300ms
+ * }
+ * ```
+ */
+const setWebAudioVolume = (gainNode, volume, transitionDuration) => {
+    const clampedVolume = Math.max(0, Math.min(1, volume));
+    const currentTime = gainNode.context.currentTime;
+    if (transitionDuration && transitionDuration > 0) {
+        // Smooth transition using Web Audio API's built-in scheduling
+        gainNode.gain.cancelScheduledValues(currentTime);
+        gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime);
+        gainNode.gain.linearRampToValueAtTime(clampedVolume, currentTime + transitionDuration / 1000);
+    }
+    else {
+        // Instant change
+        gainNode.gain.cancelScheduledValues(currentTime);
+        gainNode.gain.setValueAtTime(clampedVolume, currentTime);
+    }
+};
+exports.setWebAudioVolume = setWebAudioVolume;
+/**
+ * Gets the current volume from a Web Audio API gain node
+ * @param gainNode - The gain node to get volume from
+ * @returns Current volume level (0-1)
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   const volume = getWebAudioVolume(nodes.gainNode);
+ *   console.log(`Current volume: ${volume * 100}%`);
+ * }
+ * ```
+ */
+const getWebAudioVolume = (gainNode) => {
+    return gainNode.gain.value;
+};
+exports.getWebAudioVolume = getWebAudioVolume;
+/**
+ * Resumes an AudioContext if it's in suspended state (required for autoplay policy)
+ * @param audioContext - The AudioContext to resume
+ * @returns Promise that resolves when context is resumed
+ * @example
+ * ```typescript
+ * const context = getAudioContext();
+ * if (context) {
+ *   await resumeAudioContext(context);
+ * }
+ * ```
+ */
+const resumeAudioContext = (audioContext) => __awaiter(void 0, void 0, void 0, function* () {
+    if (audioContext.state === 'suspended') {
+        try {
+            yield audioContext.resume();
+        }
+        catch (error) {
+            // eslint-disable-next-line no-console
+            console.warn('Failed to resume AudioContext:', error);
+            // Don't throw - handle gracefully and continue
+        }
+    }
+});
+exports.resumeAudioContext = resumeAudioContext;
+/**
+ * Cleans up Web Audio API nodes and connections
+ * @param nodes - The Web Audio API node set to clean up
+ * @example
+ * ```typescript
+ * const nodes = createWebAudioNodes(audio, context);
+ * if (nodes) {
+ *   // Use nodes...
+ *   cleanupWebAudioNodes(nodes); // Clean up when done
+ * }
+ * ```
+ */
+const cleanupWebAudioNodes = (nodes) => {
+    try {
+        // Disconnect all nodes
+        nodes.sourceNode.disconnect();
+        nodes.gainNode.disconnect();
+    }
+    catch (error) {
+        // Ignore errors during cleanup
+        // eslint-disable-next-line no-console
+        console.warn('Error during Web Audio cleanup:', error);
+    }
+};
+exports.cleanupWebAudioNodes = cleanupWebAudioNodes;
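One implementation detail worth calling out: setWebAudioVolume takes its transition duration in milliseconds, while Web Audio scheduling runs in seconds, so the ramp target time is currentTime + transitionDuration / 1000, and the current gain is pinned with setValueAtTime first so the fade starts from the actual present value. Assuming a current gain of 1.0 and context.currentTime of 4.5, a call like setWebAudioVolume(gainNode, 0.2, 300) schedules, in effect:

```typescript
gainNode.gain.cancelScheduledValues(4.5);        // drop any in-flight ramps
gainNode.gain.setValueAtTime(1.0, 4.5);          // anchor at the current gain value
gainNode.gain.linearRampToValueAtTime(0.2, 4.8); // 4.5 s + 300 ms / 1000 = 4.8 s
```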
package/package.json
CHANGED
package/src/core.ts
CHANGED
@@ -12,7 +12,13 @@ import {
   setupProgressTracking,
   cleanupProgressTracking
 } from './events';
-import {
+import {
+  applyVolumeDucking,
+  restoreVolumeLevels,
+  cancelVolumeTransition,
+  initializeWebAudioForAudio,
+  cleanupWebAudioForAudio
+} from './volume';
 import { setupAudioErrorHandling, handleAudioError } from './errors';
 
 /**
@@ -283,6 +289,9 @@ export const queueAudio = async (
     await handleAudioError(audio, channelNumber, validatedUrl, error);
   });
 
+  // Initialize Web Audio API support if needed
+  await initializeWebAudioForAudio(audio, channelNumber);
+
   // Apply options if provided
   if (options) {
     if (typeof options.loop === 'boolean') {
@@ -300,10 +309,10 @@ export const queueAudio = async (
     }
   }
 
-  // Handle
-  const shouldAddToFront = options?.addToFront
+  // Handle addToFront option
+  const shouldAddToFront = options?.addToFront;
 
-  // Add to queue based on
+  // Add to queue based on addToFront option
   if (shouldAddToFront && channel.queue.length > 0) {
     // Insert after currently playing track (at index 1)
     channel.queue.splice(1, 0, audio);
@@ -440,6 +449,7 @@ export const playAudioQueue = async (channelNumber: number): Promise<void> => {
       // For non-looping audio, remove from queue and play next
       currentAudio.pause();
       cleanupProgressTracking(currentAudio, channelNumber, audioChannels);
+      cleanupWebAudioForAudio(currentAudio, channelNumber);
      channel.queue.shift();
      channel.isPaused = false; // Reset pause state
 
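The volume.ts changes themselves are not shown in this section, but given the import list above and the web-audio primitives, initializeWebAudioForAudio and cleanupWebAudioForAudio presumably keep per-channel bookkeeping that maps each queued element to its node set. A hypothetical sketch of that wiring follows; the names of the map and its storage shape are assumptions, not the package's actual code.

```typescript
import {
  shouldUseWebAudio,
  getAudioContext,
  createWebAudioNodes,
  resumeAudioContext,
  cleanupWebAudioNodes
} from './web-audio';
import { WebAudioNodeSet } from './types';

// Assumed bookkeeping: one node set per queued element, grouped by channel.
const nodesByChannel = new Map<number, Map<HTMLAudioElement, WebAudioNodeSet>>();

export const initializeWebAudioForAudio = async (
  audio: HTMLAudioElement,
  channelNumber: number
): Promise<void> => {
  if (!shouldUseWebAudio()) return;
  const context = getAudioContext();
  if (!context) return;
  await resumeAudioContext(context);
  const nodes = createWebAudioNodes(audio, context);
  if (nodes) {
    const channelMap =
      nodesByChannel.get(channelNumber) ?? new Map<HTMLAudioElement, WebAudioNodeSet>();
    channelMap.set(audio, nodes);
    nodesByChannel.set(channelNumber, channelMap);
  }
};

export const cleanupWebAudioForAudio = (
  audio: HTMLAudioElement,
  channelNumber: number
): void => {
  const nodes = nodesByChannel.get(channelNumber)?.get(audio);
  if (nodes) {
    cleanupWebAudioNodes(nodes);
    nodesByChannel.get(channelNumber)?.delete(audio);
  }
};
```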