@aippy/runtime 0.2.0-dev.1 → 0.2.0-dev.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/AudioSilenceDetector.d.ts +52 -0
- package/dist/audio/index.d.ts +1 -1
- package/dist/audio/index.js +1 -1
- package/dist/audio/patchAudioContext.d.ts +7 -0
- package/dist/audio/types.d.ts +31 -0
- package/dist/audio/useAudioContext.d.ts +3 -1
- package/dist/core/index.js +1 -1
- package/dist/index/index.js +1 -1
- package/dist/useAudioContext-BrvG8DKk.js +345 -0
- package/package.json +2 -1
- package/dist/useAudioContext-DSbHyklm.js +0 -186
package/dist/audio/AudioSilenceDetector.d.ts ADDED

@@ -0,0 +1,52 @@
+import { AutoPauseOptions } from './types';
+/**
+ * Detects silence in an audio stream and automatically pauses/resumes media element
+ * to prevent iOS Safari audio buffer looping bug
+ */
+export declare class AudioSilenceDetector {
+    private audioContext;
+    private mediaElement;
+    private analyser;
+    private dataArray;
+    private rafId;
+    private silenceStartTime;
+    private isPaused;
+    private lastCheckTime;
+    private readonly silenceThreshold;
+    private readonly silenceDuration;
+    private readonly checkInterval;
+    private readonly debug;
+    constructor(audioContext: AudioContext, mediaElement: HTMLMediaElement, options: Required<AutoPauseOptions>, debug?: boolean);
+    /**
+     * Connect the detector to the audio stream
+     */
+    connect(source: AudioNode): void;
+    /**
+     * Start monitoring audio levels
+     */
+    start(): void;
+    /**
+     * Stop monitoring
+     */
+    stop(): void;
+    /**
+     * Cleanup resources
+     */
+    dispose(): void;
+    /**
+     * Check audio levels and pause/resume as needed
+     */
+    private check;
+    /**
+     * Get current audio level (0-1)
+     */
+    private getAudioLevel;
+    /**
+     * Pause media element to stop audio output
+     */
+    private pauseMedia;
+    /**
+     * Resume media element playback
+     */
+    private resumeMedia;
+}
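For orientation: the silence check declared above is a running RMS over the analyser's time-domain samples (the full implementation ships in the new `useAudioContext-BrvG8DKk.js` bundle later in this diff). A minimal TypeScript sketch of that computation, mirroring the bundled code but not part of the package's public API:

```ts
// Sketch of the detector's level measurement (editor's illustration).
const audioContext = new AudioContext();
const analyser = audioContext.createAnalyser();
analyser.fftSize = 512; // 256 time-domain samples per read
const dataArray = new Uint8Array(analyser.frequencyBinCount);

function getAudioLevel(): number {
  analyser.getByteTimeDomainData(dataArray); // bytes; 128 = zero signal
  let sum = 0;
  for (let i = 0; i < dataArray.length; i++) {
    const normalized = (dataArray[i] - 128) / 128; // map to [-1, 1]
    sum += normalized * normalized;
  }
  return Math.sqrt(sum / dataArray.length); // RMS level in [0, 1]
}
```

A level below `silenceThreshold` (default 0.001) sustained for `silenceDuration` (default 50ms) triggers the private `pauseMedia`; any louder sample resets the timer and resumes playback.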
package/dist/audio/index.d.ts CHANGED

@@ -7,7 +7,7 @@
  * @module @aippy/runtime/audio
  */
 export { patchAudioContext } from './patchAudioContext';
-export type { AudioContextPatchOptions, MediaElementType, PatchedAudioContext, } from './types';
+export type { AudioContextPatchOptions, AutoPauseOptions, MediaElementType, PatchedAudioContext, } from './types';
 export { createHiddenMediaElement, createHiddenVideoElement, isIOSDevice, isMediaStreamAudioSupported, } from './utils';
 export { useAudioContext } from './useAudioContext';
 export type { UseAudioContextOptions, UseAudioContextReturn } from './useAudioContext';
package/dist/audio/index.js CHANGED

package/dist/audio/patchAudioContext.d.ts CHANGED

@@ -6,6 +6,12 @@ import { AudioContextPatchOptions, PatchedAudioContext } from './types';
  * This function routes audio through a MediaStreamAudioDestinationNode
  * connected to a hidden video element, which bypasses the restriction.
  *
+ * Features:
+ * - Auto-pause: Automatically pauses MediaElement when audio becomes silent
+ *   to prevent iOS Safari audio buffer looping bug (~50ms latency)
+ * - Auto-resume: Automatically resumes playback when new audio is detected
+ * - Zero user code changes: Use standard Web Audio API after unlock
+ *
  * On non-iOS devices, this function returns the original context with
  * minimal modifications (zero overhead).
  *
@@ -25,6 +31,7 @@ import { AudioContextPatchOptions, PatchedAudioContext } from './types';
  * const osc = patchedCtx.createOscillator();
  * osc.connect(patchedCtx.destination); // Auto-routed on iOS
  * osc.start();
+ * osc.stop(); // Audio will auto-stop ~50ms after silence detected
  * ```
  */
 export declare function patchAudioContext(audioContext: AudioContext, options?: AudioContextPatchOptions): PatchedAudioContext;
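Combining the documented example with the new option, a usage sketch (assumes the `@aippy/runtime/audio` entry point from index.d.ts above; the `autoPause` values shown are the documented defaults):

```ts
import { patchAudioContext } from '@aippy/runtime/audio';

const patched = patchAudioContext(new AudioContext(), {
  autoPause: {
    enabled: true,           // default
    silenceThreshold: 0.001, // default
    silenceDuration: 50,     // default (ms)
    checkInterval: 16,       // default (ms)
  },
});

// unlock() must run inside a user gesture on iOS.
document.addEventListener('click', async () => {
  await patched.unlock();
  const osc = patched.createOscillator();
  osc.connect(patched.destination); // auto-routed on iOS
  osc.start();
  osc.stop(patched.currentTime + 0.3); // auto-pause follows ~50ms after silence
}, { once: true });
```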
package/dist/audio/types.d.ts CHANGED

@@ -2,6 +2,31 @@
  * Media element type for MediaStream playback
  */
 export type MediaElementType = 'video' | 'audio';
+/**
+ * Options for automatic pause/resume behavior
+ */
+export interface AutoPauseOptions {
+    /**
+     * Enable automatic pause when audio becomes silent
+     * @default true
+     */
+    enabled?: boolean;
+    /**
+     * Silence threshold (0-1). Audio below this level is considered silent
+     * @default 0.001
+     */
+    silenceThreshold?: number;
+    /**
+     * Duration of silence (ms) before triggering auto-pause
+     * @default 50
+     */
+    silenceDuration?: number;
+    /**
+     * Interval (ms) for checking audio levels
+     * @default 16
+     */
+    checkInterval?: number;
+}
 /**
  * Options for patching AudioContext
  */
@@ -27,6 +52,12 @@ export interface AudioContextPatchOptions {
      * @default 'video'
      */
     mediaElementType?: MediaElementType;
+    /**
+     * Auto-pause configuration to prevent audio buffer looping on iOS
+     * When enabled, automatically pauses MediaElement when audio becomes silent
+     * @default { enabled: true, silenceThreshold: 0.001, silenceDuration: 50, checkInterval: 16 }
+     */
+    autoPause?: AutoPauseOptions;
 }
 /**
  * Extended AudioContext with iOS compatibility features
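For reference, the bundle later in this diff resolves a partial `AutoPauseOptions` against these defaults with nullish coalescing; an equivalent TypeScript sketch:

```ts
import type { AutoPauseOptions } from '@aippy/runtime/audio';

// Editor's illustration of the default resolution, not a package export.
function resolveAutoPause(autoPause: AutoPauseOptions = {}): Required<AutoPauseOptions> {
  return {
    enabled: autoPause.enabled ?? true,
    silenceThreshold: autoPause.silenceThreshold ?? 0.001,
    silenceDuration: autoPause.silenceDuration ?? 50,
    checkInterval: autoPause.checkInterval ?? 16,
  };
}

resolveAutoPause({ silenceDuration: 100 });
// => { enabled: true, silenceThreshold: 0.001, silenceDuration: 100, checkInterval: 16 }
```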
package/dist/audio/useAudioContext.d.ts CHANGED

@@ -31,6 +31,7 @@ export interface UseAudioContextReturn {
  *
  * Automatically handles:
  * - AudioContext creation and patching
+ * - Auto-pause/resume to prevent iOS audio buffer looping (~50ms latency)
  * - Cleanup on unmount
  * - Optional auto-unlock on first user interaction
  *
@@ -52,11 +53,12 @@ export interface UseAudioContextReturn {
  *     await unlock();
  *   }
  *
- *   // Play sound using Web Audio API
+ *   // Play sound using Web Audio API - just use standard API!
  *   const osc = audioContext.createOscillator();
  *   osc.connect(audioContext.destination);
  *   osc.start();
  *   osc.stop(audioContext.currentTime + 0.3);
+ *   // Audio will auto-stop ~50ms after silence detected
  * };
  *
  * return <button onClick={playSound}>Play Sound</button>;
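Assembled from the JSDoc fragments above into one self-contained component (an illustration, not package source; note `audioContext` is null until the hook's first effect runs):

```tsx
import { useAudioContext } from '@aippy/runtime/audio';

function PlayButton() {
  const { audioContext, isUnlocked, unlock } = useAudioContext();

  const playSound = async () => {
    if (!audioContext) return;       // hook not initialized yet
    if (!isUnlocked) await unlock(); // no-op once unlocked
    const osc = audioContext.createOscillator();
    osc.connect(audioContext.destination);
    osc.start();
    osc.stop(audioContext.currentTime + 0.3);
    // Audio will auto-stop ~50ms after silence detected
  };

  return <button onClick={playSound}>Play Sound</button>;
}
```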
package/dist/core/index.js CHANGED
package/dist/index/index.js CHANGED

@@ -3,7 +3,7 @@ import { A, E, c } from "../errors-DAz5_jDJ.js";
 import { CameraAPI, FileSystemAPI, GeolocationAPI, SensorsAPI, camera, fileSystem, geolocation, sensors, vibrate } from "../device/index.js";
 import { c as c2, a, P, b, p, d } from "../pwa-BkviTQoN.js";
 import { a as a2, b as b2 } from "../useTweaks-mK5PAWOs.js";
-import { c as c3, a as a3, i, b as b3, p as p2, u } from "../useAudioContext-DSbHyklm.js";
+import { c as c3, a as a3, i, b as b3, p as p2, u } from "../useAudioContext-BrvG8DKk.js";
 export {
   A as AippyRuntimeError,
   CameraAPI,
package/dist/useAudioContext-BrvG8DKk.js ADDED

@@ -0,0 +1,345 @@
+import { useState, useRef, useEffect } from "react";
+function isIOSDevice() {
+  const userAgent = navigator.userAgent;
+  if (/iPad|iPhone|iPod/.test(userAgent)) {
+    return true;
+  }
+  if (navigator.platform === "MacIntel" && navigator.maxTouchPoints > 1) {
+    return true;
+  }
+  return false;
+}
+function isMediaStreamAudioSupported() {
+  try {
+    if (!window.AudioContext) {
+      return false;
+    }
+    const tempContext = new AudioContext();
+    const hasMethod = typeof tempContext.createMediaStreamDestination === "function";
+    tempContext.close();
+    return hasMethod;
+  } catch {
+    return false;
+  }
+}
+function createHiddenMediaElement(type = "video", debug = false) {
+  const element = document.createElement(type);
+  element.muted = false;
+  element.autoplay = true;
+  if (type === "video") {
+    element.playsInline = true;
+  }
+  if (debug) {
+    element.style.cssText = "position:fixed;bottom:10px;right:10px;width:200px;background:#ff0000;z-index:9999;";
+  } else {
+    element.style.cssText = "position:fixed;width:1px;height:1px;opacity:0;pointer-events:none;";
+  }
+  return element;
+}
+function createHiddenVideoElement(debug = false) {
+  return createHiddenMediaElement("video", debug);
+}
+class AudioSilenceDetector {
+  constructor(audioContext, mediaElement, options, debug = false) {
+    this.audioContext = audioContext;
+    this.mediaElement = mediaElement;
+    this.silenceThreshold = options.silenceThreshold;
+    this.silenceDuration = options.silenceDuration;
+    this.checkInterval = options.checkInterval;
+    this.debug = debug;
+    this.analyser = audioContext.createAnalyser();
+    this.analyser.fftSize = 512;
+    this.analyser.smoothingTimeConstant = 0.3;
+    this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
+  }
+  analyser;
+  dataArray;
+  rafId = null;
+  silenceStartTime = 0;
+  isPaused = false;
+  lastCheckTime = 0;
+  silenceThreshold;
+  silenceDuration;
+  checkInterval;
+  debug;
+  /**
+   * Connect the detector to the audio stream
+   */
+  connect(source) {
+    source.connect(this.analyser);
+  }
+  /**
+   * Start monitoring audio levels
+   */
+  start() {
+    if (this.rafId !== null) return;
+    this.lastCheckTime = performance.now();
+    this.check();
+    if (this.debug) {
+      console.log("[AudioSilenceDetector] Started monitoring");
+    }
+  }
+  /**
+   * Stop monitoring
+   */
+  stop() {
+    if (this.rafId !== null) {
+      cancelAnimationFrame(this.rafId);
+      this.rafId = null;
+    }
+    if (this.debug) {
+      console.log("[AudioSilenceDetector] Stopped monitoring");
+    }
+  }
+  /**
+   * Cleanup resources
+   */
+  dispose() {
+    this.stop();
+    this.analyser.disconnect();
+  }
+  /**
+   * Check audio levels and pause/resume as needed
+   */
+  check = () => {
+    const now = performance.now();
+    const elapsed = now - this.lastCheckTime;
+    if (elapsed >= this.checkInterval) {
+      this.lastCheckTime = now;
+      const volume = this.getAudioLevel();
+      if (volume < this.silenceThreshold) {
+        if (this.silenceStartTime === 0) {
+          this.silenceStartTime = now;
+        } else {
+          const silenceDuration = now - this.silenceStartTime;
+          if (silenceDuration >= this.silenceDuration && !this.isPaused) {
+            this.pauseMedia();
+          }
+        }
+      } else {
+        this.silenceStartTime = 0;
+        if (this.isPaused) {
+          this.resumeMedia();
+        }
+      }
+    }
+    this.rafId = requestAnimationFrame(this.check);
+  };
+  /**
+   * Get current audio level (0-1)
+   */
+  getAudioLevel() {
+    this.analyser.getByteTimeDomainData(this.dataArray);
+    let sum = 0;
+    for (let i = 0; i < this.dataArray.length; i++) {
+      const normalized = (this.dataArray[i] - 128) / 128;
+      sum += normalized * normalized;
+    }
+    return Math.sqrt(sum / this.dataArray.length);
+  }
+  /**
+   * Pause media element to stop audio output
+   */
+  pauseMedia() {
+    try {
+      this.mediaElement.pause();
+      this.isPaused = true;
+      if (this.debug) {
+        console.log("[AudioSilenceDetector] Paused media element (silence detected)");
+      }
+    } catch (error) {
+      console.error("[AudioSilenceDetector] Failed to pause:", error);
+    }
+  }
+  /**
+   * Resume media element playback
+   */
+  resumeMedia() {
+    try {
+      if (this.audioContext.state === "running") {
+        this.mediaElement.play().catch((error) => {
+          if (this.debug) {
+            console.warn("[AudioSilenceDetector] Failed to resume:", error);
+          }
+        });
+      }
+      this.isPaused = false;
+      if (this.debug) {
+        console.log("[AudioSilenceDetector] Resumed media element (audio detected)");
+      }
+    } catch (error) {
+      console.error("[AudioSilenceDetector] Failed to resume:", error);
+    }
+  }
+}
+function patchAudioContext(audioContext, options = {}) {
+  const {
+    forceEnable = false,
+    autoCleanup = true,
+    debug = false,
+    mediaElementType = "video",
+    autoPause = {}
+  } = options;
+  const autoPauseOptions = {
+    enabled: autoPause.enabled ?? true,
+    silenceThreshold: autoPause.silenceThreshold ?? 1e-3,
+    silenceDuration: autoPause.silenceDuration ?? 50,
+    checkInterval: autoPause.checkInterval ?? 16
+  };
+  const needsPatch = forceEnable || isIOSDevice();
+  if (!needsPatch) {
+    return Object.assign(audioContext, {
+      unlock: async () => {
+        if (audioContext.state === "suspended") {
+          await audioContext.resume();
+        }
+      },
+      cleanup: () => {
+      },
+      isPatched: false,
+      originalDestination: audioContext.destination
+    });
+  }
+  if (!isMediaStreamAudioSupported()) {
+    console.warn(
+      "[AudioContext] MediaStreamAudioDestinationNode not supported, falling back to native"
+    );
+    return Object.assign(audioContext, {
+      unlock: async () => audioContext.resume(),
+      cleanup: () => {
+      },
+      isPatched: false,
+      originalDestination: audioContext.destination
+    });
+  }
+  const originalDestination = audioContext.destination;
+  const streamDestination = audioContext.createMediaStreamDestination();
+  const mediaElement = createHiddenMediaElement(mediaElementType, debug);
+  mediaElement.srcObject = streamDestination.stream;
+  document.body.appendChild(mediaElement);
+  let silenceDetector = null;
+  if (autoPauseOptions.enabled) {
+    silenceDetector = new AudioSilenceDetector(
+      audioContext,
+      mediaElement,
+      autoPauseOptions,
+      debug
+    );
+    silenceDetector.connect(streamDestination);
+  }
+  Object.defineProperty(audioContext, "destination", {
+    get: () => streamDestination,
+    enumerable: true,
+    configurable: true
+  });
+  if (!("maxChannelCount" in streamDestination)) {
+    Object.defineProperty(streamDestination, "maxChannelCount", {
+      get: () => originalDestination.maxChannelCount,
+      enumerable: true
+    });
+  }
+  let isUnlocked = false;
+  const unlock = async () => {
+    if (isUnlocked) {
+      return;
+    }
+    try {
+      await mediaElement.play();
+      if (audioContext.state === "suspended") {
+        await audioContext.resume();
+      }
+      if (silenceDetector) {
+        silenceDetector.start();
+      }
+      isUnlocked = true;
+      if (debug) {
+        console.log("[AudioContext] iOS unlock successful");
+      }
+    } catch (error) {
+      console.error("[AudioContext] Unlock failed:", error);
+      throw error;
+    }
+  };
+  const cleanup = () => {
+    try {
+      if (silenceDetector) {
+        silenceDetector.dispose();
+        silenceDetector = null;
+      }
+      mediaElement.pause();
+      mediaElement.srcObject = null;
+      mediaElement.remove();
+      if (debug) {
+        console.log("[AudioContext] Cleanup completed");
+      }
+    } catch (error) {
+      console.error("[AudioContext] Cleanup error:", error);
+    }
+  };
+  if (autoCleanup) {
+    const originalClose = audioContext.close.bind(audioContext);
+    audioContext.close = async () => {
+      cleanup();
+      return originalClose();
+    };
+  }
+  return Object.assign(audioContext, {
+    unlock,
+    cleanup,
+    isPatched: true,
+    originalDestination
+  });
+}
+function useAudioContext(options = {}) {
+  const { autoUnlock = true, ...patchOptions } = options;
+  const [audioContext, setAudioContext] = useState(null);
+  const [isUnlocked, setIsUnlocked] = useState(false);
+  const unlockFnRef = useRef(null);
+  useEffect(() => {
+    const ctx = new AudioContext();
+    const patchedCtx = patchAudioContext(ctx, patchOptions);
+    setAudioContext(patchedCtx);
+    return () => {
+      patchedCtx.cleanup();
+      patchedCtx.close();
+    };
+  }, []);
+  useEffect(() => {
+    if (!audioContext) return;
+    unlockFnRef.current = async () => {
+      if (isUnlocked) return;
+      try {
+        await audioContext.unlock();
+        setIsUnlocked(true);
+      } catch (error) {
+        console.warn("Failed to unlock audio:", error);
+      }
+    };
+  }, [audioContext, isUnlocked]);
+  useEffect(() => {
+    if (!autoUnlock || !audioContext) return;
+    const handleInteraction = async () => {
+      await unlockFnRef.current?.();
+    };
+    document.addEventListener("click", handleInteraction, { once: true });
+    document.addEventListener("touchstart", handleInteraction, { once: true });
+    return () => {
+      document.removeEventListener("click", handleInteraction);
+      document.removeEventListener("touchstart", handleInteraction);
+    };
+  }, [autoUnlock, audioContext]);
+  return {
+    audioContext,
+    isUnlocked,
+    unlock: unlockFnRef.current || (async () => {
+    })
+  };
+}
+export {
+  createHiddenVideoElement as a,
+  isMediaStreamAudioSupported as b,
+  createHiddenMediaElement as c,
+  isIOSDevice as i,
+  patchAudioContext as p,
+  useAudioContext as u
+};
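The heart of the bundle above is swapping `audioContext.destination` for a MediaStreamAudioDestinationNode that plays through a hidden, unmuted element, so iOS treats Web Audio output as ordinary media playback. A condensed TypeScript sketch of just that mechanism (cleanup, fallbacks, and the silence detector omitted):

```ts
const ctx = new AudioContext();
const streamDestination = ctx.createMediaStreamDestination();

// Hidden video element that actually emits the sound on iOS.
const video = document.createElement('video');
video.playsInline = true;
video.muted = false;
video.autoplay = true;
video.srcObject = streamDestination.stream;
document.body.appendChild(video);

// Anything connected to ctx.destination now reaches the stream instead.
Object.defineProperty(ctx, 'destination', {
  get: () => streamDestination,
  enumerable: true,
  configurable: true,
});

// Inside a user gesture: both calls are needed to start output.
async function unlock(): Promise<void> {
  await video.play();
  if (ctx.state === 'suspended') await ctx.resume();
}
```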
package/package.json CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aippy/runtime",
-  "version": "0.2.0-dev.1",
+  "version": "0.2.0-dev.3",
   "description": "Aippy Runtime SDK - Runtime SDK for Aippy projects",
   "private": false,
   "type": "module",
@@ -55,6 +55,7 @@
     "url": "https://discord.com/invite/G94ZAx6gVq"
   },
   "devDependencies": {
+    "@eslint/js": "^9.36.0",
     "@types/node": "^24.5.2",
     "@types/react": "^19.2.2",
     "@types/ua-parser-js": "^0.7.39",
package/dist/useAudioContext-DSbHyklm.js DELETED

@@ -1,186 +0,0 @@
-import { useRef, useEffect } from "react";
-function isIOSDevice() {
-  const userAgent = navigator.userAgent;
-  if (/iPad|iPhone|iPod/.test(userAgent)) {
-    return true;
-  }
-  if (navigator.platform === "MacIntel" && navigator.maxTouchPoints > 1) {
-    return true;
-  }
-  return false;
-}
-function isMediaStreamAudioSupported() {
-  try {
-    if (!window.AudioContext) {
-      return false;
-    }
-    const tempContext = new AudioContext();
-    const hasMethod = typeof tempContext.createMediaStreamDestination === "function";
-    tempContext.close();
-    return hasMethod;
-  } catch {
-    return false;
-  }
-}
-function createHiddenMediaElement(type = "video", debug = false) {
-  const element = document.createElement(type);
-  element.muted = false;
-  element.autoplay = true;
-  if (type === "video") {
-    element.playsInline = true;
-  }
-  if (debug) {
-    element.style.cssText = "position:fixed;bottom:10px;right:10px;width:200px;background:#ff0000;z-index:9999;";
-  } else {
-    element.style.cssText = "position:fixed;width:1px;height:1px;opacity:0;pointer-events:none;";
-  }
-  return element;
-}
-function createHiddenVideoElement(debug = false) {
-  return createHiddenMediaElement("video", debug);
-}
-function patchAudioContext(audioContext, options = {}) {
-  const {
-    forceEnable = false,
-    autoCleanup = true,
-    debug = false,
-    mediaElementType = "video"
-  } = options;
-  const needsPatch = forceEnable || isIOSDevice();
-  if (!needsPatch) {
-    return Object.assign(audioContext, {
-      unlock: async () => {
-        if (audioContext.state === "suspended") {
-          await audioContext.resume();
-        }
-      },
-      cleanup: () => {
-      },
-      isPatched: false,
-      originalDestination: audioContext.destination
-    });
-  }
-  if (!isMediaStreamAudioSupported()) {
-    console.warn(
-      "[AudioContext] MediaStreamAudioDestinationNode not supported, falling back to native"
-    );
-    return Object.assign(audioContext, {
-      unlock: async () => audioContext.resume(),
-      cleanup: () => {
-      },
-      isPatched: false,
-      originalDestination: audioContext.destination
-    });
-  }
-  const originalDestination = audioContext.destination;
-  const streamDestination = audioContext.createMediaStreamDestination();
-  const mediaElement = createHiddenMediaElement(mediaElementType, debug);
-  mediaElement.srcObject = streamDestination.stream;
-  document.body.appendChild(mediaElement);
-  Object.defineProperty(audioContext, "destination", {
-    get: () => streamDestination,
-    enumerable: true,
-    configurable: true
-  });
-  if (!("maxChannelCount" in streamDestination)) {
-    Object.defineProperty(streamDestination, "maxChannelCount", {
-      get: () => originalDestination.maxChannelCount,
-      enumerable: true
-    });
-  }
-  let isUnlocked = false;
-  const unlock = async () => {
-    if (isUnlocked) {
-      return;
-    }
-    try {
-      await mediaElement.play();
-      if (audioContext.state === "suspended") {
-        await audioContext.resume();
-      }
-      isUnlocked = true;
-      if (debug) {
-        console.log("[AudioContext] iOS unlock successful");
-      }
-    } catch (error) {
-      console.error("[AudioContext] Unlock failed:", error);
-      throw error;
-    }
-  };
-  const cleanup = () => {
-    try {
-      mediaElement.pause();
-      mediaElement.srcObject = null;
-      mediaElement.remove();
-      if (debug) {
-        console.log("[AudioContext] Cleanup completed");
-      }
-    } catch (error) {
-      console.error("[AudioContext] Cleanup error:", error);
-    }
-  };
-  if (autoCleanup) {
-    const originalClose = audioContext.close.bind(audioContext);
-    audioContext.close = async () => {
-      cleanup();
-      return originalClose();
-    };
-  }
-  return Object.assign(audioContext, {
-    unlock,
-    cleanup,
-    isPatched: true,
-    originalDestination
-  });
-}
-function useAudioContext(options = {}) {
-  const { autoUnlock = true, ...patchOptions } = options;
-  const audioContextRef = useRef(null);
-  const isUnlockedRef = useRef(false);
-  const unlockFnRef = useRef(null);
-  useEffect(() => {
-    const ctx = new AudioContext();
-    audioContextRef.current = patchAudioContext(ctx, patchOptions);
-    return () => {
-      audioContextRef.current?.cleanup();
-      audioContextRef.current?.close();
-    };
-  }, []);
-  if (!unlockFnRef.current) {
-    unlockFnRef.current = async () => {
-      const ctx = audioContextRef.current;
-      if (!ctx || isUnlockedRef.current) return;
-      try {
-        await ctx.unlock();
-        isUnlockedRef.current = true;
-      } catch (error) {
-        console.warn("Failed to unlock audio:", error);
-      }
-    };
-  }
-  useEffect(() => {
-    if (!autoUnlock) return;
-    const handleInteraction = async () => {
-      await unlockFnRef.current?.();
-    };
-    document.addEventListener("click", handleInteraction, { once: true });
-    document.addEventListener("touchstart", handleInteraction, { once: true });
-    return () => {
-      document.removeEventListener("click", handleInteraction);
-      document.removeEventListener("touchstart", handleInteraction);
-    };
-  }, [autoUnlock]);
-  return {
-    audioContext: audioContextRef.current,
-    isUnlocked: isUnlockedRef.current,
-    unlock: unlockFnRef.current
-  };
-}
-export {
-  createHiddenVideoElement as a,
-  isMediaStreamAudioSupported as b,
-  createHiddenMediaElement as c,
-  isIOSDevice as i,
-  patchAudioContext as p,
-  useAudioContext as u
-};