@gjsify/webaudio 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/esm/audio-buffer-source-node.js +63 -0
- package/lib/esm/audio-buffer.js +37 -0
- package/lib/esm/audio-context.js +94 -0
- package/lib/esm/audio-destination-node.js +10 -0
- package/lib/esm/audio-node.js +33 -0
- package/lib/esm/audio-param.js +78 -0
- package/lib/esm/gain-node.js +19 -0
- package/lib/esm/gst-decoder.js +64 -0
- package/lib/esm/gst-init.js +12 -0
- package/lib/esm/gst-player.js +125 -0
- package/lib/esm/html-audio-element.js +61 -0
- package/lib/esm/index.js +18 -0
- package/lib/esm/register.js +13 -0
- package/lib/types/audio-buffer-source-node.d.ts +18 -0
- package/lib/types/audio-buffer.d.ts +17 -0
- package/lib/types/audio-context.d.ts +34 -0
- package/lib/types/audio-destination-node.d.ts +5 -0
- package/lib/types/audio-node.d.ts +12 -0
- package/lib/types/audio-param.d.ts +20 -0
- package/lib/types/gain-node.d.ts +9 -0
- package/lib/types/gst-decoder.d.ts +9 -0
- package/lib/types/gst-init.d.ts +3 -0
- package/lib/types/gst-player.d.ts +39 -0
- package/lib/types/html-audio-element.d.ts +17 -0
- package/lib/types/index.d.ts +8 -0
- package/lib/types/register.d.ts +1 -0
- package/lib/types/webaudio.spec.d.ts +2 -0
- package/package.json +53 -0
- package/src/audio-buffer-source-node.ts +84 -0
- package/src/audio-buffer.ts +47 -0
- package/src/audio-context.ts +102 -0
- package/src/audio-destination-node.ts +12 -0
- package/src/audio-node.ts +37 -0
- package/src/audio-param.ts +103 -0
- package/src/gain-node.ts +23 -0
- package/src/gst-decoder.ts +102 -0
- package/src/gst-init.ts +15 -0
- package/src/gst-player.ts +178 -0
- package/src/html-audio-element.ts +76 -0
- package/src/index.ts +14 -0
- package/src/register.ts +17 -0
- package/src/test.mts +5 -0
- package/src/webaudio.spec.ts +351 -0
- package/tsconfig.json +36 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
// GStreamer decode pipeline: ArrayBuffer (MP3/WAV/OGG) → AudioBuffer (PCM Float32)
|
|
2
|
+
//
|
|
3
|
+
// Pipeline: appsrc → decodebin → audioconvert → audioresample → capsfilter(F32LE) → appsink
|
|
4
|
+
// Uses try_pull_sample() for synchronous decoding (avoids GJS thread-safety issues).
|
|
5
|
+
//
|
|
6
|
+
// Reference: GStreamer 1.0 via gi://Gst, GstApp via gi://GstApp
|
|
7
|
+
|
|
8
|
+
import { ensureGstInit, Gst } from './gst-init.js';
|
|
9
|
+
import { AudioBuffer } from './audio-buffer.js';
|
|
10
|
+
|
|
11
|
+
// Force GstApp typelib load so get_by_name() resolves AppSrc/AppSink types
|
|
12
|
+
import GstApp from 'gi://GstApp?version=1.0';
|
|
13
|
+
void GstApp;
|
|
14
|
+
|
|
15
|
+
// Single-shot decode pipeline: appsrc feeds the encoded bytes, decodebin
// auto-selects demuxer/decoder, audioconvert+audioresample normalize the
// stream into the capsfilter's interleaved Float32 LE format, and appsink
// hands the raw PCM back to us. sync=false: pull as fast as possible
// (decoding, not playback).
const PIPELINE_DESC =
  'appsrc name=src ! decodebin ! audioconvert ! audioresample ! ' +
  'capsfilter caps=audio/x-raw,format=F32LE,layout=interleaved ! ' +
  'appsink name=sink sync=false';
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Decode encoded audio data (MP3, WAV, OGG, FLAC, etc.) into an AudioBuffer
|
|
22
|
+
* containing PCM Float32 channel data.
|
|
23
|
+
*
|
|
24
|
+
* This is a synchronous operation that blocks until decoding completes.
|
|
25
|
+
* It must be called from the main thread (GJS requirement).
|
|
26
|
+
*/
|
|
27
|
+
export function decodeAudioDataSync(arrayBuffer: ArrayBuffer): AudioBuffer {
|
|
28
|
+
ensureGstInit();
|
|
29
|
+
|
|
30
|
+
const pipeline = Gst.parse_launch(PIPELINE_DESC) as Gst.Bin;
|
|
31
|
+
const appsrc = pipeline.get_by_name('src')!;
|
|
32
|
+
const appsink = pipeline.get_by_name('sink')!;
|
|
33
|
+
|
|
34
|
+
pipeline.set_state(Gst.State.PLAYING);
|
|
35
|
+
|
|
36
|
+
// Push encoded data into the pipeline
|
|
37
|
+
const data = new Uint8Array(arrayBuffer);
|
|
38
|
+
(appsrc as any).push_buffer(Gst.Buffer.new_wrapped(data));
|
|
39
|
+
(appsrc as any).end_of_stream();
|
|
40
|
+
|
|
41
|
+
// Pull decoded PCM samples
|
|
42
|
+
const chunks: Uint8Array[] = [];
|
|
43
|
+
let sampleRate = 0;
|
|
44
|
+
let channels = 0;
|
|
45
|
+
|
|
46
|
+
while (true) {
|
|
47
|
+
const sample = (appsink as any).try_pull_sample(2 * Number(Gst.SECOND));
|
|
48
|
+
if (!sample) break;
|
|
49
|
+
|
|
50
|
+
// Read format from the first sample's negotiated caps
|
|
51
|
+
if (sampleRate === 0) {
|
|
52
|
+
const caps = sample.get_caps();
|
|
53
|
+
if (caps) {
|
|
54
|
+
const struct = caps.get_structure(0);
|
|
55
|
+
[, sampleRate] = struct.get_int('rate');
|
|
56
|
+
[, channels] = struct.get_int('channels');
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const buffer = sample.get_buffer();
|
|
61
|
+
if (!buffer) continue;
|
|
62
|
+
|
|
63
|
+
const [ok, mapInfo] = buffer.map(Gst.MapFlags.READ);
|
|
64
|
+
if (ok) {
|
|
65
|
+
// Copy data — mapInfo.data is only valid until unmap
|
|
66
|
+
chunks.push(new Uint8Array(mapInfo.data));
|
|
67
|
+
buffer.unmap(mapInfo);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
pipeline.set_state(Gst.State.NULL);
|
|
72
|
+
|
|
73
|
+
if (sampleRate === 0 || channels === 0) {
|
|
74
|
+
throw new DOMException('Unable to decode audio data', 'EncodingError');
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Concatenate chunks into a single interleaved Float32 buffer
|
|
78
|
+
let totalBytes = 0;
|
|
79
|
+
for (const c of chunks) totalBytes += c.length;
|
|
80
|
+
const totalFrames = totalBytes / (4 * channels);
|
|
81
|
+
|
|
82
|
+
const audioBuffer = new AudioBuffer({
|
|
83
|
+
numberOfChannels: channels,
|
|
84
|
+
length: totalFrames,
|
|
85
|
+
sampleRate,
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
// De-interleave into per-channel Float32Arrays
|
|
89
|
+
let offset = 0;
|
|
90
|
+
for (const chunk of chunks) {
|
|
91
|
+
const f32 = new Float32Array(chunk.buffer, chunk.byteOffset, chunk.length / 4);
|
|
92
|
+
const framesInChunk = f32.length / channels;
|
|
93
|
+
for (let frame = 0; frame < framesInChunk; frame++) {
|
|
94
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
95
|
+
audioBuffer._channelData[ch][offset + frame] = f32[frame * channels + ch];
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
offset += framesInChunk;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
return audioBuffer;
|
|
102
|
+
}
|
package/src/gst-init.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
// Lazy GStreamer initialization — call ensureGstInit() before any Gst API usage.
|
|
2
|
+
// Reference: GStreamer 1.0 via gi://Gst
|
|
3
|
+
|
|
4
|
+
import Gst from 'gi://Gst?version=1.0';
|
|
5
|
+
|
|
6
|
+
let initialized = false;
|
|
7
|
+
|
|
8
|
+
export function ensureGstInit(): void {
|
|
9
|
+
if (!initialized) {
|
|
10
|
+
Gst.init(null);
|
|
11
|
+
initialized = true;
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
export { Gst };
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
// GStreamer playback pipeline: AudioBuffer PCM → audio output
|
|
2
|
+
//
|
|
3
|
+
// Pipeline: appsrc(F32LE) → audioconvert → volume → autoaudiosink
|
|
4
|
+
// Each GstPlayer instance is single-use (matches W3C AudioBufferSourceNode).
|
|
5
|
+
//
|
|
6
|
+
// Reference: GStreamer 1.0 via gi://Gst
|
|
7
|
+
|
|
8
|
+
import { ensureGstInit, Gst } from './gst-init.js';
|
|
9
|
+
import type { AudioBuffer } from './audio-buffer.js';
|
|
10
|
+
|
|
11
|
+
// Force GstApp typelib load
|
|
12
|
+
import GstApp from 'gi://GstApp?version=1.0';
|
|
13
|
+
void GstApp;
|
|
14
|
+
|
|
15
|
+
/**
 * Configuration for one GstPlayer run (one AudioBufferSourceNode.start() call).
 */
export interface GstPlayerOptions {
  audioBuffer: AudioBuffer; // PCM source; per-channel Float32 data is read via _channelData
  volume: number; // linear gain — clamped to [0, 10] by GstPlayer
  loop: boolean; // when true, playback restarts on EOS
  offset: number; // start offset in seconds
  duration?: number; // play duration in seconds (undefined = full)
  playbackRate: number; // 1.0 = normal speed; other values applied via a rate seek
  onEnded: () => void; // invoked exactly once when playback stops (EOS, error, or stop())
}
|
|
24
|
+
|
|
25
|
+
/**
 * Manages a single GStreamer playback pipeline for one AudioBufferSourceNode.start() call.
 *
 * Lifecycle: the constructor interleaves the AudioBuffer's PCM, builds an
 * appsrc → audioconvert → volume → autoaudiosink pipeline, pushes the data,
 * and starts playing. Each instance is single-use; once `ended` is true the
 * pipeline has been torn down and the instance is inert.
 */
export class GstPlayer {
  private _pipeline: any = null; // Gst pipeline element, null after cleanup
  private _volumeElement: any = null; // the 'volume name=vol' element, for live gain changes
  private _busWatchId: number | null = null; // id returned by bus.add_watch()
  private _ended = false; // set exactly once by _fireEnded()
  private _loop: boolean;
  private _onEnded: () => void;
  private _audioBuffer: AudioBuffer;

  constructor(options: GstPlayerOptions) {
    ensureGstInit();
    this._loop = options.loop;
    this._onEnded = options.onEnded;
    this._audioBuffer = options.audioBuffer;

    const { audioBuffer, volume, offset, duration, playbackRate } = options;
    const sr = audioBuffer.sampleRate;
    const ch = audioBuffer.numberOfChannels;

    // Build interleaved PCM data (applies offset/duration trimming)
    const pcmData = this._interleave(audioBuffer, offset, duration);
    if (pcmData.length === 0) {
      // Empty buffer — fire ended immediately; no pipeline is ever created
      this._fireEnded();
      return;
    }

    // Build pipeline — format=3 (TIME) ensures downstream gets TIME-based
    // segments, preventing gst_segment_to_stream_time assertion failures.
    const capsStr = `audio/x-raw,format=F32LE,rate=${sr},channels=${ch},layout=interleaved`;
    const desc = `appsrc name=src caps="${capsStr}" format=3 ! audioconvert ! volume name=vol ! autoaudiosink`;
    this._pipeline = Gst.parse_launch(desc);
    this._volumeElement = this._pipeline.get_by_name('vol');
    const appsrc = this._pipeline.get_by_name('src')!;

    // Set volume (clamped to [0, 10])
    this._volumeElement.set_property('volume', Math.max(0, Math.min(volume, 10)));

    // Set up bus watch (priority 0) for EOS/ERROR messages
    const bus = this._pipeline.get_bus();
    this._busWatchId = bus.add_watch(0, (_bus: any, msg: any) => {
      if (msg.type === Gst.MessageType.EOS) {
        if (this._loop && !this._ended) {
          // Restart: push data again
          // NOTE(review): _restartPlayback ignores these args and only seeks —
          // TODO verify looping actually replays after end_of_stream().
          this._restartPlayback(appsrc, pcmData);
        } else {
          this._fireEnded();
        }
      } else if (msg.type === Gst.MessageType.ERROR) {
        // Errors end playback the same way EOS does; no error is surfaced
        this._fireEnded();
      }
      return true; // keep watching
    });

    // Push PCM data with proper timestamps for TIME-format segments
    const gstBuf = Gst.Buffer.new_wrapped(pcmData);
    const totalFrames = pcmData.length / (4 * ch); // 4 bytes per Float32 sample
    gstBuf.pts = 0;
    // Number(Gst.SECOND): SECOND may arrive as a non-number (e.g. bigint) from GI
    gstBuf.duration = Math.floor((totalFrames / sr) * Number(Gst.SECOND));
    appsrc.push_buffer(gstBuf);
    appsrc.end_of_stream();

    // Apply playback rate if not 1.0 — pause first, then seek with the rate
    if (playbackRate !== 1.0) {
      this._pipeline.set_state(Gst.State.PAUSED);
      // Seek with rate change
      this._pipeline.seek(
        playbackRate,
        Gst.Format.TIME,
        Gst.SeekFlags.FLUSH | Gst.SeekFlags.ACCURATE,
        Gst.SeekType.SET, 0,
        Gst.SeekType.NONE, -1
      );
    }

    this._pipeline.set_state(Gst.State.PLAYING);
  }

  /** Update volume on a running pipeline (no-op once ended). Clamped to [0, 10]. */
  setVolume(value: number): void {
    if (this._volumeElement && !this._ended) {
      this._volumeElement.set_property('volume', Math.max(0, Math.min(value, 10)));
    }
  }

  /** Update loop flag; consulted on the next EOS message. */
  setLoop(value: boolean): void {
    this._loop = value;
  }

  /** Stop playback and clean up. Idempotent; fires onEnded at most once. */
  stop(): void {
    if (this._ended) return;
    this._fireEnded();
  }

  /** Whether playback has ended */
  get ended(): boolean {
    return this._ended;
  }

  // NOTE(review): parameters are unused; looping relies solely on rewinding
  // via seek. Whether a flushing seek replays appsrc data after
  // end_of_stream() is doubtful — confirm against GstAppSrc behavior.
  private _restartPlayback(appsrc: any, pcmData: Uint8Array): void {
    // For looping: seek pipeline to start
    if (this._pipeline) {
      this._pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, 0);
    }
  }

  // Single exit point: marks ended, tears down the pipeline, then notifies.
  private _fireEnded(): void {
    if (this._ended) return;
    this._ended = true;
    this._cleanup();
    this._onEnded();
  }

  // Release GStreamer resources; safe to call when nothing was ever built.
  private _cleanup(): void {
    if (this._pipeline) {
      this._pipeline.set_state(Gst.State.NULL);
      this._pipeline = null;
    }
    if (this._busWatchId !== null) {
      // Bus watch is automatically removed when pipeline is disposed
      this._busWatchId = null;
    }
    this._volumeElement = null;
  }

  /**
   * Interleave AudioBuffer's per-channel Float32Arrays into a single Uint8Array.
   * Applies offset (seconds) and optional duration (seconds).
   * Returns an empty array when the trimmed range contains no frames.
   */
  private _interleave(buf: AudioBuffer, offsetSec: number, durationSec?: number): Uint8Array {
    const ch = buf.numberOfChannels;
    // Clamp the start so an offset past the end yields zero frames, not negatives
    const startFrame = Math.min(Math.floor(offsetSec * buf.sampleRate), buf.length);
    const maxFrames = buf.length - startFrame;
    const frames = durationSec !== undefined
      ? Math.min(Math.floor(durationSec * buf.sampleRate), maxFrames)
      : maxFrames;

    if (frames <= 0) return new Uint8Array(0);

    const interleaved = new Float32Array(frames * ch);
    for (let frame = 0; frame < frames; frame++) {
      for (let c = 0; c < ch; c++) {
        interleaved[frame * ch + c] = buf._channelData[c][startFrame + frame];
      }
    }

    // Byte view over the same backing store (no copy)
    return new Uint8Array(interleaved.buffer);
  }
}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
// HTMLAudioElement — format detection + basic playback via GStreamer playbin.
|
|
2
|
+
// Used by Excalibur.js for canPlayType() format sniffing.
|
|
3
|
+
//
|
|
4
|
+
// Reference: https://developer.mozilla.org/en-US/docs/Web/API/HTMLAudioElement
|
|
5
|
+
|
|
6
|
+
import { ensureGstInit, Gst } from './gst-init.js';
|
|
7
|
+
|
|
8
|
+
// GStreamer-supported MIME types (common on GNOME systems)
// NOTE(review): actual support depends on which GStreamer plugin sets are
// installed at runtime — which is why canPlayType() only ever answers 'maybe'.
const SUPPORTED_TYPES = new Set([
  'audio/mpeg',
  'audio/mp3',
  'audio/wav',
  'audio/x-wav',
  'audio/ogg',
  'audio/webm',
  'audio/flac',
  'audio/x-flac',
  'audio/aac',
  'audio/mp4',
]);
|
|
21
|
+
|
|
22
|
+
export class HTMLAudioElement {
|
|
23
|
+
src = '';
|
|
24
|
+
volume = 1;
|
|
25
|
+
loop = false;
|
|
26
|
+
paused = true;
|
|
27
|
+
currentTime = 0;
|
|
28
|
+
duration = 0;
|
|
29
|
+
readyState = 0;
|
|
30
|
+
|
|
31
|
+
private _pipeline: any = null;
|
|
32
|
+
|
|
33
|
+
canPlayType(type: string): CanPlayTypeResult {
|
|
34
|
+
// Strip codecs parameter: "audio/ogg; codecs=vorbis" → "audio/ogg"
|
|
35
|
+
const mime = type.split(';')[0].trim().toLowerCase();
|
|
36
|
+
return SUPPORTED_TYPES.has(mime) ? 'maybe' : '';
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
play(): Promise<void> {
|
|
40
|
+
if (!this.src) return Promise.resolve();
|
|
41
|
+
|
|
42
|
+
ensureGstInit();
|
|
43
|
+
this._cleanup();
|
|
44
|
+
|
|
45
|
+
this._pipeline = Gst.ElementFactory.make('playbin', 'player');
|
|
46
|
+
if (!this._pipeline) return Promise.resolve();
|
|
47
|
+
|
|
48
|
+
this._pipeline.set_property('uri', this.src);
|
|
49
|
+
this._pipeline.set_property('volume', this.volume);
|
|
50
|
+
this._pipeline.set_state(Gst.State.PLAYING);
|
|
51
|
+
this.paused = false;
|
|
52
|
+
|
|
53
|
+
return Promise.resolve();
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
pause(): void {
|
|
57
|
+
if (this._pipeline) {
|
|
58
|
+
this._pipeline.set_state(Gst.State.PAUSED);
|
|
59
|
+
this.paused = true;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
load(): void {
|
|
64
|
+
this._cleanup();
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
addEventListener(_type: string, _listener: any): void {}
|
|
68
|
+
removeEventListener(_type: string, _listener: any): void {}
|
|
69
|
+
|
|
70
|
+
private _cleanup(): void {
|
|
71
|
+
if (this._pipeline) {
|
|
72
|
+
this._pipeline.set_state(Gst.State.NULL);
|
|
73
|
+
this._pipeline = null;
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Web Audio API for GJS — backed by GStreamer 1.0.
|
|
2
|
+
//
|
|
3
|
+
// This module has no side effects. Importing @gjsify/webaudio gives
|
|
4
|
+
// named access to Web Audio classes but does NOT register globals.
|
|
5
|
+
// Use @gjsify/webaudio/register to set globalThis.AudioContext, etc.
|
|
6
|
+
|
|
7
|
+
export { AudioContext } from './audio-context.js';
|
|
8
|
+
export { AudioBuffer } from './audio-buffer.js';
|
|
9
|
+
export { AudioNode } from './audio-node.js';
|
|
10
|
+
export { AudioDestinationNode } from './audio-destination-node.js';
|
|
11
|
+
export { AudioBufferSourceNode } from './audio-buffer-source-node.js';
|
|
12
|
+
export { GainNode } from './gain-node.js';
|
|
13
|
+
export { AudioParam } from './audio-param.js';
|
|
14
|
+
export { HTMLAudioElement } from './html-audio-element.js';
|
package/src/register.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
// Side-effect module: registers Web Audio API globals on GJS.
|
|
2
|
+
// On Node.js the alias layer routes this to @gjsify/empty.
|
|
3
|
+
|
|
4
|
+
import { AudioContext, HTMLAudioElement } from './index.js';
|
|
5
|
+
|
|
6
|
+
if (typeof (globalThis as any).AudioContext === 'undefined') {
|
|
7
|
+
(globalThis as any).AudioContext = AudioContext;
|
|
8
|
+
}
|
|
9
|
+
if (typeof (globalThis as any).webkitAudioContext === 'undefined') {
|
|
10
|
+
(globalThis as any).webkitAudioContext = AudioContext;
|
|
11
|
+
}
|
|
12
|
+
if (typeof (globalThis as any).Audio === 'undefined') {
|
|
13
|
+
(globalThis as any).Audio = HTMLAudioElement;
|
|
14
|
+
}
|
|
15
|
+
if (typeof (globalThis as any).HTMLAudioElement === 'undefined') {
|
|
16
|
+
(globalThis as any).HTMLAudioElement = HTMLAudioElement;
|
|
17
|
+
}
|