@gjsify/webaudio 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/lib/esm/audio-buffer-source-node.js +63 -0
  2. package/lib/esm/audio-buffer.js +37 -0
  3. package/lib/esm/audio-context.js +94 -0
  4. package/lib/esm/audio-destination-node.js +10 -0
  5. package/lib/esm/audio-node.js +33 -0
  6. package/lib/esm/audio-param.js +78 -0
  7. package/lib/esm/gain-node.js +19 -0
  8. package/lib/esm/gst-decoder.js +64 -0
  9. package/lib/esm/gst-init.js +12 -0
  10. package/lib/esm/gst-player.js +125 -0
  11. package/lib/esm/html-audio-element.js +61 -0
  12. package/lib/esm/index.js +18 -0
  13. package/lib/esm/register.js +13 -0
  14. package/lib/types/audio-buffer-source-node.d.ts +18 -0
  15. package/lib/types/audio-buffer.d.ts +17 -0
  16. package/lib/types/audio-context.d.ts +34 -0
  17. package/lib/types/audio-destination-node.d.ts +5 -0
  18. package/lib/types/audio-node.d.ts +12 -0
  19. package/lib/types/audio-param.d.ts +20 -0
  20. package/lib/types/gain-node.d.ts +9 -0
  21. package/lib/types/gst-decoder.d.ts +9 -0
  22. package/lib/types/gst-init.d.ts +3 -0
  23. package/lib/types/gst-player.d.ts +39 -0
  24. package/lib/types/html-audio-element.d.ts +17 -0
  25. package/lib/types/index.d.ts +8 -0
  26. package/lib/types/register.d.ts +1 -0
  27. package/lib/types/webaudio.spec.d.ts +2 -0
  28. package/package.json +53 -0
  29. package/src/audio-buffer-source-node.ts +84 -0
  30. package/src/audio-buffer.ts +47 -0
  31. package/src/audio-context.ts +102 -0
  32. package/src/audio-destination-node.ts +12 -0
  33. package/src/audio-node.ts +37 -0
  34. package/src/audio-param.ts +103 -0
  35. package/src/gain-node.ts +23 -0
  36. package/src/gst-decoder.ts +102 -0
  37. package/src/gst-init.ts +15 -0
  38. package/src/gst-player.ts +178 -0
  39. package/src/html-audio-element.ts +76 -0
  40. package/src/index.ts +14 -0
  41. package/src/register.ts +17 -0
  42. package/src/test.mts +5 -0
  43. package/src/webaudio.spec.ts +351 -0
  44. package/tsconfig.json +36 -0
  45. package/tsconfig.tsbuildinfo +1 -0
@@ -0,0 +1,63 @@
1
+ import { AudioNode } from "./audio-node.js";
2
+ import { AudioParam } from "./audio-param.js";
3
+ import { GstPlayer } from "./gst-player.js";
4
+ import { GainNode } from "./gain-node.js";
5
/**
 * One-shot PCM source node. Playback is delegated to an internal GstPlayer;
 * `start()` may be called at most once, per the Web Audio contract.
 */
class AudioBufferSourceNode extends AudioNode {
  buffer = null;
  loop = false;
  loopStart = 0;
  loopEnd = 0;
  playbackRate;
  onended = null;
  _player = null;
  _started = false;

  constructor() {
    // Zero inputs, one output: this node only produces audio.
    super(0, 1);
    // Nominal playback-rate range [1/16, 16], default 1 (normal speed).
    this.playbackRate = new AudioParam(1, 0.0625, 16);
  }

  /**
   * Begin playback immediately.
   * @param when ignored by this shim (no scheduling)
   * @param offset start position within the buffer, in seconds
   * @param duration optional playback length, in seconds
   * @throws DOMException "InvalidStateError" when called a second time
   */
  start(when = 0, offset = 0, duration) {
    if (this._started) {
      throw new DOMException("AudioBufferSourceNode can only be started once", "InvalidStateError");
    }
    this._started = true;
    if (!this.buffer) return;

    const gain = this._findGainNode();
    this._player = new GstPlayer({
      audioBuffer: this.buffer,
      // Seed the pipeline volume from the connected GainNode, if any.
      volume: gain ? gain.gain.value : 1,
      loop: this.loop,
      offset,
      duration,
      playbackRate: this.playbackRate.value,
      onEnded: () => {
        // Deregister from volume updates, drop the player, notify listeners.
        gain?._activePlayers.delete(this._player);
        this._player = null;
        this.onended?.();
      }
    });
    if (gain && this._player) {
      gain._activePlayers.add(this._player);
    }
  }

  /** Stop the underlying player, if playback is in progress. */
  stop(_when = 0) {
    this._player?.stop();
  }

  /** Walk the output chain to find a GainNode */
  _findGainNode() {
    // Only the first two levels of downstream connections are inspected.
    for (const direct of this._outputs) {
      if (direct instanceof GainNode) return direct;
      for (const indirect of direct._outputs) {
        if (indirect instanceof GainNode) return indirect;
      }
    }
    return null;
  }
}

export {
  AudioBufferSourceNode
};
@@ -0,0 +1,37 @@
1
/**
 * In-memory PCM audio asset: one Float32Array of samples per channel.
 */
class AudioBuffer {
  sampleRate;
  length;
  duration;
  numberOfChannels;
  /** @internal raw per-channel sample storage */
  _channelData;

  /**
   * @param options object with numberOfChannels, length (sample frames) and
   *   sampleRate (Hz)
   */
  constructor(options) {
    const { sampleRate, length, numberOfChannels } = options;
    this.sampleRate = sampleRate;
    this.length = length;
    this.numberOfChannels = numberOfChannels;
    this.duration = length / sampleRate;
    // One zero-filled Float32Array per channel.
    this._channelData = Array.from(
      { length: numberOfChannels },
      () => new Float32Array(length)
    );
  }

  /**
   * Return the backing store for one channel (not a copy).
   * @throws RangeError when the channel index is out of range
   */
  getChannelData(channel) {
    if (channel < 0 || channel >= this.numberOfChannels) {
      throw new RangeError(`channel index ${channel} out of range [0, ${this.numberOfChannels})`);
    }
    return this._channelData[channel];
  }

  /** Copy samples from a channel (starting at bufferOffset) into destination. */
  copyFromChannel(destination, channelNumber, bufferOffset = 0) {
    const source = this.getChannelData(channelNumber);
    // Copy as many frames as both sides can hold.
    const count = Math.min(destination.length, source.length - bufferOffset);
    destination.set(source.subarray(bufferOffset, bufferOffset + count));
  }

  /** Copy samples from source into a channel, starting at bufferOffset. */
  copyToChannel(source, channelNumber, bufferOffset = 0) {
    const target = this.getChannelData(channelNumber);
    const count = Math.min(source.length, target.length - bufferOffset);
    target.set(source.subarray(0, count), bufferOffset);
  }
}

export {
  AudioBuffer
};
@@ -0,0 +1,94 @@
1
+ import GLib from "gi://GLib?version=2.0";
2
+ import { ensureGstInit } from "./gst-init.js";
3
+ import { AudioBuffer } from "./audio-buffer.js";
4
+ import { AudioNode } from "./audio-node.js";
5
+ import { AudioDestinationNode } from "./audio-destination-node.js";
6
+ import { AudioBufferSourceNode } from "./audio-buffer-source-node.js";
7
+ import { GainNode } from "./gain-node.js";
8
+ import { decodeAudioDataSync } from "./gst-decoder.js";
9
/**
 * Web Audio AudioContext shim backed by GStreamer. The clock comes from
 * GLib's monotonic timer; actual signal processing is delegated to GstPlayer
 * pipelines created by the source nodes.
 */
class AudioContext {
  state = "suspended";
  sampleRate = 44100;
  destination;
  listener = {}; // AudioListener stub — spatialization is not implemented
  _startTime;

  constructor() {
    ensureGstInit();
    // Monotonic microseconds at creation; basis for currentTime.
    this._startTime = GLib.get_monotonic_time();
    this.destination = new AudioDestinationNode();
  }

  /** Monotonically increasing time in seconds since context creation. */
  get currentTime() {
    return (GLib.get_monotonic_time() - this._startTime) / 1e6;
  }

  createGain() {
    return new GainNode();
  }

  createBufferSource() {
    return new AudioBufferSourceNode();
  }

  createBuffer(numberOfChannels, length, sampleRate) {
    return new AudioBuffer({ numberOfChannels, length, sampleRate });
  }

  /**
   * Decode encoded audio data (MP3, WAV, OGG, etc.) into an AudioBuffer.
   * Uses GStreamer's decodebin for format-agnostic decoding.
   * NOTE: decoding is synchronous here, so the success/error callback fires
   * before the returned promise settles.
   */
  decodeAudioData(arrayBuffer, successCallback, errorCallback) {
    try {
      const buffer = decodeAudioDataSync(arrayBuffer);
      successCallback?.(buffer);
      return Promise.resolve(buffer);
    } catch (err) {
      const domErr = err instanceof DOMException ? err : new DOMException("Unable to decode audio data", "EncodingError");
      errorCallback?.(domErr);
      return Promise.reject(domErr);
    }
  }

  async resume() {
    // Per the Web Audio spec, resuming a closed context must reject with
    // InvalidStateError (previously this silently flipped state back).
    if (this.state === "closed") {
      throw new DOMException("Cannot resume a closed AudioContext", "InvalidStateError");
    }
    this.state = "running";
  }

  async suspend() {
    // Same spec rule applies to suspending a closed context.
    if (this.state === "closed") {
      throw new DOMException("Cannot suspend a closed AudioContext", "InvalidStateError");
    }
    this.state = "suspended";
  }

  async close() {
    this.state = "closed";
  }

  // Stub methods for APIs not yet backed by GStreamer (Phase 3)
  createAnalyser() {
    // Inert object with the shape callers usually touch; data getters no-op.
    return {
      connect: () => {
      },
      disconnect: () => {
      },
      fftSize: 2048,
      frequencyBinCount: 1024,
      getByteFrequencyData: () => {
      },
      getFloatFrequencyData: () => {
      }
    };
  }
  createDynamicsCompressor() {
    return new AudioNode();
  }
  createBiquadFilter() {
    return new AudioNode();
  }
  createConvolver() {
    return new AudioNode();
  }
  createPanner() {
    return new AudioNode();
  }
  createStereoPanner() {
    return new AudioNode();
  }
  addEventListener(_type, _listener) {
    // Events (e.g. statechange) are not emitted by this shim.
  }
  removeEventListener(_type, _listener) {
  }
}

export {
  AudioContext
};
@@ -0,0 +1,10 @@
1
+ import { AudioNode } from "./audio-node.js";
2
/**
 * Terminal sink of the audio graph: one input, no outputs. Actual audio
 * output happens inside GstPlayer pipelines; this node is a graph marker.
 */
class AudioDestinationNode extends AudioNode {
  maxChannelCount;

  constructor() {
    // One input, zero outputs — nothing can be connected downstream.
    super(1, 0);
    this.maxChannelCount = 2;
  }
}

export {
  AudioDestinationNode
};
@@ -0,0 +1,33 @@
1
/**
 * Base class for all graph nodes. Tracks connections in both directions so
 * source nodes can walk downstream (e.g. to locate a GainNode).
 */
class AudioNode {
  /** @internal downstream connections */
  _outputs = new Set();
  /** @internal upstream connections */
  _inputs = new Set();
  numberOfInputs;
  numberOfOutputs;
  channelCount;

  constructor(numberOfInputs = 1, numberOfOutputs = 1) {
    this.numberOfInputs = numberOfInputs;
    this.numberOfOutputs = numberOfOutputs;
    this.channelCount = 2;
  }

  /** Connect this node to `destination`; returns it so calls can be chained. */
  connect(destination) {
    this._outputs.add(destination);
    destination._inputs.add(this);
    return destination;
  }

  /** Disconnect from one destination, or from every destination when omitted. */
  disconnect(destination) {
    if (!destination) {
      // Remove the back-references first, then forget all outputs.
      this._outputs.forEach((node) => node._inputs.delete(this));
      this._outputs.clear();
      return;
    }
    this._outputs.delete(destination);
    destination._inputs.delete(this);
  }
}

export {
  AudioNode
};
@@ -0,0 +1,78 @@
1
+ import GLib from "gi://GLib?version=2.0";
2
/**
 * Web Audio AudioParam shim. Scheduling is approximated: ramps apply
 * immediately, except setTargetAtTime which steps toward its target on a
 * GLib timeout. Every value change is forwarded through `_onChange`.
 */
class AudioParam {
  defaultValue;
  minValue;
  maxValue;
  /** @internal callback invoked when value changes */
  _onChange = null;
  _value;
  _rampTimerId = null;

  constructor(defaultValue = 0, minValue = -34028235e31, maxValue = 34028235e31) {
    this.defaultValue = defaultValue;
    this.minValue = minValue;
    this.maxValue = maxValue;
    this._value = defaultValue;
  }

  get value() {
    return this._value;
  }

  set value(next) {
    // A direct write cancels any running ramp, clamps, and notifies.
    this._cancelRamp();
    this._value = Math.max(this.minValue, Math.min(this.maxValue, next));
    this._onChange?.(this._value);
  }

  /** Applies immediately; start time is ignored by this shim. */
  setValueAtTime(value, _startTime) {
    this.value = value;
    return this;
  }

  /** Applies immediately; end time is ignored by this shim. */
  linearRampToValueAtTime(value, _endTime) {
    this.value = value;
    return this;
  }

  /** Applies immediately; end time is ignored by this shim. */
  exponentialRampToValueAtTime(value, _endTime) {
    this.value = value;
    return this;
  }

  /**
   * Exponentially approach `target`, approximated with a periodic GLib
   * timeout that steps the value until it lands within 1e-3 of the target.
   */
  setTargetAtTime(target, _startTime, timeConstant) {
    this._cancelRamp();
    if (timeConstant <= 0) {
      this.value = target;
      return this;
    }
    // Tick period: ~timeConstant/10, but never faster than 10 ms.
    const stepMs = Math.max(10, Math.round(timeConstant * 100));
    this._rampTimerId = GLib.timeout_add(GLib.PRIORITY_DEFAULT, stepMs, () => {
      const remaining = target - this._value;
      if (Math.abs(remaining) < 1e-3) {
        // Close enough: snap to the target and stop the timer.
        this._value = target;
        this._onChange?.(this._value);
        this._rampTimerId = null;
        return GLib.SOURCE_REMOVE;
      }
      // First-order low-pass step: fraction of the gap covered per tick.
      this._value += remaining * (1 - Math.exp(-stepMs / (timeConstant * 1e3)));
      this._onChange?.(this._value);
      return GLib.SOURCE_CONTINUE;
    });
    return this;
  }

  /** Not implemented; kept for API compatibility. */
  setValueCurveAtTime(_values, _startTime, _duration) {
    return this;
  }

  cancelScheduledValues(_startTime) {
    this._cancelRamp();
    return this;
  }

  cancelAndHoldAtTime(_cancelTime) {
    this._cancelRamp();
    return this;
  }

  /** @internal stop any in-flight setTargetAtTime timer. */
  _cancelRamp() {
    if (this._rampTimerId !== null) {
      GLib.source_remove(this._rampTimerId);
      this._rampTimerId = null;
    }
  }
}

export {
  AudioParam
};
@@ -0,0 +1,19 @@
1
+ import { AudioNode } from "./audio-node.js";
2
+ import { AudioParam } from "./audio-param.js";
3
/**
 * Gain node: exposes a `gain` AudioParam whose changes are mirrored onto
 * every GstPlayer currently registered in `_activePlayers`.
 */
class GainNode extends AudioNode {
  gain;
  /** @internal active players that need volume updates */
  _activePlayers = new Set();

  constructor() {
    super(1, 1);
    // Nominal gain range [0, 10], default unity.
    this.gain = new AudioParam(1, 0, 10);
    // Push every gain change to all currently playing pipelines.
    this.gain._onChange = (value) => {
      this._activePlayers.forEach((player) => player.setVolume(value));
    };
  }
}

export {
  GainNode
};
@@ -0,0 +1,64 @@
1
+ import { ensureGstInit, Gst } from "./gst-init.js";
2
+ import { AudioBuffer } from "./audio-buffer.js";
3
+ import GstApp from "gi://GstApp?version=1.0";
4
+ void GstApp;
5
// appsrc receives the encoded bytes; decodebin auto-selects a demuxer/decoder;
// audioconvert/audioresample + the capsfilter normalize the output to
// interleaved 32-bit float PCM that appsink hands back to us.
const PIPELINE_DESC = "appsrc name=src ! decodebin ! audioconvert ! audioresample ! capsfilter caps=audio/x-raw,format=F32LE,layout=interleaved ! appsink name=sink sync=false";
/**
 * Synchronously decode encoded audio (MP3, WAV, OGG, ...) from an ArrayBuffer
 * into an AudioBuffer with de-interleaved Float32 channel data.
 * Blocks the calling thread while pulling samples from the pipeline.
 * Throws DOMException ("EncodingError") when no decodable PCM was produced.
 */
function decodeAudioDataSync(arrayBuffer) {
  ensureGstInit();
  const pipeline = Gst.parse_launch(PIPELINE_DESC);
  const appsrc = pipeline.get_by_name("src");
  const appsink = pipeline.get_by_name("sink");
  pipeline.set_state(Gst.State.PLAYING);
  // Feed the entire encoded payload in one buffer, then signal EOS so the
  // pipeline drains.
  const data = new Uint8Array(arrayBuffer);
  appsrc.push_buffer(Gst.Buffer.new_wrapped(data));
  appsrc.end_of_stream();
  const chunks = [];
  let sampleRate = 0;
  let channels = 0;
  while (true) {
    // Blocking pull with a 2-second timeout (Gst.SECOND is in nanoseconds);
    // a null sample means timeout or EOS — either way we stop collecting.
    const sample = appsink.try_pull_sample(2 * Number(Gst.SECOND));
    if (!sample) break;
    if (sampleRate === 0) {
      // Read the negotiated rate/channels once, from the first sample's caps.
      const caps = sample.get_caps();
      if (caps) {
        const struct = caps.get_structure(0);
        [, sampleRate] = struct.get_int("rate");
        [, channels] = struct.get_int("channels");
      }
    }
    const buffer = sample.get_buffer();
    if (!buffer) continue;
    const [ok, mapInfo] = buffer.map(Gst.MapFlags.READ);
    if (ok) {
      // Copy the mapped bytes before unmapping; assumes new Uint8Array(data)
      // copies rather than aliasing the mapped memory — TODO confirm for the
      // GJS binding in use.
      chunks.push(new Uint8Array(mapInfo.data));
      buffer.unmap(mapInfo);
    }
  }
  pipeline.set_state(Gst.State.NULL);
  if (sampleRate === 0 || channels === 0) {
    throw new DOMException("Unable to decode audio data", "EncodingError");
  }
  // Total frame count across all chunks (4 bytes per F32 sample per channel).
  let totalBytes = 0;
  for (const c of chunks) totalBytes += c.length;
  const totalFrames = totalBytes / (4 * channels);
  const audioBuffer = new AudioBuffer({
    numberOfChannels: channels,
    length: totalFrames,
    sampleRate
  });
  // De-interleave: chunk data is frame-major [ch0, ch1, ch0, ch1, ...];
  // AudioBuffer stores one contiguous Float32Array per channel.
  let offset = 0;
  for (const chunk of chunks) {
    const f32 = new Float32Array(chunk.buffer, chunk.byteOffset, chunk.length / 4);
    const framesInChunk = f32.length / channels;
    for (let frame = 0; frame < framesInChunk; frame++) {
      for (let ch = 0; ch < channels; ch++) {
        audioBuffer._channelData[ch][offset + frame] = f32[frame * channels + ch];
      }
    }
    offset += framesInChunk;
  }
  return audioBuffer;
}
export {
  decodeAudioDataSync
};
@@ -0,0 +1,12 @@
1
+ import Gst from "gi://Gst?version=1.0";
2
+ let initialized = false;
3
+ function ensureGstInit() {
4
+ if (!initialized) {
5
+ Gst.init(null);
6
+ initialized = true;
7
+ }
8
+ }
9
+ export {
10
+ Gst,
11
+ ensureGstInit
12
+ };
@@ -0,0 +1,125 @@
1
+ import { ensureGstInit, Gst } from "./gst-init.js";
2
+ import GstApp from "gi://GstApp?version=1.0";
3
+ void GstApp;
4
/**
 * Plays one AudioBuffer through a GStreamer pipeline:
 * appsrc (raw F32LE PCM) -> audioconvert -> volume -> autoaudiosink.
 * The owner is notified exactly once via `onEnded` when playback finishes,
 * fails, or is stopped.
 */
class GstPlayer {
  _pipeline = null;
  _volumeElement = null;
  _busWatchId = null;
  _ended = false;
  _loop;
  _onEnded;
  _audioBuffer;

  /**
   * Build and start the pipeline.
   * @param options.audioBuffer decoded PCM to play
   * @param options.volume initial volume, clamped to [0, 10]
   * @param options.loop whether to rewind on EOS
   * @param options.offset start position in seconds
   * @param options.duration optional playback length in seconds
   * @param options.playbackRate playback speed (1 = normal)
   * @param options.onEnded invoked exactly once when playback ends
   */
  constructor(options) {
    ensureGstInit();
    this._loop = options.loop;
    this._onEnded = options.onEnded;
    this._audioBuffer = options.audioBuffer;
    const { audioBuffer, volume, offset, duration, playbackRate } = options;
    const sr = audioBuffer.sampleRate;
    const ch = audioBuffer.numberOfChannels;
    const pcmData = this._interleave(audioBuffer, offset, duration);
    if (pcmData.length === 0) {
      // Nothing to play (offset past end, zero duration): report completion
      // immediately without building a pipeline.
      this._fireEnded();
      return;
    }
    // format=3 is GST_FORMAT_TIME: appsrc timestamps are nanoseconds.
    const capsStr = `audio/x-raw,format=F32LE,rate=${sr},channels=${ch},layout=interleaved`;
    const desc = `appsrc name=src caps="${capsStr}" format=3 ! audioconvert ! volume name=vol ! autoaudiosink`;
    this._pipeline = Gst.parse_launch(desc);
    this._volumeElement = this._pipeline.get_by_name("vol");
    const appsrc = this._pipeline.get_by_name("src");
    this._volumeElement.set_property("volume", Math.max(0, Math.min(volume, 10)));
    const bus = this._pipeline.get_bus();
    // Priority 0 == GLib.PRIORITY_DEFAULT (GLib is not imported here).
    this._busWatchId = bus.add_watch(0, (_bus, msg) => {
      if (msg.type === Gst.MessageType.EOS) {
        if (this._loop && !this._ended) {
          this._restartPlayback(appsrc, pcmData);
        } else {
          this._fireEnded();
        }
      } else if (msg.type === Gst.MessageType.ERROR) {
        this._fireEnded();
      }
      return true;
    });
    // Push the whole interleaved PCM payload as a single timestamped buffer.
    const gstBuf = Gst.Buffer.new_wrapped(pcmData);
    const totalFrames = pcmData.length / (4 * ch);
    gstBuf.pts = 0;
    gstBuf.duration = Math.floor(totalFrames / sr * Number(Gst.SECOND));
    appsrc.push_buffer(gstBuf);
    appsrc.end_of_stream();
    if (playbackRate !== 1) {
      // Rate changes are applied via a seek; pause first so the seek runs
      // against a prerolled pipeline.
      this._pipeline.set_state(Gst.State.PAUSED);
      this._pipeline.seek(
        playbackRate,
        Gst.Format.TIME,
        Gst.SeekFlags.FLUSH | Gst.SeekFlags.ACCURATE,
        Gst.SeekType.SET,
        0,
        Gst.SeekType.NONE,
        -1
      );
    }
    this._pipeline.set_state(Gst.State.PLAYING);
  }

  /** Update volume on a running pipeline (clamped to [0, 10]). */
  setVolume(value) {
    if (this._volumeElement && !this._ended) {
      this._volumeElement.set_property("volume", Math.max(0, Math.min(value, 10)));
    }
  }

  /** Update loop flag used at the next EOS. */
  setLoop(value) {
    this._loop = value;
  }

  /** Stop playback and clean up; onEnded still fires (once). */
  stop() {
    if (this._ended) return;
    this._fireEnded();
  }

  /** Whether playback has ended */
  get ended() {
    return this._ended;
  }

  /**
   * Loop support: flush-seek back to the start on EOS.
   * NOTE(review): appsrc has already received end_of_stream, so whether this
   * seek actually replays depends on the installed GStreamer version —
   * confirm on target systems. `appsrc`/`pcmData` are kept in the signature
   * for a future re-push implementation.
   */
  _restartPlayback(appsrc, pcmData) {
    if (this._pipeline) {
      this._pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, 0);
    }
  }

  /** Mark ended, tear down the pipeline, and notify the owner exactly once. */
  _fireEnded() {
    if (this._ended) return;
    this._ended = true;
    this._cleanup();
    this._onEnded();
  }

  _cleanup() {
    if (this._pipeline) {
      // Fix: actually detach the bus watch. Previously only the id was
      // discarded, leaking the watch GSource (and its closure) on every
      // playback.
      if (this._busWatchId !== null) {
        this._pipeline.get_bus().remove_watch();
        this._busWatchId = null;
      }
      this._pipeline.set_state(Gst.State.NULL);
      this._pipeline = null;
    }
    this._busWatchId = null;
    this._volumeElement = null;
  }

  /**
   * Interleave AudioBuffer's per-channel Float32Arrays into a single Uint8Array.
   * Applies offset (seconds) and optional duration (seconds).
   */
  _interleave(buf, offsetSec, durationSec) {
    const ch = buf.numberOfChannels;
    const startFrame = Math.min(Math.floor(offsetSec * buf.sampleRate), buf.length);
    const maxFrames = buf.length - startFrame;
    const frames = durationSec !== void 0 ? Math.min(Math.floor(durationSec * buf.sampleRate), maxFrames) : maxFrames;
    if (frames <= 0) return new Uint8Array(0);
    // Frame-major layout: [ch0, ch1, ..., ch0, ch1, ...] as raw F32LE bytes.
    const interleaved = new Float32Array(frames * ch);
    for (let frame = 0; frame < frames; frame++) {
      for (let c = 0; c < ch; c++) {
        interleaved[frame * ch + c] = buf._channelData[c][startFrame + frame];
      }
    }
    return new Uint8Array(interleaved.buffer);
  }
}

export {
  GstPlayer
};
@@ -0,0 +1,61 @@
1
+ import { ensureGstInit, Gst } from "./gst-init.js";
2
// MIME types the GStreamer backend is expected to handle.
const SUPPORTED_TYPES = new Set([
  "audio/mpeg",
  "audio/mp3",
  "audio/wav",
  "audio/x-wav",
  "audio/ogg",
  "audio/webm",
  "audio/flac",
  "audio/x-flac",
  "audio/aac",
  "audio/mp4"
]);

/**
 * Minimal HTMLAudioElement shim backed by a GStreamer `playbin` pipeline.
 * currentTime/duration/readyState are static placeholders here.
 */
class HTMLAudioElement {
  src = "";
  volume = 1;
  loop = false;
  paused = true;
  currentTime = 0;
  duration = 0;
  readyState = 0;
  _pipeline = null;

  /** Return "maybe" for MIME types in SUPPORTED_TYPES, "" otherwise. */
  canPlayType(type) {
    const mime = type.split(";")[0].trim().toLowerCase();
    return SUPPORTED_TYPES.has(mime) ? "maybe" : "";
  }

  /** Start (or restart) playback of `src` through a fresh playbin pipeline. */
  play() {
    if (!this.src) return Promise.resolve();
    ensureGstInit();
    this._cleanup();
    this._pipeline = Gst.ElementFactory.make("playbin", "player");
    if (!this._pipeline) return Promise.resolve();
    this._pipeline.set_property("uri", this.src);
    this._pipeline.set_property("volume", this.volume);
    this._pipeline.set_state(Gst.State.PLAYING);
    this.paused = false;
    return Promise.resolve();
  }

  /** Pause the pipeline without tearing it down. */
  pause() {
    if (!this._pipeline) return;
    this._pipeline.set_state(Gst.State.PAUSED);
    this.paused = true;
  }

  /** Discard any existing pipeline; playback restarts on the next play(). */
  load() {
    this._cleanup();
  }

  addEventListener(_type, _listener) {
    // Media events are not emitted by this shim.
  }

  removeEventListener(_type, _listener) {
  }

  /** @internal release the current pipeline, if any. */
  _cleanup() {
    if (!this._pipeline) return;
    this._pipeline.set_state(Gst.State.NULL);
    this._pipeline = null;
  }
}

export {
  HTMLAudioElement
};
@@ -0,0 +1,18 @@
1
+ import { AudioContext } from "./audio-context.js";
2
+ import { AudioBuffer } from "./audio-buffer.js";
3
+ import { AudioNode } from "./audio-node.js";
4
+ import { AudioDestinationNode } from "./audio-destination-node.js";
5
+ import { AudioBufferSourceNode } from "./audio-buffer-source-node.js";
6
+ import { GainNode } from "./gain-node.js";
7
+ import { AudioParam } from "./audio-param.js";
8
+ import { HTMLAudioElement } from "./html-audio-element.js";
9
+ export {
10
+ AudioBuffer,
11
+ AudioBufferSourceNode,
12
+ AudioContext,
13
+ AudioDestinationNode,
14
+ AudioNode,
15
+ AudioParam,
16
+ GainNode,
17
+ HTMLAudioElement
18
+ };
@@ -0,0 +1,13 @@
1
+ import { AudioContext, HTMLAudioElement } from "./index.js";
2
// Install the shims as globals, but only where the host runtime has not
// already provided an implementation.
const shims = {
  AudioContext,
  webkitAudioContext: AudioContext,
  Audio: HTMLAudioElement,
  HTMLAudioElement
};
for (const [name, impl] of Object.entries(shims)) {
  if (typeof globalThis[name] === "undefined") {
    globalThis[name] = impl;
  }
}
@@ -0,0 +1,18 @@
1
+ import { AudioNode } from './audio-node.js';
2
+ import { AudioParam } from './audio-param.js';
3
+ import type { AudioBuffer } from './audio-buffer.js';
4
/**
 * One-shot PCM source node; playback is delegated to an internal GstPlayer.
 * `start()` may be called at most once per node instance.
 */
export declare class AudioBufferSourceNode extends AudioNode {
    /** Decoded audio to play; `start()` is a no-op while this is null. */
    buffer: AudioBuffer | null;
    loop: boolean;
    loopStart: number;
    loopEnd: number;
    /** Playback speed; nominal range [0.0625, 16], default 1. */
    readonly playbackRate: AudioParam;
    /** Invoked when playback finishes or is stopped. */
    onended: (() => void) | null;
    private _player;
    private _started;
    constructor();
    /**
     * Begin playback immediately.
     * @param when ignored by this implementation
     * @param offset start position within the buffer, in seconds
     * @param duration optional playback length, in seconds
     * @throws DOMException "InvalidStateError" when called a second time
     */
    start(when?: number, offset?: number, duration?: number): void;
    /** Stop the underlying player, if playback is in progress. */
    stop(_when?: number): void;
    /** Walk the output chain to find a GainNode */
    private _findGainNode;
}
@@ -0,0 +1,17 @@
1
/** Constructor options for {@link AudioBuffer}. */
export interface AudioBufferOptions {
    numberOfChannels: number;
    /** Length in sample frames (per channel). */
    length: number;
    /** Sample rate in Hz. */
    sampleRate: number;
}
/**
 * In-memory PCM audio asset: one Float32Array of samples per channel.
 */
export declare class AudioBuffer {
    readonly sampleRate: number;
    /** Length in sample frames. */
    readonly length: number;
    /** Duration in seconds (length / sampleRate). */
    readonly duration: number;
    readonly numberOfChannels: number;
    /** @internal */
    _channelData: Float32Array[];
    constructor(options: AudioBufferOptions);
    /**
     * Return the backing store for one channel (not a copy).
     * @throws RangeError when the channel index is out of range
     */
    getChannelData(channel: number): Float32Array;
    /** Copy samples from a channel (starting at bufferOffset) into destination. */
    copyFromChannel(destination: Float32Array, channelNumber: number, bufferOffset?: number): void;
    /** Copy samples from source into a channel starting at bufferOffset. */
    copyToChannel(source: Float32Array, channelNumber: number, bufferOffset?: number): void;
}