node-web-audio-api 0.19.0 → 0.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/TODOS.md +31 -37
- package/index.mjs +7 -1
- package/js/AnalyserNode.js +0 -3
- package/js/AudioBuffer.js +10 -11
- package/js/AudioBufferSourceNode.js +0 -6
- package/js/AudioContext.js +28 -5
- package/js/AudioDestinationNode.js +1 -1
- package/js/AudioListener.js +2 -2
- package/js/AudioScheduledSourceNode.js +15 -0
- package/js/BaseAudioContext.js +15 -0
- package/js/BiquadFilterNode.js +0 -3
- package/js/ChannelMergerNode.js +0 -3
- package/js/ChannelSplitterNode.js +0 -3
- package/js/ConstantSourceNode.js +0 -6
- package/js/ConvolverNode.js +0 -3
- package/js/DelayNode.js +0 -3
- package/js/DynamicsCompressorNode.js +0 -3
- package/js/Events.js +84 -0
- package/js/GainNode.js +0 -3
- package/js/IIRFilterNode.js +0 -3
- package/js/MediaStreamAudioSourceNode.js +0 -3
- package/js/OfflineAudioContext.js +51 -36
- package/js/OscillatorNode.js +0 -6
- package/js/PannerNode.js +0 -3
- package/js/ScriptProcessorNode.js +179 -0
- package/js/StereoPannerNode.js +0 -3
- package/js/WaveShaperNode.js +0 -3
- package/js/lib/events.js +6 -16
- package/js/lib/symbols.js +17 -2
- package/js/monkey-patch.js +8 -1
- package/node-web-audio-api.darwin-arm64.node +0 -0
- package/node-web-audio-api.darwin-x64.node +0 -0
- package/node-web-audio-api.linux-arm-gnueabihf.node +0 -0
- package/node-web-audio-api.linux-arm64-gnu.node +0 -0
- package/node-web-audio-api.linux-x64-gnu.node +0 -0
- package/node-web-audio-api.win32-arm64-msvc.node +0 -0
- package/node-web-audio-api.win32-x64-msvc.node +0 -0
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,10 @@
|
|
|
1
|
+
## v0.20.0 (29/04/2024)
|
|
2
|
+
|
|
3
|
+
- Update upstream crate to [v0.44.0](https://github.com/orottier/web-audio-api-rs/blob/main/CHANGELOG.md#version-0440-2024-04-22)
|
|
4
|
+
- Implement ScriptProcessorNode
|
|
5
|
+
- Fix memory leak introduced in v0.19.0
|
|
6
|
+
- Improve events compliance
|
|
7
|
+
|
|
1
8
|
## v0.19.0 (17/04/2024)
|
|
2
9
|
|
|
3
10
|
- Update upstream crate to [1.0.0-rc.5](https://github.com/orottier/web-audio-api-rs/blob/main/CHANGELOG.md#version-0430--100-rc5-2024-04-15)
|
package/TODOS.md
CHANGED
|
@@ -5,8 +5,17 @@
|
|
|
5
5
|
- [x] decode audio data in dedicated thread
|
|
6
6
|
- [x] Use node DOMExeption <https://nodejs.org/api/globals.html#domexception>
|
|
7
7
|
- [x] connect / disconnect
|
|
8
|
-
- [
|
|
9
|
-
- [
|
|
8
|
+
- [x] _napi_ review tsfn store implementation _or remove_
|
|
9
|
+
- [x] implement ScriptProcesorNode _on going_
|
|
10
|
+
- [x] _napi_ clean internal audio_buffer __internal_caller__, use JsNull instead
|
|
11
|
+
- [x] Refactor Events:
|
|
12
|
+
+ [x] extend Event for all specific event types
|
|
13
|
+
+ [x] Register AudioScheduledSourceNode listener only on start
|
|
14
|
+
|
|
15
|
+
- [ ] reuse event objects across calls
|
|
16
|
+
|
|
17
|
+
-> proxy EventListener
|
|
18
|
+
|
|
10
19
|
- [ ] wpt: mock for `URL.createObjectURL`
|
|
11
20
|
- [ ] wpt: mock for `requestAnimationFrame` cf. https://github.com/nodejs/help/issues/2483
|
|
12
21
|
- [ ] make sure we don't try to use `in` operator on null values
|
|
@@ -14,16 +23,29 @@
|
|
|
14
23
|
- [ ] _rust_ AudioParam failing tests
|
|
15
24
|
- [ ] _rust_ AudioBufferSourceNode failing tests
|
|
16
25
|
- [ ] _rust_ IIRFilter node
|
|
17
|
-
|
|
26
|
+
|
|
18
27
|
- [ ] refactor - Add context string in `throwSanitizedError` and to `toSanitizedSequence`
|
|
19
28
|
- [ ] Rust - review Symbol.toStringTag https://github.com/nodejs/node/issues/41358
|
|
29
|
+
cf. https://github.com/nodejs/node/issues/41358#issuecomment-1003595890
|
|
20
30
|
```
|
|
21
|
-
//
|
|
22
|
-
let
|
|
23
|
-
|
|
31
|
+
// fails...
|
|
32
|
+
let symbols = ctx
|
|
33
|
+
.env
|
|
34
|
+
.get_global()?
|
|
35
|
+
.get_named_property::<JsUnknown>("Symbol")?
|
|
36
|
+
.coerce_to_object()?;
|
|
37
|
+
let to_string_tag = symbols.get_named_property("toStringTag")?;
|
|
38
|
+
|
|
39
|
+
js_this.set_property(to_string_tag, &ctx.env.create_string("AudioContext")?);
|
|
40
|
+
|
|
41
|
+
// ----
|
|
42
|
+
151 | js_this.set_property(to_string_tag, &ctx.env.create_string("AudioContext")?);
|
|
43
|
+
| ------------ ^^^^^^^^^^^^^ the trait `napi::NapiRaw` is not implemented for `()`
|
|
24
44
|
```
|
|
45
|
+
|
|
25
46
|
- [ ] wpt bot
|
|
26
47
|
- [ ] wpt - handle loading of 4-channels sound file
|
|
48
|
+
|
|
27
49
|
- [ ] _rust_ decodeAudioData should throw EncodingError
|
|
28
50
|
- review JS side when done
|
|
29
51
|
|
|
@@ -114,36 +136,8 @@ Startup { graph } => {
|
|
|
114
136
|
|
|
115
137
|
------------------------------------------------------------------------
|
|
116
138
|
|
|
117
|
-
|
|
118
|
-
#### main
|
|
119
|
-
```
|
|
120
|
-
RESULTS:
|
|
121
|
-
- # pass: 6848
|
|
122
|
-
- # fail: 706
|
|
123
|
-
- # type error issues: 5
|
|
124
|
-
> wpt duration: 2:22.697 (m:ss.mmm)
|
|
125
|
-
```
|
|
126
|
-
|
|
127
|
-
#### feat/ended-events
|
|
128
|
-
```
|
|
129
|
-
RESULTS:
|
|
130
|
-
- # pass: 6854
|
|
131
|
-
- # fail: 704
|
|
132
|
-
- # type error issues: 5
|
|
133
|
-
> wpt duration: 2:08.718 (m:ss.mmm)
|
|
134
|
-
```
|
|
135
|
-
|
|
136
|
-
#### refactor/napi-wrappers
|
|
137
|
-
|
|
138
|
-
w/ https://github.com/orottier/web-audio-api-rs/pull/492
|
|
139
|
-
|
|
140
|
-
```
|
|
141
|
-
RESULTS:
|
|
142
|
-
- # pass: 6897
|
|
143
|
-
- # fail: 692
|
|
144
|
-
- # type error issues: 5
|
|
145
|
-
> wpt duration: 1:59.505 (m:ss.mmm)
|
|
146
|
-
```
|
|
147
|
-
|
|
148
139
|
ls wpt/webaudio/the-audio-api/the-audiocontext-interface | xargs -I {} ./run-wpt.sh {}
|
|
149
140
|
ls wpt/webaudio/the-audio-api/the-dynamicscompressornode-interface | xargs -I {} ./run-wpt.sh {}
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
971
|
package/index.mjs
CHANGED
|
@@ -27,6 +27,11 @@ const require = createRequire(import.meta.url);
|
|
|
27
27
|
|
|
28
28
|
const nativeModule = require('./index.cjs');
|
|
29
29
|
export const {
|
|
30
|
+
// events
|
|
31
|
+
OfflineAudioCompletionEvent,
|
|
32
|
+
AudioProcessingEvent,
|
|
33
|
+
|
|
34
|
+
// manually written nodes
|
|
30
35
|
BaseAudioContext,
|
|
31
36
|
AudioContext,
|
|
32
37
|
OfflineAudioContext,
|
|
@@ -39,7 +44,8 @@ export const {
|
|
|
39
44
|
|
|
40
45
|
PeriodicWave,
|
|
41
46
|
AudioBuffer,
|
|
42
|
-
// generated
|
|
47
|
+
// generated nodes
|
|
48
|
+
ScriptProcessorNode,
|
|
43
49
|
AnalyserNode,
|
|
44
50
|
AudioBufferSourceNode,
|
|
45
51
|
BiquadFilterNode,
|
package/js/AnalyserNode.js
CHANGED
package/js/AudioBuffer.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
const conversions = require(
|
|
1
|
+
const conversions = require('webidl-conversions');
|
|
2
2
|
|
|
3
3
|
const {
|
|
4
4
|
throwSanitizedError,
|
|
@@ -9,7 +9,6 @@ const {
|
|
|
9
9
|
} = require('./lib/utils.js');
|
|
10
10
|
const {
|
|
11
11
|
kNapiObj,
|
|
12
|
-
kAudioBuffer,
|
|
13
12
|
} = require('./lib/symbols.js');
|
|
14
13
|
|
|
15
14
|
|
|
@@ -21,7 +20,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
21
20
|
}
|
|
22
21
|
|
|
23
22
|
if (typeof options !== 'object') {
|
|
24
|
-
throw new TypeError(
|
|
23
|
+
throw new TypeError(`Failed to construct 'AudioBuffer': argument 1 is not of type 'AudioBufferOptions'`);
|
|
25
24
|
}
|
|
26
25
|
|
|
27
26
|
if (kNapiObj in options) {
|
|
@@ -82,7 +81,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
82
81
|
|
|
83
82
|
get sampleRate() {
|
|
84
83
|
if (!(this instanceof AudioBuffer)) {
|
|
85
|
-
throw new TypeError(
|
|
84
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
86
85
|
}
|
|
87
86
|
|
|
88
87
|
return this[kNapiObj].sampleRate;
|
|
@@ -90,7 +89,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
90
89
|
|
|
91
90
|
get duration() {
|
|
92
91
|
if (!(this instanceof AudioBuffer)) {
|
|
93
|
-
throw new TypeError(
|
|
92
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
94
93
|
}
|
|
95
94
|
|
|
96
95
|
return this[kNapiObj].duration;
|
|
@@ -98,7 +97,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
98
97
|
|
|
99
98
|
get length() {
|
|
100
99
|
if (!(this instanceof AudioBuffer)) {
|
|
101
|
-
throw new TypeError(
|
|
100
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
102
101
|
}
|
|
103
102
|
|
|
104
103
|
return this[kNapiObj].length;
|
|
@@ -106,7 +105,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
106
105
|
|
|
107
106
|
get numberOfChannels() {
|
|
108
107
|
if (!(this instanceof AudioBuffer)) {
|
|
109
|
-
throw new TypeError(
|
|
108
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
110
109
|
}
|
|
111
110
|
|
|
112
111
|
return this[kNapiObj].numberOfChannels;
|
|
@@ -114,7 +113,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
114
113
|
|
|
115
114
|
copyFromChannel(destination, channelNumber, bufferOffset = 0) {
|
|
116
115
|
if (!(this instanceof AudioBuffer)) {
|
|
117
|
-
throw new TypeError(
|
|
116
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
118
117
|
}
|
|
119
118
|
|
|
120
119
|
if (arguments.length < 2) {
|
|
@@ -149,7 +148,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
149
148
|
|
|
150
149
|
copyToChannel(source, channelNumber, bufferOffset = 0) {
|
|
151
150
|
if (!(this instanceof AudioBuffer)) {
|
|
152
|
-
throw new TypeError(
|
|
151
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
153
152
|
}
|
|
154
153
|
|
|
155
154
|
if (arguments.length < 2) {
|
|
@@ -184,7 +183,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
184
183
|
|
|
185
184
|
getChannelData(channel) {
|
|
186
185
|
if (!(this instanceof AudioBuffer)) {
|
|
187
|
-
throw new TypeError(
|
|
186
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioBuffer'`);
|
|
188
187
|
}
|
|
189
188
|
|
|
190
189
|
if (arguments.length < 1) {
|
|
@@ -220,7 +219,7 @@ module.exports = (_jsExport, nativeBinding) => {
|
|
|
220
219
|
},
|
|
221
220
|
});
|
|
222
221
|
|
|
223
|
-
Object.defineProperties(AudioBuffer.prototype,
|
|
222
|
+
Object.defineProperties(AudioBuffer.prototype, {
|
|
224
223
|
[Symbol.toStringTag]: {
|
|
225
224
|
__proto__: null,
|
|
226
225
|
writable: false,
|
|
@@ -33,9 +33,6 @@ const {
|
|
|
33
33
|
kNapiObj,
|
|
34
34
|
kAudioBuffer,
|
|
35
35
|
} = require('./lib/symbols.js');
|
|
36
|
-
const {
|
|
37
|
-
bridgeEventTarget,
|
|
38
|
-
} = require('./lib/events.js');
|
|
39
36
|
/* eslint-enable no-unused-vars */
|
|
40
37
|
|
|
41
38
|
const AudioScheduledSourceNode = require('./AudioScheduledSourceNode.js');
|
|
@@ -142,9 +139,6 @@ module.exports = (jsExport, nativeBinding) => {
|
|
|
142
139
|
this[kAudioBuffer] = options.buffer;
|
|
143
140
|
}
|
|
144
141
|
|
|
145
|
-
// Bridge Rust native event to Node EventTarget
|
|
146
|
-
bridgeEventTarget(this);
|
|
147
|
-
|
|
148
142
|
this.#playbackRate = new jsExport.AudioParam({
|
|
149
143
|
[kNapiObj]: this[kNapiObj].playbackRate,
|
|
150
144
|
});
|
package/js/AudioContext.js
CHANGED
|
@@ -1,17 +1,19 @@
|
|
|
1
|
-
const conversions = require(
|
|
1
|
+
const conversions = require('webidl-conversions');
|
|
2
2
|
|
|
3
3
|
const {
|
|
4
4
|
throwSanitizedError,
|
|
5
5
|
} = require('./lib/errors.js');
|
|
6
6
|
const {
|
|
7
7
|
isFunction,
|
|
8
|
-
kEnumerableProperty
|
|
8
|
+
kEnumerableProperty,
|
|
9
9
|
} = require('./lib/utils.js');
|
|
10
10
|
const {
|
|
11
11
|
kNapiObj,
|
|
12
|
+
kOnStateChange,
|
|
13
|
+
kOnSinkChange,
|
|
12
14
|
} = require('./lib/symbols.js');
|
|
13
15
|
const {
|
|
14
|
-
|
|
16
|
+
propagateEvent,
|
|
15
17
|
} = require('./lib/events.js');
|
|
16
18
|
|
|
17
19
|
let contextId = 0;
|
|
@@ -81,8 +83,29 @@ module.exports = function(jsExport, nativeBinding) {
|
|
|
81
83
|
this.#sinkId = options.sinkId;
|
|
82
84
|
}
|
|
83
85
|
|
|
84
|
-
//
|
|
85
|
-
|
|
86
|
+
// Add function to Napi object to bridge from Rust events to JS EventTarget
|
|
87
|
+
this[kNapiObj][kOnStateChange] = (err, rawEvent) => {
|
|
88
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
89
|
+
throw new TypeError('Invalid [kOnStateChange] Invocation: rawEvent should have a type property');
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const event = new Event(rawEvent.type);
|
|
93
|
+
propagateEvent(this, event);
|
|
94
|
+
};
|
|
95
|
+
|
|
96
|
+
this[kNapiObj][kOnSinkChange] = (err, rawEvent) => {
|
|
97
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
98
|
+
throw new TypeError('Invalid [kOnSinkChange] Invocation: rawEvent should have a type property');
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
const event = new Event(rawEvent.type);
|
|
102
|
+
propagateEvent(this, event);
|
|
103
|
+
};
|
|
104
|
+
|
|
105
|
+
// Workaround to bind the `sinkchange` and `statechange` events to EventTarget.
|
|
106
|
+
// This must be called from JS facade ctor as the JS handler are added to the Napi
|
|
107
|
+
// object after its instantiation, and that we don't have any initial `resume` call.
|
|
108
|
+
this[kNapiObj].listen_to_events();
|
|
86
109
|
|
|
87
110
|
// @todo - check if this is still required
|
|
88
111
|
// prevent garbage collection and process exit
|
|
@@ -20,7 +20,7 @@ class AudioDestinationNode extends AudioNode {
|
|
|
20
20
|
|
|
21
21
|
get maxChannelCount() {
|
|
22
22
|
if (!(this instanceof AudioDestinationNode)) {
|
|
23
|
-
throw new TypeError(
|
|
23
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'AudioDestinationNode'`);
|
|
24
24
|
}
|
|
25
25
|
|
|
26
26
|
return this[kNapiObj].maxChannelCount;
|
package/js/AudioListener.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
const conversions = require(
|
|
1
|
+
const conversions = require('webidl-conversions');
|
|
2
2
|
|
|
3
3
|
const { throwSanitizedError } = require('./lib/errors.js');
|
|
4
4
|
const { kEnumerableProperty, kHiddenProperty } = require('./lib/utils.js');
|
|
@@ -160,7 +160,7 @@ class AudioListener {
|
|
|
160
160
|
context: `Failed to execute 'setOrientation' on 'AudioListener': The provided float value`,
|
|
161
161
|
});
|
|
162
162
|
|
|
163
|
-
|
|
163
|
+
z = conversions['float'](z, {
|
|
164
164
|
context: `Failed to execute 'setOrientation' on 'AudioListener': The provided float value`,
|
|
165
165
|
});
|
|
166
166
|
|
|
@@ -3,12 +3,16 @@ const conversions = require('webidl-conversions');
|
|
|
3
3
|
const {
|
|
4
4
|
throwSanitizedError,
|
|
5
5
|
} = require('./lib/errors.js');
|
|
6
|
+
const {
|
|
7
|
+
propagateEvent,
|
|
8
|
+
} = require('./lib/events.js');
|
|
6
9
|
const {
|
|
7
10
|
isFunction,
|
|
8
11
|
kEnumerableProperty,
|
|
9
12
|
} = require('./lib/utils.js');
|
|
10
13
|
const {
|
|
11
14
|
kNapiObj,
|
|
15
|
+
kOnEnded,
|
|
12
16
|
} = require('./lib/symbols.js');
|
|
13
17
|
|
|
14
18
|
const AudioNode = require('./AudioNode.js');
|
|
@@ -26,6 +30,17 @@ class AudioScheduledSourceNode extends AudioNode {
|
|
|
26
30
|
}
|
|
27
31
|
|
|
28
32
|
super(context, options);
|
|
33
|
+
|
|
34
|
+
// Add function to Napi object to bridge from Rust events to JS EventTarget
|
|
35
|
+
// It will be effectively registered on rust side when `start` is called
|
|
36
|
+
this[kNapiObj][kOnEnded] = (err, rawEvent) => {
|
|
37
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
38
|
+
throw new TypeError('Invalid [kOnEnded] Invocation: rawEvent should have a type property');
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
const event = new Event(rawEvent.type);
|
|
42
|
+
propagateEvent(this, event);
|
|
43
|
+
};
|
|
29
44
|
}
|
|
30
45
|
|
|
31
46
|
get onended() {
|
package/js/BaseAudioContext.js
CHANGED
|
@@ -210,6 +210,20 @@ module.exports = (jsExport, _nativeBinding) => {
|
|
|
210
210
|
// --------------------------------------------------------------------
|
|
211
211
|
// Factory Methods (use the patched AudioNodes)
|
|
212
212
|
// --------------------------------------------------------------------
|
|
213
|
+
createScriptProcessor(bufferSize = 0, numberOfInputChannels = 2, numberOfOutputChannels = 2) {
|
|
214
|
+
if (!(this instanceof BaseAudioContext)) {
|
|
215
|
+
throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'BaseAudioContext\'');
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
const options = {
|
|
219
|
+
bufferSize,
|
|
220
|
+
numberOfInputChannels,
|
|
221
|
+
numberOfOutputChannels,
|
|
222
|
+
};
|
|
223
|
+
|
|
224
|
+
return new jsExport.ScriptProcessorNode(this, options);
|
|
225
|
+
}
|
|
226
|
+
|
|
213
227
|
createAnalyser() {
|
|
214
228
|
if (!(this instanceof BaseAudioContext)) {
|
|
215
229
|
throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'BaseAudioContext\'');
|
|
@@ -389,6 +403,7 @@ module.exports = (jsExport, _nativeBinding) => {
|
|
|
389
403
|
configurable: true,
|
|
390
404
|
value: 'BaseAudioContext',
|
|
391
405
|
},
|
|
406
|
+
createScriptProcessor: kEnumerableProperty,
|
|
392
407
|
createAnalyser: kEnumerableProperty,
|
|
393
408
|
createBufferSource: kEnumerableProperty,
|
|
394
409
|
createBiquadFilter: kEnumerableProperty,
|
package/js/BiquadFilterNode.js
CHANGED
package/js/ChannelMergerNode.js
CHANGED
package/js/ConstantSourceNode.js
CHANGED
|
@@ -33,9 +33,6 @@ const {
|
|
|
33
33
|
kNapiObj,
|
|
34
34
|
kAudioBuffer,
|
|
35
35
|
} = require('./lib/symbols.js');
|
|
36
|
-
const {
|
|
37
|
-
bridgeEventTarget,
|
|
38
|
-
} = require('./lib/events.js');
|
|
39
36
|
/* eslint-enable no-unused-vars */
|
|
40
37
|
|
|
41
38
|
const AudioScheduledSourceNode = require('./AudioScheduledSourceNode.js');
|
|
@@ -82,9 +79,6 @@ module.exports = (jsExport, nativeBinding) => {
|
|
|
82
79
|
[kNapiObj]: napiObj,
|
|
83
80
|
});
|
|
84
81
|
|
|
85
|
-
// Bridge Rust native event to Node EventTarget
|
|
86
|
-
bridgeEventTarget(this);
|
|
87
|
-
|
|
88
82
|
this.#offset = new jsExport.AudioParam({
|
|
89
83
|
[kNapiObj]: this[kNapiObj].offset,
|
|
90
84
|
});
|
package/js/ConvolverNode.js
CHANGED
package/js/DelayNode.js
CHANGED
package/js/Events.js
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
const { kEnumerableProperty } = require('./lib/utils.js');
|
|
2
|
+
|
|
3
|
+
class OfflineAudioCompletionEvent extends Event {
|
|
4
|
+
#renderedBuffer = null;
|
|
5
|
+
|
|
6
|
+
constructor(type, eventInitDict) {
|
|
7
|
+
super(type);
|
|
8
|
+
|
|
9
|
+
if (typeof eventInitDict !== 'object' || eventInitDict === null || !('renderedBuffer' in eventInitDict)) {
|
|
10
|
+
throw TypeError(`Failed to construct 'OfflineAudioCompletionEvent': Failed to read the 'renderedBuffer' property from 'OfflineAudioCompletionEvent': Required member is undefined.`);
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
this.#renderedBuffer = eventInitDict.renderedBuffer;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
get renderedBuffer() {
|
|
17
|
+
return this.#renderedBuffer;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
Object.defineProperties(OfflineAudioCompletionEvent.prototype, {
|
|
22
|
+
[Symbol.toStringTag]: {
|
|
23
|
+
__proto__: null,
|
|
24
|
+
writable: false,
|
|
25
|
+
enumerable: false,
|
|
26
|
+
configurable: true,
|
|
27
|
+
value: 'OfflineAudioCompletionEvent',
|
|
28
|
+
},
|
|
29
|
+
|
|
30
|
+
renderedBuffer: kEnumerableProperty,
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
class AudioProcessingEvent extends Event {
|
|
34
|
+
#playbackTime = null;
|
|
35
|
+
#inputBuffer = null;
|
|
36
|
+
#outputBuffer = null;
|
|
37
|
+
|
|
38
|
+
constructor(type, eventInitDict) {
|
|
39
|
+
if (
|
|
40
|
+
typeof eventInitDict !== 'object'
|
|
41
|
+
|| eventInitDict === null
|
|
42
|
+
|| !('playbackTime' in eventInitDict)
|
|
43
|
+
|| !('inputBuffer' in eventInitDict)
|
|
44
|
+
|| !('outputBuffer' in eventInitDict)
|
|
45
|
+
) {
|
|
46
|
+
throw TypeError(`Failed to construct 'AudioProcessingEvent': Invalid 'AudioProcessingEventInit' given`);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
super(type);
|
|
50
|
+
|
|
51
|
+
this.#playbackTime = eventInitDict.playbackTime;
|
|
52
|
+
this.#inputBuffer = eventInitDict.inputBuffer;
|
|
53
|
+
this.#outputBuffer = eventInitDict.outputBuffer;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
get playbackTime() {
|
|
57
|
+
return this.#playbackTime;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
get inputBuffer() {
|
|
61
|
+
return this.#inputBuffer;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
get outputBuffer() {
|
|
65
|
+
return this.#outputBuffer;
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
Object.defineProperties(AudioProcessingEvent.prototype, {
|
|
70
|
+
[Symbol.toStringTag]: {
|
|
71
|
+
__proto__: null,
|
|
72
|
+
writable: false,
|
|
73
|
+
enumerable: false,
|
|
74
|
+
configurable: true,
|
|
75
|
+
value: 'AudioProcessingEvent',
|
|
76
|
+
},
|
|
77
|
+
|
|
78
|
+
playbackTime: kEnumerableProperty,
|
|
79
|
+
inputBuffer: kEnumerableProperty,
|
|
80
|
+
outputBuffer: kEnumerableProperty,
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
module.exports.OfflineAudioCompletionEvent = OfflineAudioCompletionEvent;
|
|
84
|
+
module.exports.AudioProcessingEvent = AudioProcessingEvent;
|
package/js/GainNode.js
CHANGED
package/js/IIRFilterNode.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
const conversions = require('webidl-conversions');
|
|
2
2
|
|
|
3
3
|
const {
|
|
4
|
-
|
|
4
|
+
propagateEvent,
|
|
5
5
|
} = require('./lib/events.js');
|
|
6
6
|
const {
|
|
7
7
|
throwSanitizedError,
|
|
@@ -11,19 +11,15 @@ const {
|
|
|
11
11
|
kEnumerableProperty,
|
|
12
12
|
} = require('./lib/utils.js');
|
|
13
13
|
const {
|
|
14
|
-
kNapiObj
|
|
14
|
+
kNapiObj,
|
|
15
|
+
kOnStateChange,
|
|
16
|
+
kOnComplete,
|
|
15
17
|
} = require('./lib/symbols.js');
|
|
16
18
|
|
|
17
|
-
// constructor(OfflineAudioContextOptions contextOptions);
|
|
18
|
-
// constructor(unsigned long numberOfChannels, unsigned long length, float sampleRate);
|
|
19
|
-
// Promise<AudioBuffer> startRendering();
|
|
20
|
-
// Promise<undefined> resume();
|
|
21
|
-
// Promise<undefined> suspend(double suspendTime);
|
|
22
|
-
// readonly attribute unsigned long length;
|
|
23
|
-
// attribute EventHandler oncomplete;
|
|
24
|
-
|
|
25
19
|
module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
26
20
|
class OfflineAudioContext extends jsExport.BaseAudioContext {
|
|
21
|
+
#renderedBuffer = null;
|
|
22
|
+
|
|
27
23
|
constructor(...args) {
|
|
28
24
|
if (arguments.length < 1) {
|
|
29
25
|
throw new TypeError(`Failed to construct 'OfflineAudioContext': 1 argument required, but only ${arguments.length} present`);
|
|
@@ -52,7 +48,7 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
52
48
|
args = [
|
|
53
49
|
options.numberOfChannels,
|
|
54
50
|
options.length,
|
|
55
|
-
options.sampleRate
|
|
51
|
+
options.sampleRate,
|
|
56
52
|
];
|
|
57
53
|
}
|
|
58
54
|
|
|
@@ -60,16 +56,16 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
60
56
|
|
|
61
57
|
numberOfChannels = conversions['unsigned long'](numberOfChannels, {
|
|
62
58
|
enforceRange: true,
|
|
63
|
-
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'numberOfChannels' property from OfflineContextOptions; The provided value (${numberOfChannels})
|
|
59
|
+
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'numberOfChannels' property from OfflineContextOptions; The provided value (${numberOfChannels})`,
|
|
64
60
|
});
|
|
65
61
|
|
|
66
62
|
length = conversions['unsigned long'](length, {
|
|
67
63
|
enforceRange: true,
|
|
68
|
-
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'length' property from OfflineContextOptions; The provided value (${length})
|
|
64
|
+
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'length' property from OfflineContextOptions; The provided value (${length})`,
|
|
69
65
|
});
|
|
70
66
|
|
|
71
67
|
sampleRate = conversions['float'](sampleRate, {
|
|
72
|
-
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'sampleRate' property from OfflineContextOptions; The provided value (${sampleRate})
|
|
68
|
+
context: `Failed to construct 'OfflineAudioContext': Failed to read the 'sampleRate' property from OfflineContextOptions; The provided value (${sampleRate})`,
|
|
73
69
|
});
|
|
74
70
|
|
|
75
71
|
let napiObj;
|
|
@@ -81,11 +77,41 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
81
77
|
}
|
|
82
78
|
|
|
83
79
|
super({ [kNapiObj]: napiObj });
|
|
80
|
+
|
|
81
|
+
// Add function to Napi object to bridge from Rust events to JS EventTarget
|
|
82
|
+
// They will be effectively registered on rust side when `startRendering` is called
|
|
83
|
+
this[kNapiObj][kOnStateChange] = (err, rawEvent) => {
|
|
84
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
85
|
+
throw new TypeError('Invalid [kOnStateChange] Invocation: rawEvent should have a type property');
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
const event = new Event(rawEvent.type);
|
|
89
|
+
propagateEvent(this, event);
|
|
90
|
+
};
|
|
91
|
+
|
|
92
|
+
// This event is, per spec, the last trigerred one
|
|
93
|
+
this[kNapiObj][kOnComplete] = (err, rawEvent) => {
|
|
94
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
95
|
+
throw new TypeError('Invalid [kOnComplete] Invocation: rawEvent should have a type property');
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
// @fixme: workaround the fact that this event seems to be triggered before
|
|
99
|
+
// startRendering fulfills and that we want to return the exact same instance
|
|
100
|
+
if (this.#renderedBuffer === null) {
|
|
101
|
+
this.#renderedBuffer = new jsExport.AudioBuffer({ [kNapiObj]: rawEvent.renderedBuffer });
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const event = new jsExport.OfflineAudioCompletionEvent(rawEvent.type, {
|
|
105
|
+
renderedBuffer: this.#renderedBuffer,
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
propagateEvent(this, event);
|
|
109
|
+
};
|
|
84
110
|
}
|
|
85
111
|
|
|
86
112
|
get length() {
|
|
87
113
|
if (!(this instanceof OfflineAudioContext)) {
|
|
88
|
-
throw new TypeError(
|
|
114
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
89
115
|
}
|
|
90
116
|
|
|
91
117
|
return this[kNapiObj].length;
|
|
@@ -93,7 +119,7 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
93
119
|
|
|
94
120
|
get oncomplete() {
|
|
95
121
|
if (!(this instanceof OfflineAudioContext)) {
|
|
96
|
-
throw new TypeError(
|
|
122
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
97
123
|
}
|
|
98
124
|
|
|
99
125
|
return this._complete || null;
|
|
@@ -101,7 +127,7 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
101
127
|
|
|
102
128
|
set oncomplete(value) {
|
|
103
129
|
if (!(this instanceof OfflineAudioContext)) {
|
|
104
|
-
throw new TypeError(
|
|
130
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
105
131
|
}
|
|
106
132
|
|
|
107
133
|
if (isFunction(value) || value === null) {
|
|
@@ -111,12 +137,9 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
111
137
|
|
|
112
138
|
async startRendering() {
|
|
113
139
|
if (!(this instanceof OfflineAudioContext)) {
|
|
114
|
-
throw new TypeError(
|
|
140
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
115
141
|
}
|
|
116
142
|
|
|
117
|
-
// Lazily register event callback on rust side
|
|
118
|
-
bridgeEventTarget(this);
|
|
119
|
-
|
|
120
143
|
let nativeAudioBuffer;
|
|
121
144
|
|
|
122
145
|
try {
|
|
@@ -125,26 +148,18 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
125
148
|
throwSanitizedError(err);
|
|
126
149
|
}
|
|
127
150
|
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
// we don't need to deal with the `OfflineAudioCompletionEvent` type.
|
|
133
|
-
const event = new Event('complete');
|
|
134
|
-
event.renderedBuffer = audioBuffer;
|
|
135
|
-
|
|
136
|
-
if (isFunction(this[`oncomplete`])) {
|
|
137
|
-
this[`oncomplete`](event);
|
|
151
|
+
// @fixme: workaround the fact that this event seems to be triggered before
|
|
152
|
+
// startRendering fulfills and that we want to return the exact same instance
|
|
153
|
+
if (this.#renderedBuffer === null) {
|
|
154
|
+
this.#renderedBuffer = new jsExport.AudioBuffer({ [kNapiObj]: nativeAudioBuffer });
|
|
138
155
|
}
|
|
139
156
|
|
|
140
|
-
this
|
|
141
|
-
|
|
142
|
-
return audioBuffer;
|
|
157
|
+
return this.#renderedBuffer;
|
|
143
158
|
}
|
|
144
159
|
|
|
145
160
|
async resume() {
|
|
146
161
|
if (!(this instanceof OfflineAudioContext)) {
|
|
147
|
-
throw new TypeError(
|
|
162
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
148
163
|
}
|
|
149
164
|
|
|
150
165
|
try {
|
|
@@ -156,7 +171,7 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
|
|
|
156
171
|
|
|
157
172
|
async suspend(suspendTime) {
|
|
158
173
|
if (!(this instanceof OfflineAudioContext)) {
|
|
159
|
-
throw new TypeError(
|
|
174
|
+
throw new TypeError(`Invalid Invocation: Value of 'this' must be of type 'OfflineAudioContext'`);
|
|
160
175
|
}
|
|
161
176
|
|
|
162
177
|
if (arguments.length < 1) {
|
package/js/OscillatorNode.js
CHANGED
|
@@ -33,9 +33,6 @@ const {
|
|
|
33
33
|
kNapiObj,
|
|
34
34
|
kAudioBuffer,
|
|
35
35
|
} = require('./lib/symbols.js');
|
|
36
|
-
const {
|
|
37
|
-
bridgeEventTarget,
|
|
38
|
-
} = require('./lib/events.js');
|
|
39
36
|
/* eslint-enable no-unused-vars */
|
|
40
37
|
|
|
41
38
|
const AudioScheduledSourceNode = require('./AudioScheduledSourceNode.js');
|
|
@@ -140,9 +137,6 @@ module.exports = (jsExport, nativeBinding) => {
|
|
|
140
137
|
[kNapiObj]: napiObj,
|
|
141
138
|
});
|
|
142
139
|
|
|
143
|
-
// Bridge Rust native event to Node EventTarget
|
|
144
|
-
bridgeEventTarget(this);
|
|
145
|
-
|
|
146
140
|
this.#frequency = new jsExport.AudioParam({
|
|
147
141
|
[kNapiObj]: this[kNapiObj].frequency,
|
|
148
142
|
});
|
package/js/PannerNode.js
CHANGED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
/* eslint-disable no-unused-vars */
|
|
2
|
+
const conversions = require('webidl-conversions');
|
|
3
|
+
const {
|
|
4
|
+
toSanitizedSequence,
|
|
5
|
+
} = require('./lib/cast.js');
|
|
6
|
+
const {
|
|
7
|
+
isFunction,
|
|
8
|
+
kEnumerableProperty,
|
|
9
|
+
} = require('./lib/utils.js');
|
|
10
|
+
const {
|
|
11
|
+
throwSanitizedError,
|
|
12
|
+
} = require('./lib/errors.js');
|
|
13
|
+
const {
|
|
14
|
+
kNapiObj,
|
|
15
|
+
kAudioBuffer,
|
|
16
|
+
kOnAudioProcess,
|
|
17
|
+
} = require('./lib/symbols.js');
|
|
18
|
+
const {
|
|
19
|
+
propagateEvent,
|
|
20
|
+
} = require('./lib/events.js');
|
|
21
|
+
/* eslint-enable no-unused-vars */
|
|
22
|
+
|
|
23
|
+
const AudioNode = require('./AudioNode.js');
|
|
24
|
+
|
|
25
|
+
module.exports = (jsExport, nativeBinding) => {
|
|
26
|
+
class ScriptProcessorNode extends AudioNode {
|
|
27
|
+
|
|
28
|
+
#onaudioprocess = null;
|
|
29
|
+
|
|
30
|
+
constructor(context, options) {
|
|
31
|
+
|
|
32
|
+
if (arguments.length < 1) {
|
|
33
|
+
throw new TypeError(`Failed to construct 'ScriptProcessorNode': 1 argument required, but only ${arguments.length} present`);
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
if (!(context instanceof jsExport.BaseAudioContext)) {
|
|
37
|
+
throw new TypeError(`Failed to construct 'ScriptProcessorNode': argument 1 is not of type BaseAudioContext`);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// parsed version of the option to be passed to NAPI
|
|
41
|
+
const parsedOptions = {};
|
|
42
|
+
|
|
43
|
+
if (options && typeof options !== 'object') {
|
|
44
|
+
throw new TypeError('Failed to construct \'ScriptProcessorNode\': argument 2 is not of type \'ScriptProcessorNodeOptions\'');
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// IDL defines bufferSize default value as 0
|
|
48
|
+
// cf. https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createscriptprocessor
|
|
49
|
+
// > If it’s not passed in, or if the value is 0, then the implementation
|
|
50
|
+
// > will choose the best buffer size for the given environment, which will
|
|
51
|
+
// > be constant power of 2 throughout the lifetime of the node.
|
|
52
|
+
if (options && options.bufferSize !== undefined && options.bufferSize !== 0) {
|
|
53
|
+
parsedOptions.bufferSize = conversions['unsigned long'](options.bufferSize, {
|
|
54
|
+
enforceRange: true,
|
|
55
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'bufferSize' property from ScriptProcessorNodeOptions: The provided value '${options.bufferSize}'`,
|
|
56
|
+
});
|
|
57
|
+
} else {
|
|
58
|
+
parsedOptions.bufferSize = 256;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
if (options && options.numberOfInputChannels !== undefined) {
|
|
62
|
+
parsedOptions.numberOfInputChannels = conversions['unsigned long'](options.numberOfInputChannels, {
|
|
63
|
+
enforceRange: true,
|
|
64
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'numberOfInputChannels' property from ScriptProcessorNodeOptions: The provided value '${options.numberOfInputChannels}'`,
|
|
65
|
+
});
|
|
66
|
+
} else {
|
|
67
|
+
parsedOptions.numberOfInputChannels = 2;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (options && options.numberOfOutputChannels !== undefined) {
|
|
71
|
+
parsedOptions.numberOfOutputChannels = conversions['unsigned long'](options.numberOfOutputChannels, {
|
|
72
|
+
enforceRange: true,
|
|
73
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'numberOfOutputChannels' property from ScriptProcessorNodeOptions: The provided value '${options.numberOfOutputChannels}'`,
|
|
74
|
+
});
|
|
75
|
+
} else {
|
|
76
|
+
parsedOptions.numberOfOutputChannels = 2;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
if (options && options.channelCount !== undefined) {
|
|
80
|
+
parsedOptions.channelCount = conversions['unsigned long'](options.channelCount, {
|
|
81
|
+
enforceRange: true,
|
|
82
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'channelCount' property from ScriptProcessorNodeOptions: The provided value '${options.channelCount}'`,
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (options && options.channelCountMode !== undefined) {
|
|
87
|
+
parsedOptions.channelCountMode = conversions['DOMString'](options.channelCountMode, {
|
|
88
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'channelCount' property from ScriptProcessorNodeOptions: The provided value '${options.channelCountMode}'`,
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
if (options && options.channelInterpretation !== undefined) {
|
|
93
|
+
parsedOptions.channelInterpretation = conversions['DOMString'](options.channelInterpretation, {
|
|
94
|
+
context: `Failed to construct 'ScriptProcessorNode': Failed to read the 'channelInterpretation' property from ScriptProcessorNodeOptions: The provided value '${options.channelInterpretation}'`,
|
|
95
|
+
});
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
let napiObj;
|
|
99
|
+
|
|
100
|
+
try {
|
|
101
|
+
napiObj = new nativeBinding.ScriptProcessorNode(context[kNapiObj], parsedOptions);
|
|
102
|
+
} catch (err) {
|
|
103
|
+
throwSanitizedError(err);
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
super(context, {
|
|
107
|
+
[kNapiObj]: napiObj,
|
|
108
|
+
});
|
|
109
|
+
|
|
110
|
+
this[kNapiObj][kOnAudioProcess] = (err, rawEvent) => {
|
|
111
|
+
if (typeof rawEvent !== 'object' && !('type' in rawEvent)) {
|
|
112
|
+
throw new TypeError('Invalid [kOnStateChange] Invocation: rawEvent should have a type property');
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
const audioProcessingEventInit = {
|
|
116
|
+
playbackTime: rawEvent.playbackTime,
|
|
117
|
+
inputBuffer: new jsExport.AudioBuffer({ [kNapiObj]: rawEvent.inputBuffer }),
|
|
118
|
+
outputBuffer: new jsExport.AudioBuffer({ [kNapiObj]: rawEvent.outputBuffer }),
|
|
119
|
+
};
|
|
120
|
+
|
|
121
|
+
const event = new jsExport.AudioProcessingEvent('audioprocess', audioProcessingEventInit);
|
|
122
|
+
propagateEvent(this, event);
|
|
123
|
+
};
|
|
124
|
+
|
|
125
|
+
this[kNapiObj].listen_to_events();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
get bufferSize() {
|
|
129
|
+
if (!(this instanceof ScriptProcessorNode)) {
|
|
130
|
+
throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'ScriptProcessorNode\'');
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
return this[kNapiObj].bufferSize;
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
get onaudioprocess() {
|
|
137
|
+
if (!(this instanceof ScriptProcessorNode)) {
|
|
138
|
+
throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'ScriptProcessorNode\'');
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
return this.#onaudioprocess;
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
set onaudioprocess(value) {
|
|
145
|
+
if (!(this instanceof ScriptProcessorNode)) {
|
|
146
|
+
throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'ScriptProcessorNode\'');
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
if (isFunction(value) || value === null) {
|
|
150
|
+
this.#onaudioprocess = value;
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
Object.defineProperties(ScriptProcessorNode, {
|
|
156
|
+
length: {
|
|
157
|
+
__proto__: null,
|
|
158
|
+
writable: false,
|
|
159
|
+
enumerable: false,
|
|
160
|
+
configurable: true,
|
|
161
|
+
value: 0,
|
|
162
|
+
},
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
Object.defineProperties(ScriptProcessorNode.prototype, {
|
|
166
|
+
[Symbol.toStringTag]: {
|
|
167
|
+
__proto__: null,
|
|
168
|
+
writable: false,
|
|
169
|
+
enumerable: false,
|
|
170
|
+
configurable: true,
|
|
171
|
+
value: 'ScriptProcessorNode',
|
|
172
|
+
},
|
|
173
|
+
bufferSize: kEnumerableProperty,
|
|
174
|
+
onaudioprocess: kEnumerableProperty,
|
|
175
|
+
|
|
176
|
+
});
|
|
177
|
+
|
|
178
|
+
return ScriptProcessorNode;
|
|
179
|
+
};
|
package/js/StereoPannerNode.js
CHANGED
package/js/WaveShaperNode.js
CHANGED
package/js/lib/events.js
CHANGED
|
@@ -1,20 +1,10 @@
|
|
|
1
|
-
const { kNapiObj, kDispatchEvent } = require('./symbols.js');
|
|
2
1
|
const { isFunction } = require('./utils.js');
|
|
3
2
|
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
// Finalize event registration on Rust side
|
|
9
|
-
jsObj[kNapiObj][kDispatchEvent] = (err, eventType) => {
|
|
10
|
-
const event = new Event(eventType);
|
|
11
|
-
// call attribute first if exists
|
|
12
|
-
if (isFunction(jsObj[`on${event.type}`])) {
|
|
13
|
-
jsObj[`on${event.type}`](event);
|
|
14
|
-
}
|
|
15
|
-
// then distach to add event listeners
|
|
16
|
-
jsObj.dispatchEvent(event);
|
|
3
|
+
/**
 * Deliver `event` to `eventTarget` following EventHandler semantics:
 * the `onXXX` attribute handler (when assigned) runs before listeners
 * registered via `addEventListener`.
 */
module.exports.propagateEvent = function propagateEvent(eventTarget, event) {
  const attributeHandler = eventTarget[`on${event.type}`];

  // the `onXXX` attribute comes first, if a callable is assigned
  if (isFunction(attributeHandler)) {
    attributeHandler.call(eventTarget, event);
  }

  // then dispatch to listeners added through addEventListener
  eventTarget.dispatchEvent(event);
}
|
package/js/lib/symbols.js
CHANGED
|
@@ -1,5 +1,20 @@
|
|
|
1
1
|
module.exports.kNapiObj = Symbol('node-web-audio-api:napi-obj');
module.exports.kAudioBuffer = Symbol('node-web-audio-api:audio-buffer');

// semi-private keys for events listeners

// # BaseAudioContext
module.exports.kOnStateChange = Symbol.for('node-web-audio-api:onstatechange');
// # AudioContext
module.exports.kOnSinkChange = Symbol.for('node-web-audio-api:onsinkchange');
// # OfflineAudioContext
// > [The onstatechange] event is fired before the complete event is fired
// cf. https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-onstatechange
// @fixme: for now the `complete` event is triggered **before** startRendering fulfills
module.exports.kOnComplete = Symbol.for('node-web-audio-api:oncomplete');
// # AudioScheduledSourceNode
module.exports.kOnEnded = Symbol.for('node-web-audio-api:onended');
// # ScriptProcessorNode
module.exports.kOnAudioProcess = Symbol.for('node-web-audio-api:onaudioprocess');
|
|
5
20
|
|
package/js/monkey-patch.js
CHANGED
|
@@ -19,13 +19,20 @@
|
|
|
19
19
|
|
|
20
20
|
module.exports = function monkeyPatch(nativeBinding) {
|
|
21
21
|
let jsExport = {};
|
|
22
|
+
|
|
23
|
+
// --------------------------------------------------------------------------
|
|
24
|
+
// Events
|
|
25
|
+
// --------------------------------------------------------------------------
|
|
26
|
+
jsExport.OfflineAudioCompletionEvent = require('./Events').OfflineAudioCompletionEvent;
|
|
27
|
+
jsExport.AudioProcessingEvent = require('./Events').AudioProcessingEvent;
|
|
22
28
|
// --------------------------------------------------------------------------
|
|
23
|
-
//
|
|
29
|
+
// Create Web Audio API facade
|
|
24
30
|
// --------------------------------------------------------------------------
|
|
25
31
|
jsExport.BaseAudioContext = require('./BaseAudioContext.js')(jsExport, nativeBinding);
|
|
26
32
|
jsExport.AudioContext = require('./AudioContext.js')(jsExport, nativeBinding);
|
|
27
33
|
jsExport.OfflineAudioContext = require('./OfflineAudioContext.js')(jsExport, nativeBinding);
|
|
28
34
|
|
|
35
|
+
jsExport.ScriptProcessorNode = require('./ScriptProcessorNode.js')(jsExport, nativeBinding);
|
|
29
36
|
jsExport.AnalyserNode = require('./AnalyserNode.js')(jsExport, nativeBinding);
|
|
30
37
|
jsExport.AudioBufferSourceNode = require('./AudioBufferSourceNode.js')(jsExport, nativeBinding);
|
|
31
38
|
jsExport.BiquadFilterNode = require('./BiquadFilterNode.js')(jsExport, nativeBinding);
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|