node-web-audio-api 0.18.0 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/CHANGELOG.md +14 -0
  2. package/TODOS.md +134 -12
  3. package/index.mjs +17 -6
  4. package/js/AnalyserNode.js +259 -48
  5. package/js/AudioBuffer.js +243 -0
  6. package/js/AudioBufferSourceNode.js +259 -41
  7. package/js/AudioContext.js +294 -28
  8. package/js/AudioDestinationNode.js +42 -100
  9. package/js/AudioListener.js +219 -0
  10. package/js/AudioNode.js +323 -0
  11. package/js/AudioParam.js +252 -39
  12. package/js/AudioScheduledSourceNode.js +120 -0
  13. package/js/BaseAudioContext.js +434 -0
  14. package/js/BiquadFilterNode.js +218 -29
  15. package/js/ChannelMergerNode.js +93 -22
  16. package/js/ChannelSplitterNode.js +93 -22
  17. package/js/ConstantSourceNode.js +86 -26
  18. package/js/ConvolverNode.js +158 -29
  19. package/js/DelayNode.js +112 -21
  20. package/js/DynamicsCompressorNode.js +195 -27
  21. package/js/Events.js +84 -0
  22. package/js/GainNode.js +104 -21
  23. package/js/IIRFilterNode.js +136 -23
  24. package/js/MediaStreamAudioSourceNode.js +80 -24
  25. package/js/OfflineAudioContext.js +198 -35
  26. package/js/OscillatorNode.js +189 -32
  27. package/js/PannerNode.js +458 -56
  28. package/js/PeriodicWave.js +67 -3
  29. package/js/ScriptProcessorNode.js +179 -0
  30. package/js/StereoPannerNode.js +104 -21
  31. package/js/WaveShaperNode.js +144 -29
  32. package/js/lib/cast.js +19 -0
  33. package/js/lib/errors.js +10 -55
  34. package/js/lib/events.js +10 -0
  35. package/js/lib/symbols.js +20 -0
  36. package/js/lib/utils.js +12 -12
  37. package/js/monkey-patch.js +40 -31
  38. package/node-web-audio-api.darwin-arm64.node +0 -0
  39. package/node-web-audio-api.darwin-x64.node +0 -0
  40. package/node-web-audio-api.linux-arm-gnueabihf.node +0 -0
  41. package/node-web-audio-api.linux-arm64-gnu.node +0 -0
  42. package/node-web-audio-api.linux-x64-gnu.node +0 -0
  43. package/node-web-audio-api.win32-arm64-msvc.node +0 -0
  44. package/node-web-audio-api.win32-x64-msvc.node +0 -0
  45. package/package.json +7 -4
  46. package/run-wpt.md +27 -0
  47. package/run-wpt.sh +5 -0
  48. package/js/AudioNode.mixin.js +0 -132
  49. package/js/AudioScheduledSourceNode.mixin.js +0 -67
  50. package/js/BaseAudioContext.mixin.js +0 -154
  51. package/js/EventTarget.mixin.js +0 -60
package/CHANGELOG.md CHANGED
@@ -1,3 +1,17 @@
+ ## v0.20.0 (29/04/2024)
+
+ - Update upstream crate to [v0.44.0](https://github.com/orottier/web-audio-api-rs/blob/main/CHANGELOG.md#version-0440-2024-04-22)
+ - Implement ScriptProcessorNode
+ - Fix memory leak introduced in v0.19.0
+ - Improve events compliance
+
+ ## v0.19.0 (17/04/2024)
+
+ - Update upstream crate to [1.0.0-rc.5](https://github.com/orottier/web-audio-api-rs/blob/main/CHANGELOG.md#version-0430--100-rc5-2024-04-15)
+ - Provide JS facades with proper inheritance chain for all exposed interfaces
+ - Implement all AudioNode connect / disconnect alternatives
+ - Improve compliance and error handling
+
  ## v0.18.0 (13/03/2024)

  - Fix `MediaStreamAudioSourceNode`
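To give the v0.20.0 entries above some context, here is a short, hypothetical sketch of driving the newly implemented ScriptProcessorNode from Node.js. It assumes the package mirrors the browser API (the `createScriptProcessor()` factory and the `audioprocess` event carrying an `AudioProcessingEvent`, which index.mjs re-exports below); the buffer size and the 0.5 gain are arbitrary.

```js
// Hypothetical sketch (not from the package docs): feed an oscillator through
// the new ScriptProcessorNode and apply a naive gain in the audioprocess callback.
import { AudioContext, OscillatorNode } from 'node-web-audio-api';

const audioContext = new AudioContext();
const osc = new OscillatorNode(audioContext, { frequency: 440 });

// assumed browser-style factory: bufferSize, input channels, output channels
const processor = audioContext.createScriptProcessor(2048, 1, 1);

processor.onaudioprocess = (event) => {
  // AudioProcessingEvent exposes the input and output AudioBuffers
  const input = event.inputBuffer.getChannelData(0);
  const output = event.outputBuffer.getChannelData(0);

  for (let i = 0; i < input.length; i++) {
    output[i] = input[i] * 0.5; // placeholder DSP: attenuate by ~6 dB
  }
};

osc.connect(processor);
processor.connect(audioContext.destination);
osc.start();
```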
package/TODOS.md CHANGED
@@ -1,21 +1,143 @@
  # TODO

- - [ ] Review AudioBuffer
- - [ ] Ended event in AudioScheduledSourceNode for offline audio context
- - [ ] `MediaStreamAudioSourceNode`
- + [x] properly handle `mediaStream`
- + [ ] do not accept OfflineAudioContext (see if this can be delegated to upstream)
+ ## MISC
+
+ - [x] decode audio data in dedicated thread
+ - [x] Use node DOMException <https://nodejs.org/api/globals.html#domexception>
+ - [x] connect / disconnect
+ - [x] _napi_ review tsfn store implementation _or remove_
+ - [x] implement ScriptProcessorNode _ongoing_
+ - [x] _napi_ clean internal audio_buffer __internal_caller__, use JsNull instead
+ - [x] Refactor Events:
+ + [x] extend Event for all specific event types
+ + [x] Register AudioScheduledSourceNode listener only on start
+
+ - [ ] reuse event objects across calls
+
+ -> proxy EventListener
+
+ - [ ] wpt: mock for `URL.createObjectURL`
+ - [ ] wpt: mock for `requestAnimationFrame` cf. https://github.com/nodejs/help/issues/2483
+ - [ ] make sure we don't try to use `in` operator on null values
+
+ - [ ] _rust_ AudioParam failing tests
+ - [ ] _rust_ AudioBufferSourceNode failing tests
+ - [ ] _rust_ IIRFilter node
+
+ - [ ] refactor - Add context string to `throwSanitizedError` and to `toSanitizedSequence`
+ - [ ] Rust - review Symbol.toStringTag https://github.com/nodejs/node/issues/41358
+ cf. https://github.com/nodejs/node/issues/41358#issuecomment-1003595890
+ ```
+ // fails...
+ let symbols = ctx
+ .env
+ .get_global()?
+ .get_named_property::<JsUnknown>("Symbol")?
+ .coerce_to_object()?;
+ let to_string_tag = symbols.get_named_property("toStringTag")?;
+
+ js_this.set_property(to_string_tag, &ctx.env.create_string("AudioContext")?);
+
+ // ----
+ 151 | js_this.set_property(to_string_tag, &ctx.env.create_string("AudioContext")?);
+ | ------------ ^^^^^^^^^^^^^ the trait `napi::NapiRaw` is not implemented for `()`
+ ```

  - [ ] wpt bot
+ - [ ] wpt - handle loading of 4-channels sound file
+
+ - [ ] _rust_ decodeAudioData should throw EncodingError
+ - review JS side when done
+
+ - [-] AnalyserNode -> requires script processor and request animation frame
+
+ - [x] DelayNode
+ - [x] protect AudioBuffer arguments
+ - [x] AudioNode setters (handle enum types, cf audio param too)
+ - [x] This is weird `jsExport.AudioBuffer = require('./AudioBuffer.js').AudioBuffer(nativeBinding.AudioBuffer);`
+ - [x] Ended event in AudioScheduledSourceNode for offline audio context
+
+ - cf. util.types.isSharedArrayBuffer(value)
+
+ ## Notes
+
+ - wpt/webaudio/the-audio-api/the-dynamicscompressornode-interface/dynamicscompressor-basic.html sometimes passes, sometimes fails because the audio context is resumed
+
+ ------------------------------------------------------------------------
+
+ ## `onstatechange` and `onsinkchange`
+
+ ### https://webaudio.github.io/web-audio-api/#eventdef-baseaudiocontext-statechange
+
+ > A newly-created AudioContext will always begin in the suspended state, and a state change event will be fired whenever the state changes to a different state. This event is fired before the complete event is fired.

- - [ ] OfflineAudioContext should not lock the process w/ event listeners if startRendering has not been called
+ ### https://webaudio.github.io/web-audio-api/#dom-audiocontext-onsinkchange

- - [ ] wrap EventTarget::dispatchEvent in setTimeout(callback , 0); so that events are dispatched in the next microtask ?
+ > NOTE: This is not dispatched for the initial device selection in the construction of AudioContext. The statechange event is available to check the readiness of the initial output device.
+
+ cf. the-audiocontext-interface/audiocontext-sinkid-state-change.https.html
+
+ ### Notes
+
+ We should explicitly resume the context at startup, just like a context created in a console or on localhost
+
+ What happens when sinkId is changed while the context is suspended? It seems that it is resumed:
+
+ ```rs
+ Startup { graph } => {
+ debug_assert!(self.graph.is_none());
+ self.graph = Some(graph);
+ self.set_state(AudioContextState::Running);
+ }
+ ```
+
+ @todo - create a test bed
+
+ - testing AudioContextOptions.sinkId requires this fix
+ the-audiocontext-interface/audiocontext-sinkid-constructor.https.html
+ - setting the sink on a suspended audioContext needs a test too
+ the-audiocontext-interface/audiocontext-sinkid-state-change.https.html
+
+ ------------------------------------------------------------------------
+
+ ## Issues in spec / wpt
+
+ - [ ] review waveshaper curve (need to be able to set back to null)
+ <https://webaudio.github.io/web-audio-api/#dom-waveshapernode-curve>
+ To set the curve attribute, execute these steps:
+ - Let new curve be a Float32Array to be assigned to curve or null.
+ - If new curve is not null and [[curve set]] is true, throw an InvalidStateError and abort these steps.
+ - If new curve is not null, set [[curve set]] to true.
+ - Assign new curve to the curve attribute.
+
+ -> Spec is not in line with wpt tests: both Chrome and Firefox accept setting the curve several times (which makes sense...), without passing null first
+ -> Curve is defined as sequence<float> in arguments but as Float32Array in attributes
+ -> Means that we can't pass non-finite values in the ctor but we can with the setter
+
+ - [ ] AudioDestination::numberOfOutputs
+ - implementation and wpt report 0
+ cf. webaudio/the-audio-api/the-audionode-interface/audionode.html
+ - spec specifies 1: https://webaudio.github.io/web-audio-api/#AudioDestinationNode
+
+ - [ ] Analyser::fftSize
+ - wpt expects an IndexSizeError when it is set to -1; shouldn't that be a TypeError, as it is defined as unsigned long?
+ cf. the-analysernode-interface/realtimeanalyser-fft-sizing.html
+ cf. https://webidl.spec.whatwg.org/#js-attributes
+ setter step 4.6
+ - same with getChannelData
+
+ - [ ] wpt - propose a patch to remove the patch regarding `audiobuffersource-multi-channels-expected.wav` in the XMLHttpRequest mock
+
+ - [ ] Propose a test for decodeAudioData
+ "Let error be a DOMException whose name is EncodingError."
+
+ - [ ] ScriptProcessorNode rehabilitation
+ - padenot mail vs spec
+
+ ------------------------------------------------------------------------

+ ls wpt/webaudio/the-audio-api/the-audiocontext-interface | xargs -I {} ./run-wpt.sh {}
+ ls wpt/webaudio/the-audio-api/the-dynamicscompressornode-interface | xargs -I {} ./run-wpt.sh {}

- ## AudioBuffer notes

- - [x] `AudioBuffer` has to be a facade because `startRendering`, `decodeAudioData`
- - [x] need to adapt `AudioBufferSourceNode` and `ConvolverNode` so
- that `set buffer(value)` retrieve the wrapped value
- - [ ] No fucking sound when buffer comes from `decodeAudioData`..., but ok when `createBuffer`
+ 971
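One of the spec/wpt issues listed above concerns the WaveShaperNode curve. The following hypothetical sketch illustrates the behaviour the note describes: wpt and the browsers accept re-assigning the curve, while a strict reading of the spec's [[curve set]] steps would require an InvalidStateError on the second assignment, and setting it back to null is the open question. Whether this package currently accepts the re-assignment is exactly what the TODO item tracks; the example assumes WaveShaperNode is constructible as in the browser.

```js
// Hypothetical sketch of the WaveShaperNode curve question tracked above.
import { AudioContext, WaveShaperNode } from 'node-web-audio-api';

const audioContext = new AudioContext();
const shaper = new WaveShaperNode(audioContext);

shaper.curve = Float32Array.from([-1, 0, 1]);     // first assignment sets [[curve set]]
shaper.curve = Float32Array.from([-0.5, 0, 0.5]); // browsers/wpt accept this; the spec text says InvalidStateError
// shaper.curve = null;                           // setting back to null is the open question
```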
package/index.mjs CHANGED
@@ -20,18 +20,32 @@
  // re-export index.cjs to support esm import syntax
  // see https://github.com/nodejs/node/issues/40541#issuecomment-951609570

- import { createRequire } from 'module';
+ import {
+ createRequire,
+ } from 'module';
  const require = createRequire(import.meta.url);

  const nativeModule = require('./index.cjs');
  export const {
+ // events
+ OfflineAudioCompletionEvent,
+ AudioProcessingEvent,
+
+ // manually written nodes
+ BaseAudioContext,
  AudioContext,
  OfflineAudioContext,
+
+ AudioNode,
+ AudioScheduledSourceNode,
  AudioParam,
  AudioDestinationNode,
- AudioBuffer,
+ AudioListener,
+
  PeriodicWave,
- // generated supported nodes
+ AudioBuffer,
+ // generated nodes
+ ScriptProcessorNode,
  AnalyserNode,
  AudioBufferSourceNode,
  BiquadFilterNode,
@@ -54,6 +68,3 @@ export const {
  } = nativeModule;

  export default nativeModule;
-
-
-
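The reworked export list above reflects the v0.19.0 change "Provide JS facades with proper inheritance chain for all exposed interfaces": the base classes (BaseAudioContext, AudioNode, AudioScheduledSourceNode) and the event types are now exported alongside the concrete nodes. A small, hypothetical usage sketch of what that enables, with OscillatorNode standing in for any generated node:

```js
// Hypothetical sketch: the exported facades form a real prototype chain,
// so instanceof checks against the base classes work as in the browser.
import {
  AudioContext,
  BaseAudioContext,
  AudioNode,
  AudioScheduledSourceNode,
  OscillatorNode,
} from 'node-web-audio-api';

const audioContext = new AudioContext();
const osc = new OscillatorNode(audioContext);

console.log(audioContext instanceof BaseAudioContext); // true
console.log(osc instanceof AudioScheduledSourceNode);  // true
console.log(osc instanceof AudioNode);                 // true
```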
package/js/AnalyserNode.js CHANGED
@@ -17,114 +17,298 @@
  // -------------------------------------------------------------------------- //
  // -------------------------------------------------------------------------- //

- // eslint-disable-next-line no-unused-vars
- const { throwSanitizedError } = require('./lib/errors.js');
- // eslint-disable-next-line no-unused-vars
- const { AudioParam } = require('./AudioParam.js');
- const EventTargetMixin = require('./EventTarget.mixin.js');
- const AudioNodeMixin = require('./AudioNode.mixin.js');
+ /* eslint-disable no-unused-vars */
+ const conversions = require('webidl-conversions');
+ const {
+ toSanitizedSequence,
+ } = require('./lib/cast.js');
+ const {
+ isFunction,
+ kEnumerableProperty,
+ } = require('./lib/utils.js');
+ const {
+ throwSanitizedError,
+ } = require('./lib/errors.js');
+ const {
+ kNapiObj,
+ kAudioBuffer,
+ } = require('./lib/symbols.js');
+ /* eslint-enable no-unused-vars */

+ const AudioNode = require('./AudioNode.js');

- module.exports = (NativeAnalyserNode) => {
-
- const EventTarget = EventTargetMixin(NativeAnalyserNode);
- const AudioNode = AudioNodeMixin(EventTarget);
-
+ module.exports = (jsExport, nativeBinding) => {
  class AnalyserNode extends AudioNode {
+
  constructor(context, options) {
- if (options !== undefined && typeof options !== 'object') {
- throw new TypeError("Failed to construct 'AnalyserNode': argument 2 is not of type 'AnalyserOptions'")
+
+ if (arguments.length < 1) {
+ throw new TypeError(`Failed to construct 'AnalyserNode': 1 argument required, but only ${arguments.length} present`);
  }

- super(context, options);
+ if (!(context instanceof jsExport.BaseAudioContext)) {
+ throw new TypeError(`Failed to construct 'AnalyserNode': argument 1 is not of type BaseAudioContext`);
+ }

- }
+ // parsed version of the option to be passed to NAPI
+ const parsedOptions = {};

- // getters
+ if (options && typeof options !== 'object') {
+ throw new TypeError('Failed to construct \'AnalyserNode\': argument 2 is not of type \'AnalyserOptions\'');
+ }

- get fftSize() {
- return super.fftSize;
- }
+ if (options && options.fftSize !== undefined) {
+ parsedOptions.fftSize = conversions['unsigned long'](options.fftSize, {
+ enforceRange: true,
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'fftSize' property from AnalyserOptions: The provided value (${options.fftSize}})`,
+ });
+ } else {
+ parsedOptions.fftSize = 2048;
+ }

- get frequencyBinCount() {
- return super.frequencyBinCount;
- }
+ if (options && options.maxDecibels !== undefined) {
+ parsedOptions.maxDecibels = conversions['double'](options.maxDecibels, {
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'maxDecibels' property from AnalyserOptions: The provided value (${options.maxDecibels}})`,
+ });
+ } else {
+ parsedOptions.maxDecibels = -30;
+ }

- get minDecibels() {
- return super.minDecibels;
- }
+ if (options && options.minDecibels !== undefined) {
+ parsedOptions.minDecibels = conversions['double'](options.minDecibels, {
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'minDecibels' property from AnalyserOptions: The provided value (${options.minDecibels}})`,
+ });
+ } else {
+ parsedOptions.minDecibels = -100;
+ }

- get maxDecibels() {
- return super.maxDecibels;
- }
+ if (options && options.smoothingTimeConstant !== undefined) {
+ parsedOptions.smoothingTimeConstant = conversions['double'](options.smoothingTimeConstant, {
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'smoothingTimeConstant' property from AnalyserOptions: The provided value (${options.smoothingTimeConstant}})`,
+ });
+ } else {
+ parsedOptions.smoothingTimeConstant = 0.8;
+ }
+
+ if (options && options.channelCount !== undefined) {
+ parsedOptions.channelCount = conversions['unsigned long'](options.channelCount, {
+ enforceRange: true,
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'channelCount' property from AnalyserOptions: The provided value '${options.channelCount}'`,
+ });
+ }
+
+ if (options && options.channelCountMode !== undefined) {
+ parsedOptions.channelCountMode = conversions['DOMString'](options.channelCountMode, {
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'channelCount' property from AnalyserOptions: The provided value '${options.channelCountMode}'`,
+ });
+ }
+
+ if (options && options.channelInterpretation !== undefined) {
+ parsedOptions.channelInterpretation = conversions['DOMString'](options.channelInterpretation, {
+ context: `Failed to construct 'AnalyserNode': Failed to read the 'channelInterpretation' property from AnalyserOptions: The provided value '${options.channelInterpretation}'`,
+ });
+ }
+
+ let napiObj;
+
+ try {
+ napiObj = new nativeBinding.AnalyserNode(context[kNapiObj], parsedOptions);
+ } catch (err) {
+ throwSanitizedError(err);
+ }
+
+ super(context, {
+ [kNapiObj]: napiObj,
+ });

- get smoothingTimeConstant() {
- return super.smoothingTimeConstant;
  }

- // setters
+ get fftSize() {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ return this[kNapiObj].fftSize;
+ }

  set fftSize(value) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ // @fixme - wpt pretends that when set to -1, this should throw IndexSizeError, not a TypeError.
+ // For now let's just cast it to Number without further checks, and let Rust do the job
+ // as 0 is an invalid value too
+ // value = conversions['unsigned long'](value, {
+ // enforceRange: true,
+ // context: `Failed to set the 'fftSize' property on 'AnalyserNode': Value`
+ // });
+ value = conversions['unrestricted double'](value, {
+ context: `Failed to set the 'fftSize' property on 'AnalyserNode': Value`,
+ });
+
  try {
- super.fftSize = value;
+ this[kNapiObj].fftSize = value;
  } catch (err) {
  throwSanitizedError(err);
  }
  }

+ get frequencyBinCount() {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ return this[kNapiObj].frequencyBinCount;
+ }
+
+ get minDecibels() {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ return this[kNapiObj].minDecibels;
+ }
+
  set minDecibels(value) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ value = conversions['double'](value, {
+ context: `Failed to set the 'minDecibels' property on 'AnalyserNode': Value`,
+ });
+
  try {
- super.minDecibels = value;
+ this[kNapiObj].minDecibels = value;
  } catch (err) {
  throwSanitizedError(err);
  }
  }

+ get maxDecibels() {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ return this[kNapiObj].maxDecibels;
+ }
+
  set maxDecibels(value) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ value = conversions['double'](value, {
+ context: `Failed to set the 'maxDecibels' property on 'AnalyserNode': Value`,
+ });
+
  try {
- super.maxDecibels = value;
+ this[kNapiObj].maxDecibels = value;
  } catch (err) {
  throwSanitizedError(err);
  }
  }

+ get smoothingTimeConstant() {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ return this[kNapiObj].smoothingTimeConstant;
+ }
+
  set smoothingTimeConstant(value) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ value = conversions['double'](value, {
+ context: `Failed to set the 'smoothingTimeConstant' property on 'AnalyserNode': Value`,
+ });
+
  try {
- super.smoothingTimeConstant = value;
+ this[kNapiObj].smoothingTimeConstant = value;
  } catch (err) {
  throwSanitizedError(err);
  }
  }

- // methods
-
- getFloatFrequencyData(...args) {
+ getFloatFrequencyData(array) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ if (arguments.length < 1) {
+ throw new TypeError(`Failed to execute 'getFloatFrequencyData' on 'AnalyserNode': 1 argument required, but only ${arguments.length} present`);
+ }
+
+ if (!(array instanceof Float32Array)) {
+ throw new TypeError(`Failed to execute 'getFloatFrequencyData' on 'AnalyserNode': Parameter 1 is not of type 'Float32Array'`);
+ }
+
  try {
- return super.getFloatFrequencyData(...args);
+ return this[kNapiObj].getFloatFrequencyData(array);
  } catch (err) {
  throwSanitizedError(err);
  }
  }

- getByteFrequencyData(...args) {
+ getByteFrequencyData(array) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ if (arguments.length < 1) {
+ throw new TypeError(`Failed to execute 'getByteFrequencyData' on 'AnalyserNode': 1 argument required, but only ${arguments.length} present`);
+ }
+
+ if (!(array instanceof Uint8Array)) {
+ throw new TypeError(`Failed to execute 'getByteFrequencyData' on 'AnalyserNode': Parameter 1 is not of type 'Uint8Array'`);
+ }
+
  try {
- return super.getByteFrequencyData(...args);
+ return this[kNapiObj].getByteFrequencyData(array);
  } catch (err) {
  throwSanitizedError(err);
  }
  }

- getFloatTimeDomainData(...args) {
+ getFloatTimeDomainData(array) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ if (arguments.length < 1) {
+ throw new TypeError(`Failed to execute 'getFloatTimeDomainData' on 'AnalyserNode': 1 argument required, but only ${arguments.length} present`);
+ }
+
+ if (!(array instanceof Float32Array)) {
+ throw new TypeError(`Failed to execute 'getFloatTimeDomainData' on 'AnalyserNode': Parameter 1 is not of type 'Float32Array'`);
+ }
+
  try {
- return super.getFloatTimeDomainData(...args);
+ return this[kNapiObj].getFloatTimeDomainData(array);
  } catch (err) {
  throwSanitizedError(err);
  }
  }

- getByteTimeDomainData(...args) {
+ getByteTimeDomainData(array) {
+ if (!(this instanceof AnalyserNode)) {
+ throw new TypeError('Invalid Invocation: Value of \'this\' must be of type \'AnalyserNode\'');
+ }
+
+ if (arguments.length < 1) {
+ throw new TypeError(`Failed to execute 'getByteTimeDomainData' on 'AnalyserNode': 1 argument required, but only ${arguments.length} present`);
+ }
+
+ if (!(array instanceof Uint8Array)) {
+ throw new TypeError(`Failed to execute 'getByteTimeDomainData' on 'AnalyserNode': Parameter 1 is not of type 'Uint8Array'`);
+ }
+
  try {
- return super.getByteTimeDomainData(...args);
+ return this[kNapiObj].getByteTimeDomainData(array);
  } catch (err) {
  throwSanitizedError(err);
  }
@@ -132,8 +316,35 @@ module.exports = (NativeAnalyserNode) => {

  }

- return AnalyserNode;
- };
+ Object.defineProperties(AnalyserNode, {
+ length: {
+ __proto__: null,
+ writable: false,
+ enumerable: false,
+ configurable: true,
+ value: 1,
+ },
+ });

+ Object.defineProperties(AnalyserNode.prototype, {
+ [Symbol.toStringTag]: {
+ __proto__: null,
+ writable: false,
+ enumerable: false,
+ configurable: true,
+ value: 'AnalyserNode',
+ },

-
+ fftSize: kEnumerableProperty,
+ frequencyBinCount: kEnumerableProperty,
+ minDecibels: kEnumerableProperty,
+ maxDecibels: kEnumerableProperty,
+ smoothingTimeConstant: kEnumerableProperty,
+ getFloatFrequencyData: kEnumerableProperty,
+ getByteFrequencyData: kEnumerableProperty,
+ getFloatTimeDomainData: kEnumerableProperty,
+ getByteTimeDomainData: kEnumerableProperty,
+ });
+
+ return AnalyserNode;
+ };
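To close, a hypothetical usage sketch of the stricter AnalyserNode facade shown above: arguments are now brand-checked and type-checked in JS (Float32Array vs Uint8Array) before the call reaches the native binding, so passing the wrong typed array fails with a TypeError instead of reaching Rust. The OscillatorNode source is only there to produce non-silent data and is an assumption about the generated nodes.

```js
// Hypothetical sketch (not from the package docs) exercising the validation
// added in the AnalyserNode facade above.
import { AudioContext, AnalyserNode, OscillatorNode } from 'node-web-audio-api';

const audioContext = new AudioContext();
const analyser = new AnalyserNode(audioContext, { fftSize: 2048 });

const osc = new OscillatorNode(audioContext);
osc.connect(analyser);
osc.start();

const freqData = new Float32Array(analyser.frequencyBinCount); // fftSize / 2 bins
const timeData = new Uint8Array(analyser.fftSize);

analyser.getFloatFrequencyData(freqData); // ok: expects a Float32Array
analyser.getByteTimeDomainData(timeData); // ok: expects a Uint8Array

// analyser.getFloatFrequencyData(timeData);
// -> TypeError: Parameter 1 is not of type 'Float32Array'
```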