react-native-audio-api 0.4.13 → 0.5.0-rc.1
This diff shows the changes between publicly released versions of this package as they appear in their public registries, and is provided for informational purposes only.
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt +7 -6
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIPackage.kt +25 -24
- package/common/cpp/audioapi/HostObjects/AnalyserNodeHostObject.h +17 -41
- package/common/cpp/audioapi/HostObjects/AudioBufferHostObject.h +22 -32
- package/common/cpp/audioapi/HostObjects/AudioBufferSourceNodeHostObject.h +2 -17
- package/common/cpp/audioapi/HostObjects/AudioContextHostObject.h +14 -4
- package/common/cpp/audioapi/HostObjects/AudioParamHostObject.h +5 -8
- package/common/cpp/audioapi/HostObjects/BaseAudioContextHostObject.h +13 -9
- package/common/cpp/audioapi/HostObjects/BiquadFilterNodeHostObject.h +8 -19
- package/common/cpp/audioapi/core/AudioContext.cpp +14 -4
- package/common/cpp/audioapi/core/AudioContext.h +2 -2
- package/common/cpp/audioapi/core/BaseAudioContext.cpp +4 -2
- package/common/cpp/audioapi/core/BaseAudioContext.h +1 -1
- package/common/cpp/audioapi/core/effects/BiquadFilterNode.cpp +5 -6
- package/common/cpp/audioapi/core/effects/BiquadFilterNode.h +4 -3
- package/common/cpp/audioapi/core/sources/AudioBufferSourceNode.cpp +5 -11
- package/common/cpp/audioapi/core/sources/AudioBufferSourceNode.h +3 -31
- package/lib/module/api.js +1 -1
- package/lib/module/api.js.map +1 -1
- package/lib/module/api.web.js +1 -2
- package/lib/module/api.web.js.map +1 -1
- package/lib/module/core/AnalyserNode.js.map +1 -1
- package/lib/module/core/AudioBuffer.js.map +1 -1
- package/lib/module/core/AudioBufferSourceNode.js +0 -6
- package/lib/module/core/AudioBufferSourceNode.js.map +1 -1
- package/lib/module/core/AudioParam.js.map +1 -1
- package/lib/module/core/BaseAudioContext.js +3 -2
- package/lib/module/core/BaseAudioContext.js.map +1 -1
- package/lib/module/core/BiquadFilterNode.js.map +1 -1
- package/lib/module/utils/index.js +6 -0
- package/lib/module/utils/index.js.map +1 -0
- package/lib/module/web-core/AnalyserNode.js +4 -20
- package/lib/module/web-core/AnalyserNode.js.map +1 -1
- package/lib/module/web-core/AudioBuffer.js +2 -6
- package/lib/module/web-core/AudioBuffer.js.map +1 -1
- package/lib/module/web-core/AudioBufferSourceNode.js +161 -21
- package/lib/module/web-core/AudioBufferSourceNode.js.map +1 -1
- package/lib/module/web-core/AudioContext.js +7 -8
- package/lib/module/web-core/AudioContext.js.map +1 -1
- package/lib/module/web-core/AudioParam.js +1 -1
- package/lib/module/web-core/AudioParam.js.map +1 -1
- package/lib/module/web-core/BiquadFilterNode.js +1 -9
- package/lib/module/web-core/BiquadFilterNode.js.map +1 -1
- package/lib/module/web-core/custom/signalsmithStretch/SignalsmithStretch.mjs +2 -2
- package/lib/module/web-core/custom/signalsmithStretch/SignalsmithStretch.mjs.map +1 -1
- package/lib/typescript/api.d.ts +1 -1
- package/lib/typescript/api.d.ts.map +1 -1
- package/lib/typescript/api.web.d.ts +1 -2
- package/lib/typescript/api.web.d.ts.map +1 -1
- package/lib/typescript/core/AnalyserNode.d.ts +4 -4
- package/lib/typescript/core/AnalyserNode.d.ts.map +1 -1
- package/lib/typescript/core/AudioBuffer.d.ts +3 -3
- package/lib/typescript/core/AudioBuffer.d.ts.map +1 -1
- package/lib/typescript/core/AudioBufferSourceNode.d.ts +0 -3
- package/lib/typescript/core/AudioBufferSourceNode.d.ts.map +1 -1
- package/lib/typescript/core/AudioParam.d.ts +1 -1
- package/lib/typescript/core/AudioParam.d.ts.map +1 -1
- package/lib/typescript/core/BaseAudioContext.d.ts +3 -3
- package/lib/typescript/core/BaseAudioContext.d.ts.map +1 -1
- package/lib/typescript/core/BiquadFilterNode.d.ts +1 -1
- package/lib/typescript/core/BiquadFilterNode.d.ts.map +1 -1
- package/lib/typescript/interfaces.d.ts +12 -13
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/types.d.ts +3 -1
- package/lib/typescript/types.d.ts.map +1 -1
- package/lib/typescript/utils/index.d.ts +2 -0
- package/lib/typescript/utils/index.d.ts.map +1 -0
- package/lib/typescript/web-core/AnalyserNode.d.ts +4 -4
- package/lib/typescript/web-core/AnalyserNode.d.ts.map +1 -1
- package/lib/typescript/web-core/AudioBuffer.d.ts +2 -2
- package/lib/typescript/web-core/AudioBuffer.d.ts.map +1 -1
- package/lib/typescript/web-core/AudioBufferSourceNode.d.ts +58 -6
- package/lib/typescript/web-core/AudioBufferSourceNode.d.ts.map +1 -1
- package/lib/typescript/web-core/AudioContext.d.ts +3 -5
- package/lib/typescript/web-core/AudioContext.d.ts.map +1 -1
- package/lib/typescript/web-core/AudioParam.d.ts +1 -1
- package/lib/typescript/web-core/AudioParam.d.ts.map +1 -1
- package/lib/typescript/web-core/BaseAudioContext.d.ts +2 -2
- package/lib/typescript/web-core/BaseAudioContext.d.ts.map +1 -1
- package/lib/typescript/web-core/BiquadFilterNode.d.ts +1 -1
- package/lib/typescript/web-core/BiquadFilterNode.d.ts.map +1 -1
- package/package.json +6 -6
- package/src/api.ts +0 -1
- package/src/api.web.ts +0 -2
- package/src/core/AnalyserNode.ts +4 -4
- package/src/core/AudioBuffer.ts +3 -3
- package/src/core/AudioBufferSourceNode.ts +0 -9
- package/src/core/AudioParam.ts +1 -1
- package/src/core/BaseAudioContext.ts +16 -5
- package/src/core/BiquadFilterNode.ts +3 -3
- package/src/interfaces.ts +14 -16
- package/src/types.ts +3 -1
- package/src/utils/index.ts +3 -0
- package/src/web-core/AnalyserNode.tsx +8 -30
- package/src/web-core/AudioBuffer.tsx +4 -14
- package/src/web-core/AudioBufferSourceNode.tsx +357 -31
- package/src/web-core/AudioContext.tsx +19 -13
- package/src/web-core/AudioParam.tsx +2 -6
- package/src/web-core/BaseAudioContext.tsx +3 -3
- package/src/web-core/BiquadFilterNode.tsx +6 -16
- package/src/web-core/custom/signalsmithStretch/SignalsmithStretch.mjs +4 -3
- package/common/cpp/audioapi/core/types/TimeStretchType.h +0 -7
- package/lib/module/web-core/StretcherNode.js +0 -81
- package/lib/module/web-core/StretcherNode.js.map +0 -1
- package/lib/module/web-core/custom/signalsmithStretch/SignalsmithStretch.js +0 -823
- package/lib/module/web-core/custom/signalsmithStretch/SignalsmithStretch.js.map +0 -1
- package/lib/typescript/web-core/StretcherNode.d.ts +0 -48
- package/lib/typescript/web-core/StretcherNode.d.ts.map +0 -1
- package/src/web-core/StretcherNode.tsx +0 -145
- package/src/web-core/custom/signalsmithStretch/SignalsmithStretch.js +0 -946
package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt

@@ -13,10 +13,6 @@ class AudioAPIModule(
   reactContext: ReactApplicationContext,
 ) : NativeAudioAPIModuleSpec(reactContext) {
   companion object {
-    init {
-      System.loadLibrary("react-native-audio-api")
-    }
-
     const val NAME = NativeAudioAPIModuleSpec.NAME
   }

@@ -30,8 +26,13 @@ class AudioAPIModule(
   private external fun injectJSIBindings()

   init {
-
-
+    try {
+      System.loadLibrary("react-native-audio-api")
+      val jsCallInvokerHolder = reactContext.jsCallInvokerHolder as CallInvokerHolderImpl
+      mHybridData = initHybrid(reactContext.javaScriptContextHolder!!.get(), jsCallInvokerHolder)
+    } catch (exception: UnsatisfiedLinkError) {
+      throw RuntimeException("Could not load native module AudioAPIModule", exception)
+    }
   }

   @ReactMethod(isBlockingSynchronousMethod = true)
package/android/src/main/java/com/swmansion/audioapi/AudioAPIPackage.kt

@@ -1,41 +1,42 @@
 package com.swmansion.audioapi

 import com.facebook.react.BaseReactPackage
-import com.facebook.react.ReactPackage
 import com.facebook.react.bridge.NativeModule
 import com.facebook.react.bridge.ReactApplicationContext
-import com.facebook.react.common.annotations.UnstableReactNativeAPI
 import com.facebook.react.module.annotations.ReactModuleList
 import com.facebook.react.module.model.ReactModuleInfo
 import com.facebook.react.module.model.ReactModuleInfoProvider

-@ReactModuleList(
-
-
-
-
+@ReactModuleList(
+  nativeModules = [
+    AudioAPIModule::class,
+  ],
+)
+class AudioAPIPackage : BaseReactPackage() {
   override fun getModule(
     name: String,
     reactContext: ReactApplicationContext,
-  ): NativeModule?
+  ): NativeModule? {
     when (name) {
-      AudioAPIModule.NAME -> AudioAPIModule(reactContext)
-      else -> null
+      AudioAPIModule.NAME -> return AudioAPIModule(reactContext)
     }
+    return null
+  }

   override fun getReactModuleInfoProvider(): ReactModuleInfoProvider =
-    ReactModuleInfoProvider
-
-
-
-
-
-
-
-
-
-
-
-    )
-
+    ReactModuleInfoProvider {
+      val moduleInfos: MutableMap<String, ReactModuleInfo> = HashMap()
+      val isTurboModule = BuildConfig.IS_NEW_ARCHITECTURE_ENABLED
+      moduleInfos[AudioAPIModule.NAME] =
+        ReactModuleInfo(
+          AudioAPIModule.NAME,
+          AudioAPIModule.NAME,
+          canOverrideExistingModule = true,
+          needsEagerInit = false,
+          hasConstants = true,
+          isCxxModule = false,
+          isTurboModule = isTurboModule,
+        )
+      moduleInfos
+    }
 }
package/common/cpp/audioapi/HostObjects/AnalyserNodeHostObject.h

@@ -73,71 +73,47 @@ class AnalyserNodeHostObject : public AudioNodeHostObject {
   }

   JSI_HOST_FUNCTION(getFloatFrequencyData) {
-    auto
-    auto
-    auto
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto data = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+    auto length = static_cast<int>(arrayBuffer.size(runtime));

     auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
     analyserNode->getFloatFrequencyData(data, length);

-    for (int i = 0; i < length; i++) {
-      destination.setValueAtIndex(runtime, i, jsi::Value(data[i]));
-    }
-
-    delete[] data;
-
     return jsi::Value::undefined();
   }

   JSI_HOST_FUNCTION(getByteFrequencyData) {
-    auto
-    auto
-    auto
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto data = arrayBuffer.data(runtime);
+    auto length = static_cast<int>(arrayBuffer.size(runtime));

     auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
     analyserNode->getByteFrequencyData(data, length);

-    for (int i = 0; i < length; i++) {
-      destination.setValueAtIndex(runtime, i, jsi::Value(data[i]));
-    }
-
-    delete[] data;
-
     return jsi::Value::undefined();
   }

   JSI_HOST_FUNCTION(getFloatTimeDomainData) {
-
-
-
-
-    auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
-    analyserNode->getFloatTimeDomainData(data, length);
-
-    for (int i = 0; i < length; i++) {
-      destination.setValueAtIndex(runtime, i, jsi::Value(data[i]));
-    }
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto data = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+    auto length = static_cast<int>(arrayBuffer.size(runtime));

-
+    auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
+    analyserNode->getFloatTimeDomainData(data, length);

     return jsi::Value::undefined();
   }

   JSI_HOST_FUNCTION(getByteTimeDomainData) {
-
-
-
-
-    auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
-    analyserNode->getByteTimeDomainData(data, length);
-
-    for (int i = 0; i < length; i++) {
-      destination.setValueAtIndex(runtime, i, jsi::Value(data[i]));
-    }
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto data = arrayBuffer.data(runtime);
+    auto length = static_cast<int>(arrayBuffer.size(runtime));

-
+    auto analyserNode = std::static_pointer_cast<AnalyserNode>(node_);
+    analyserNode->getByteTimeDomainData(data, length);

-
+    return jsi::Value::undefined();
   }

   JSI_PROPERTY_SETTER(fftSize) {
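The analyser getters now take the caller's typed array from args[0], reach its underlying ArrayBuffer through the "buffer" property, and write the results straight into that memory; the old per-element setValueAtIndex loop and the delete[] of a temporary buffer are gone. A minimal TypeScript sketch of the Web Audio-style calls these host functions back (the setup names are illustrative, not taken from this diff):

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();
const analyser = ctx.createAnalyser();

// The typed arrays are filled in place; native code sees their .buffer directly.
const freqData = new Float32Array(analyser.frequencyBinCount);
const waveData = new Uint8Array(analyser.fftSize);

analyser.getFloatFrequencyData(freqData);
analyser.getByteTimeDomainData(waveData);
```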
package/common/cpp/audioapi/HostObjects/AudioBufferHostObject.h

@@ -47,56 +47,46 @@ class AudioBufferHostObject : public JsiHostObject {
   }

   JSI_HOST_FUNCTION(getChannelData) {
-
-
+    auto channel = static_cast<int>(args[0].getNumber());
+    auto *channelData = audioBuffer_->getChannelData(channel);
+    auto length = static_cast<int>(audioBuffer_->getLength());
+    auto size = static_cast<int>(length * sizeof(float));

-    auto
-
-      array.setValueAtIndex(runtime, i, jsi::Value(channelData[i]));
-    }
+    auto arrayBufferCtor = runtime.global().getPropertyAsFunction(runtime, "ArrayBuffer");
+    auto arrayBuffer = arrayBufferCtor.callAsConstructor(runtime, size).getObject(runtime).getArrayBuffer(runtime);

-
+    auto float32ArrayCtor = runtime.global().getPropertyAsFunction(runtime, "Float32Array");
+    auto float32Array = float32ArrayCtor.callAsConstructor(runtime, arrayBuffer).getObject(runtime);
+
+    auto data = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+
+    memcpy(data, channelData, size);
+
+    return float32Array;
   }

   JSI_HOST_FUNCTION(copyFromChannel) {
-    auto
-    auto
-
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto destination = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+    auto length = static_cast<int>(arrayBuffer.size(runtime));
     auto channelNumber = static_cast<int>(args[1].getNumber());
     auto startInChannel = static_cast<size_t>(args[2].getNumber());

-    auto *destinationData = new float[destinationLength];
-
     audioBuffer_->copyFromChannel(
-
-
-    for (int i = 0; i < destinationLength; i++) {
-      destination.setValueAtIndex(runtime, i, jsi::Value(destinationData[i]));
-    }
-
-    delete[] destinationData;
+        destination, length, channelNumber, startInChannel);

     return jsi::Value::undefined();
   }

   JSI_HOST_FUNCTION(copyToChannel) {
-    auto
-    auto
-
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto source = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+    auto length = static_cast<int>(arrayBuffer.size(runtime));
     auto channelNumber = static_cast<int>(args[1].getNumber());
     auto startInChannel = static_cast<size_t>(args[2].getNumber());

-    auto *sourceData = new float[sourceLength];
-
-    for (int i = 0; i < sourceLength; i++) {
-      sourceData[i] =
-          static_cast<float>(source.getValueAtIndex(runtime, i).getNumber());
-    }
-
     audioBuffer_->copyToChannel(
-
-
-    delete[] sourceData;
+        source, length, channelNumber, startInChannel);

     return jsi::Value::undefined();
   }
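getChannelData now constructs an ArrayBuffer and a Float32Array with the JS constructors and memcpys the channel samples into it, so the JS side receives a real typed array; copyFromChannel and copyToChannel likewise read and write the caller's Float32Array through its buffer. A hedged usage sketch (Web Audio-style API; the one-second stereo buffer is just an example):

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();
const buffer = ctx.createBuffer(2, ctx.sampleRate, ctx.sampleRate); // 1 s, stereo

// A copy of channel 0 backed by a freshly allocated ArrayBuffer.
const left = buffer.getChannelData(0);
console.log(left.length);

// Both calls hand the typed array's underlying buffer to native code.
const scratch = new Float32Array(buffer.length);
buffer.copyFromChannel(scratch, 0, 0);
buffer.copyToChannel(scratch, 1, 0);
```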
package/common/cpp/audioapi/HostObjects/AudioBufferSourceNodeHostObject.h

@@ -24,15 +24,13 @@ class AudioBufferSourceNodeHostObject
         JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, loopStart),
         JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, loopEnd),
         JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, detune),
-        JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, playbackRate)
-        JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, timeStretch));
+        JSI_EXPORT_PROPERTY_GETTER(AudioBufferSourceNodeHostObject, playbackRate));

     addSetters(
         JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, loop),
         JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, buffer),
         JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, loopStart),
-        JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, loopEnd)
-        JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, timeStretch));
+        JSI_EXPORT_PROPERTY_SETTER(AudioBufferSourceNodeHostObject, loopEnd));

     // start method is overridden in this class
     functions_->erase("start");
@@ -92,13 +90,6 @@ class AudioBufferSourceNodeHostObject
     return jsi::Object::createFromHostObject(runtime, playbackRateHostObject);
   }

-  JSI_PROPERTY_GETTER(timeStretch) {
-    auto audioBufferSourceNode =
-        std::static_pointer_cast<AudioBufferSourceNode>(node_);
-    auto timeStretch = audioBufferSourceNode->getTimeStretchType();
-    return jsi::String::createFromUtf8(runtime, timeStretch);
-  }
-
   JSI_PROPERTY_SETTER(loop) {
     auto audioBufferSourceNode =
         std::static_pointer_cast<AudioBufferSourceNode>(node_);
@@ -130,12 +121,6 @@ class AudioBufferSourceNodeHostObject
     audioBufferSourceNode->setLoopEnd(value.getNumber());
   }

-  JSI_PROPERTY_SETTER(timeStretch) {
-    auto audioBufferSourceNode =
-        std::static_pointer_cast<AudioBufferSourceNode>(node_);
-    audioBufferSourceNode->setTimeStretchType(value.getString(runtime).utf8(runtime));
-  }
-
   JSI_HOST_FUNCTION(start) {
     auto when = args[0].getNumber();
     auto offset = args[1].getNumber();
package/common/cpp/audioapi/HostObjects/AudioContextHostObject.h

@@ -43,10 +43,15 @@ class AudioContextHostObject : public BaseAudioContextHostObject {
     auto promise = promiseVendor_->createPromise([this](std::shared_ptr<Promise> promise) {
       std::thread([this, promise = std::move(promise)]() {
         auto audioContext = std::static_pointer_cast<AudioContext>(context_);
-        audioContext->resume();
+        auto result = audioContext->resume();
+
+        if (!result) {
+          promise->reject("Failed to resume audio context, because it is already closed.");
+          return;
+        }

         promise->resolve([](jsi::Runtime &runtime) {
-
+          return jsi::Value::undefined();
         });
       }).detach();
     });
@@ -58,10 +63,15 @@ class AudioContextHostObject : public BaseAudioContextHostObject {
     auto promise = promiseVendor_->createPromise([this](std::shared_ptr<Promise> promise) {
       std::thread([this, promise = std::move(promise)]() {
         auto audioContext = std::static_pointer_cast<AudioContext>(context_);
-        audioContext->suspend();
+        auto result = audioContext->suspend();
+
+        if (!result) {
+          promise->reject("Failed to resume audio context, because it is already closed.");
+          return;
+        }

         promise->resolve([](jsi::Runtime &runtime) {
-
+          return jsi::Value::undefined();
         });
       }).detach();
     });
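resume() and suspend() now propagate failure: the core methods return false once the context is closed, and the host object rejects the returned promise instead of silently resolving. A short sketch of handling this from TypeScript (the rejection text is the one added in this diff):

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();

async function toggle() {
  try {
    await ctx.suspend();
    await ctx.resume();
  } catch (err) {
    // Rejects once the context has already been closed.
    console.warn('Audio context state change failed:', err);
  }
}

toggle();
```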
package/common/cpp/audioapi/HostObjects/AudioParamHostObject.h

@@ -79,16 +79,13 @@ class AudioParamHostObject : public JsiHostObject {
   }

   JSI_HOST_FUNCTION(setValueCurveAtTime) {
-    auto
-    auto
-    auto
-
-      valuesData[i] =
-          static_cast<float>(values.getValueAtIndex(runtime, i).getNumber());
-    }
+    auto arrayBuffer = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto values = reinterpret_cast<float *>(arrayBuffer.data(runtime));
+    auto length = static_cast<int>(arrayBuffer.size(runtime));
+
     double startTime = args[1].getNumber();
     double duration = args[2].getNumber();
-    param_->setValueCurveAtTime(
+    param_->setValueCurveAtTime(values, length, startTime, duration);
     return jsi::Value::undefined();
   }

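setValueCurveAtTime now receives the curve as a Float32Array and reads it through the array's buffer, replacing the old getValueAtIndex copy loop. A sketch of the Web Audio-style call (the gain node and timings are illustrative):

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();
const gain = ctx.createGain();

// Sweep the gain through an arbitrary curve over two seconds.
const curve = new Float32Array([0, 0.3, 1, 0.6, 0]);
gain.gain.setValueCurveAtTime(curve, ctx.currentTime, 2);
```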
package/common/cpp/audioapi/HostObjects/BaseAudioContextHostObject.h

@@ -95,7 +95,8 @@ class BaseAudioContextHostObject : public JsiHostObject {
   }

   JSI_HOST_FUNCTION(createBufferSource) {
-    auto
+    auto pitchCorrection = args[0].asBool();
+    auto bufferSource = context_->createBufferSource(pitchCorrection);
     auto bufferSourceHostObject =
         std::make_shared<AudioBufferSourceNodeHostObject>(bufferSource, callInvoker_);
     return jsi::Object::createFromHostObject(runtime, bufferSourceHostObject);
@@ -111,18 +112,21 @@ class BaseAudioContextHostObject : public JsiHostObject {
   }

   JSI_HOST_FUNCTION(createPeriodicWave) {
-    auto
-    auto
+    auto arrayBufferReal = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto real = reinterpret_cast<float *>(arrayBufferReal.data(runtime));
+    auto length = static_cast<int>(arrayBufferReal.size(runtime));
+
+    auto arrayBufferImag = args[1].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto imag = reinterpret_cast<float *>(arrayBufferReal.data(runtime));
+
     auto disableNormalization = args[2].getBool();
-    auto length =
-        static_cast<int>(real.getProperty(runtime, "length").asNumber());

     auto complexData = std::vector<std::complex<float>>(length);

-    for (size_t i = 0; i <
+    for (size_t i = 0; i < length; i++) {
       complexData[i] = std::complex<float>(
-          static_cast<float>(real
-          static_cast<float>(imag
+          static_cast<float>(real[i]),
+          static_cast<float>(imag[i]));
     }

     auto periodicWave = context_->createPeriodicWave(
@@ -148,7 +152,7 @@ class BaseAudioContextHostObject : public JsiHostObject {
         auto audioBufferHostObject = std::make_shared<AudioBufferHostObject>(results);

         if (!results) {
-          promise->reject("Failed to decode audio data source");
+          promise->reject("Failed to decode audio data source.");
           return;
         }

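createBufferSource now expects a boolean pitch-correction flag as its first native argument and forwards it to BaseAudioContext::createBufferSource. The public TypeScript signature is not shown in this diff, so the options-object form below is an assumption used only for illustration:

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();

// Assumed JS-side shape; natively this becomes a single boolean argument.
const source = ctx.createBufferSource({ pitchCorrection: true });

// With pitch correction enabled, changing playbackRate is meant to
// change playback speed without shifting pitch.
source.playbackRate.value = 1.25;
```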
package/common/cpp/audioapi/HostObjects/BiquadFilterNodeHostObject.h

@@ -64,30 +64,19 @@ class BiquadFilterNodeHostObject : public AudioNodeHostObject {
   }

   JSI_HOST_FUNCTION(getFrequencyResponse) {
-    auto
-    auto
-    auto
+    auto arrayBufferFrequency = args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto frequencyArray = reinterpret_cast<float *>(arrayBufferFrequency.data(runtime));
+    auto length = static_cast<int>(arrayBufferFrequency.size(runtime));

-
-
-      frequencyArrayVector[i] = static_cast<float>(
-          frequencyArray.getValueAtIndex(runtime, i).getNumber());
-    }
+    auto arrayBufferMag = args[1].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto magResponseOut = reinterpret_cast<float *>(arrayBufferMag.data(runtime));

-
-
+    auto arrayBufferPhase = args[2].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
+    auto phaseResponseOut = reinterpret_cast<float *>(arrayBufferPhase.data(runtime));

     auto biquadFilterNode = std::static_pointer_cast<BiquadFilterNode>(node_);
     biquadFilterNode->getFrequencyResponse(
-
-
-    for (size_t i = 0; i < magResponseOutVector.size(); i++) {
-      magResponseOut.setValueAtIndex(runtime, i, magResponseOutVector[i]);
-    }
-
-    for (size_t i = 0; i < phaseResponseOutVector.size(); i++) {
-      phaseResponseOut.setValueAtIndex(runtime, i, phaseResponseOutVector[i]);
-    }
+        frequencyArray, magResponseOut, phaseResponseOut, length);

     return jsi::Value::undefined();
   }
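getFrequencyResponse now writes the magnitude and phase responses directly into the caller's Float32Arrays via their buffers. A sketch of the Web Audio-style call:

```ts
import { AudioContext } from 'react-native-audio-api';

const ctx = new AudioContext();
const filter = ctx.createBiquadFilter();

const frequencies = new Float32Array([100, 1_000, 10_000]);
const magnitudes = new Float32Array(frequencies.length);
const phases = new Float32Array(frequencies.length);

// The last two arrays are filled in place by native code.
filter.getFrequencyResponse(frequencies, magnitudes, phases);
```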
package/common/cpp/audioapi/core/AudioContext.cpp

@@ -39,23 +39,33 @@ AudioContext::~AudioContext() {
   if (!isClosed()) {
     close();
   }
-
-  nodeManager_->cleanup();
 }

 void AudioContext::close() {
   state_ = ContextState::CLOSED;
   audioPlayer_->stop();
+
+  nodeManager_->cleanup();
 }

-
+bool AudioContext::resume() {
+  if (isClosed()) {
+    return false;
+  }
+
   state_ = ContextState::RUNNING;
   audioPlayer_->resume();
+  return true;
 }

-
+bool AudioContext::suspend() {
+  if (isClosed()) {
+    return false;
+  }
+
   state_ = ContextState::SUSPENDED;
   audioPlayer_->suspend();
+  return true;
 }

 std::function<void(std::shared_ptr<AudioBus>, int)>
package/common/cpp/audioapi/core/BaseAudioContext.cpp

@@ -65,8 +65,10 @@ std::shared_ptr<BiquadFilterNode> BaseAudioContext::createBiquadFilter() {
   return biquadFilter;
 }

-std::shared_ptr<AudioBufferSourceNode> BaseAudioContext::createBufferSource(
-
+std::shared_ptr<AudioBufferSourceNode> BaseAudioContext::createBufferSource(
+    bool pitchCorrection) {
+  auto bufferSource =
+      std::make_shared<AudioBufferSourceNode>(this, pitchCorrection);
   nodeManager_->addNode(bufferSource);
   return bufferSource;
 }
package/common/cpp/audioapi/core/BaseAudioContext.h

@@ -42,7 +42,7 @@ class BaseAudioContext {
   std::shared_ptr<GainNode> createGain();
   std::shared_ptr<StereoPannerNode> createStereoPanner();
   std::shared_ptr<BiquadFilterNode> createBiquadFilter();
-  std::shared_ptr<AudioBufferSourceNode> createBufferSource();
+  std::shared_ptr<AudioBufferSourceNode> createBufferSource(bool pitchCorrection);
   static std::shared_ptr<AudioBuffer>
   createBuffer(int numberOfChannels, size_t length, float sampleRate);
   std::shared_ptr<PeriodicWave> createPeriodicWave(
package/common/cpp/audioapi/core/effects/BiquadFilterNode.cpp

@@ -57,20 +57,19 @@ std::shared_ptr<AudioParam> BiquadFilterNode::getGainParam() const {
 // angle of the frequency response

 void BiquadFilterNode::getFrequencyResponse(
-    const
-
-
+    const float *frequencyArray,
+    float *magResponseOutput,
+    float *phaseResponseOutput,
+    const int length) {
   applyFilter();

-  auto frequencyArraySize = frequencyArray.size();
-
   float b0 = b0_;
   float b1 = b1_;
   float b2 = b2_;
   float a1 = a1_;
   float a2 = a2_;

-  for (size_t i = 0; i <
+  for (size_t i = 0; i < length; i++) {
     auto omega = PI * frequencyArray[i] / context_->getNyquistFrequency();
     auto z = std::complex<float>(cos(omega), sin(omega));
     auto response = ((b0 * z + b1) * z + b2) / ((z + a1) * z + a2);
package/common/cpp/audioapi/core/effects/BiquadFilterNode.h

@@ -27,9 +27,10 @@ class BiquadFilterNode : public AudioNode {
   [[nodiscard]] std::shared_ptr<AudioParam> getQParam() const;
   [[nodiscard]] std::shared_ptr<AudioParam> getGainParam() const;
   void getFrequencyResponse(
-      const
-
-
+      const float *frequencyArray,
+      float *magResponseOutput,
+      float *phaseResponseOutput,
+      int length);

 protected:
   void processNode(const std::shared_ptr<AudioBus>& processingBus, int framesToProcess) override;
package/common/cpp/audioapi/core/sources/AudioBufferSourceNode.cpp

@@ -9,12 +9,14 @@

 namespace audioapi {

-AudioBufferSourceNode::AudioBufferSourceNode(
+AudioBufferSourceNode::AudioBufferSourceNode(
+    BaseAudioContext *context,
+    bool pitchCorrection)
     : AudioScheduledSourceNode(context),
       loop_(false),
       loopStart_(0),
       loopEnd_(0),
-
+      pitchCorrection_(pitchCorrection),
       vReadIndex_(0.0) {
   buffer_ = std::shared_ptr<AudioBuffer>(nullptr);

@@ -54,10 +56,6 @@ std::shared_ptr<AudioBuffer> AudioBufferSourceNode::getBuffer() const {
   return buffer_;
 }

-std::string AudioBufferSourceNode::getTimeStretchType() const {
-  return toString(timeStretchType_);
-}
-
 void AudioBufferSourceNode::setLoop(bool loop) {
   loop_ = loop;
 }
@@ -91,10 +89,6 @@ void AudioBufferSourceNode::setBuffer(
     loopEnd_ = buffer_->getDuration();
   }

-void AudioBufferSourceNode::setTimeStretchType(const std::string &type) {
-  timeStretchType_ = fromString(type);
-}
-
 void AudioBufferSourceNode::start(double when, double offset, double duration) {
   AudioScheduledSourceNode::start(when);

@@ -136,7 +130,7 @@ void AudioBufferSourceNode::processNode(
   size_t startOffset = 0;
   size_t offsetLength = 0;

-  if (
+  if (!pitchCorrection_) {
     auto computedPlaybackRate = getComputedPlaybackRateValue();
     updatePlaybackInfo(
         processingBus, framesToProcess, startOffset, offsetLength);