react-native-audio-api 0.5.4 → 0.5.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIPackage.kt +1 -0
- package/common/cpp/audioapi/AudioAPIModuleInstaller.h +28 -0
- package/common/cpp/audioapi/HostObjects/OfflineAudioContextHostObject.h +70 -0
- package/common/cpp/audioapi/core/AudioNode.cpp +6 -1
- package/common/cpp/audioapi/core/OfflineAudioContext.cpp +117 -0
- package/common/cpp/audioapi/core/OfflineAudioContext.h +40 -0
- package/common/cpp/audioapi/core/sources/AudioBufferSourceNode.cpp +2 -2
- package/common/cpp/audioapi/core/sources/AudioScheduledSourceNode.cpp +28 -2
- package/common/cpp/audioapi/core/utils/AudioNodeDestructor.cpp +53 -0
- package/common/cpp/audioapi/core/utils/AudioNodeDestructor.h +33 -0
- package/common/cpp/audioapi/core/utils/AudioNodeManager.cpp +13 -10
- package/common/cpp/audioapi/core/utils/AudioNodeManager.h +3 -0
- package/lib/module/api.js +2 -1
- package/lib/module/api.js.map +1 -1
- package/lib/module/api.web.js +1 -0
- package/lib/module/api.web.js.map +1 -1
- package/lib/module/core/OfflineAudioContext.js +57 -0
- package/lib/module/core/OfflineAudioContext.js.map +1 -0
- package/lib/module/web-core/OfflineAudioContext.js +90 -0
- package/lib/module/web-core/OfflineAudioContext.js.map +1 -0
- package/lib/typescript/api.d.ts +3 -1
- package/lib/typescript/api.d.ts.map +1 -1
- package/lib/typescript/api.web.d.ts +1 -0
- package/lib/typescript/api.web.d.ts.map +1 -1
- package/lib/typescript/core/OfflineAudioContext.d.ts +14 -0
- package/lib/typescript/core/OfflineAudioContext.d.ts.map +1 -0
- package/lib/typescript/interfaces.d.ts +6 -0
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/types.d.ts +5 -0
- package/lib/typescript/types.d.ts.map +1 -1
- package/lib/typescript/web-core/OfflineAudioContext.d.ts +34 -0
- package/lib/typescript/web-core/OfflineAudioContext.d.ts.map +1 -0
- package/package.json +1 -1
- package/src/api.ts +11 -2
- package/src/api.web.ts +1 -0
- package/src/core/OfflineAudioContext.ts +94 -0
- package/src/interfaces.ts +11 -0
- package/src/types.ts +6 -0
- package/src/web-core/OfflineAudioContext.tsx +163 -0
|
@@ -2,7 +2,9 @@
|
|
|
2
2
|
|
|
3
3
|
#include <audioapi/jsi/JsiPromise.h>
|
|
4
4
|
#include <audioapi/core/AudioContext.h>
|
|
5
|
+
#include <audioapi/core/OfflineAudioContext.h>
|
|
5
6
|
#include <audioapi/HostObjects/AudioContextHostObject.h>
|
|
7
|
+
#include <audioapi/HostObjects/OfflineAudioContextHostObject.h>
|
|
6
8
|
|
|
7
9
|
#include <memory>
|
|
8
10
|
|
|
@@ -14,8 +16,11 @@ class AudioAPIModuleInstaller {
|
|
|
14
16
|
public:
|
|
15
17
|
static void injectJSIBindings(jsi::Runtime *jsiRuntime, const std::shared_ptr<react::CallInvoker> &jsCallInvoker) {
|
|
16
18
|
auto createAudioContext = getCreateAudioContextFunction(jsiRuntime, jsCallInvoker);
|
|
19
|
+
auto createOfflineAudioContext = getCreateOfflineAudioContextFunction(jsiRuntime, jsCallInvoker);
|
|
17
20
|
jsiRuntime->global().setProperty(
|
|
18
21
|
*jsiRuntime, "createAudioContext", createAudioContext);
|
|
22
|
+
jsiRuntime->global().setProperty(
|
|
23
|
+
*jsiRuntime, "createOfflineAudioContext", createOfflineAudioContext);
|
|
19
24
|
}
|
|
20
25
|
|
|
21
26
|
private:
|
|
@@ -44,6 +49,29 @@ class AudioAPIModuleInstaller {
|
|
|
44
49
|
runtime, audioContextHostObject);
|
|
45
50
|
});
|
|
46
51
|
}
|
|
52
|
+
|
|
53
|
+
static jsi::Function getCreateOfflineAudioContextFunction(jsi::Runtime *jsiRuntime, const std::shared_ptr<react::CallInvoker> &jsCallInvoker) {
|
|
54
|
+
return jsi::Function::createFromHostFunction(
|
|
55
|
+
*jsiRuntime,
|
|
56
|
+
jsi::PropNameID::forAscii(*jsiRuntime, "createOfflineAudioContext"),
|
|
57
|
+
0,
|
|
58
|
+
[jsiRuntime, jsCallInvoker](
|
|
59
|
+
jsi::Runtime &runtime,
|
|
60
|
+
const jsi::Value &thisValue,
|
|
61
|
+
const jsi::Value *args,
|
|
62
|
+
size_t count) -> jsi::Value {
|
|
63
|
+
auto numberOfChannels = static_cast<int>(args[0].getNumber());
|
|
64
|
+
auto length = static_cast<size_t>(args[1].getNumber());
|
|
65
|
+
auto sampleRate = static_cast<float>(args[2].getNumber());
|
|
66
|
+
|
|
67
|
+
std::shared_ptr<OfflineAudioContext> offlineAudioContext = std::make_shared<OfflineAudioContext>(numberOfChannels, length, sampleRate);
|
|
68
|
+
auto audioContextHostObject = std::make_shared<OfflineAudioContextHostObject>(
|
|
69
|
+
offlineAudioContext, jsiRuntime, jsCallInvoker);
|
|
70
|
+
|
|
71
|
+
return jsi::Object::createFromHostObject(
|
|
72
|
+
runtime, audioContextHostObject);
|
|
73
|
+
});
|
|
74
|
+
}
|
|
47
75
|
};
|
|
48
76
|
|
|
49
77
|
} // namespace audioapi
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
#pragma once
|
|
2
|
+
|
|
3
|
+
#include <audioapi/core/OfflineAudioContext.h>
|
|
4
|
+
#include <audioapi/HostObjects/BaseAudioContextHostObject.h>
|
|
5
|
+
|
|
6
|
+
#include <jsi/jsi.h>
|
|
7
|
+
#include <memory>
|
|
8
|
+
#include <utility>
|
|
9
|
+
#include <vector>
|
|
10
|
+
|
|
11
|
+
namespace audioapi {
|
|
12
|
+
using namespace facebook;
|
|
13
|
+
|
|
14
|
+
class OfflineAudioContextHostObject : public BaseAudioContextHostObject {
|
|
15
|
+
public:
|
|
16
|
+
explicit OfflineAudioContextHostObject(
|
|
17
|
+
const std::shared_ptr<OfflineAudioContext> &offlineAudioContext,
|
|
18
|
+
jsi::Runtime *runtime,
|
|
19
|
+
const std::shared_ptr<react::CallInvoker> &callInvoker)
|
|
20
|
+
: BaseAudioContextHostObject(offlineAudioContext, runtime, callInvoker) {
|
|
21
|
+
addFunctions(
|
|
22
|
+
JSI_EXPORT_FUNCTION(OfflineAudioContextHostObject, resume),
|
|
23
|
+
JSI_EXPORT_FUNCTION(OfflineAudioContextHostObject, suspend),
|
|
24
|
+
JSI_EXPORT_FUNCTION(OfflineAudioContextHostObject, startRendering));
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
JSI_HOST_FUNCTION(resume) {
|
|
28
|
+
auto promise = promiseVendor_->createPromise([this](const std::shared_ptr<Promise>& promise) {
|
|
29
|
+
auto audioContext = std::static_pointer_cast<OfflineAudioContext>(context_);
|
|
30
|
+
audioContext->resume();
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
return promise;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
JSI_HOST_FUNCTION(suspend) {
|
|
37
|
+
double when = args[0].getNumber();
|
|
38
|
+
|
|
39
|
+
auto promise = promiseVendor_->createPromise([this, when](const std::shared_ptr<Promise>& promise) {
|
|
40
|
+
auto audioContext = std::static_pointer_cast<OfflineAudioContext>(context_);
|
|
41
|
+
OfflineAudioContextSuspendCallback callback = [promise]() {
|
|
42
|
+
promise->resolve([](jsi::Runtime &runtime) {
|
|
43
|
+
return jsi::Value::undefined();
|
|
44
|
+
});
|
|
45
|
+
};
|
|
46
|
+
audioContext->suspend(when, callback);
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
return promise;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
JSI_HOST_FUNCTION(startRendering) {
|
|
53
|
+
auto promise = promiseVendor_->createPromise([this](const std::shared_ptr<Promise>& promise) {
|
|
54
|
+
auto audioContext = std::static_pointer_cast<OfflineAudioContext>(context_);
|
|
55
|
+
|
|
56
|
+
OfflineAudioContextResultCallback callback =
|
|
57
|
+
[promise](const std::shared_ptr<AudioBuffer>& audioBuffer) -> void {
|
|
58
|
+
auto audioBufferHostObject = std::make_shared<AudioBufferHostObject>(audioBuffer);
|
|
59
|
+
promise->resolve([audioBufferHostObject = std::move(audioBufferHostObject)](jsi::Runtime &runtime) {
|
|
60
|
+
return jsi::Object::createFromHostObject(runtime, audioBufferHostObject);
|
|
61
|
+
});
|
|
62
|
+
};
|
|
63
|
+
|
|
64
|
+
audioContext->startRendering(callback);
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
return promise;
|
|
68
|
+
}
|
|
69
|
+
};
|
|
70
|
+
} // namespace audioapi
|
|
@@ -222,7 +222,12 @@ void AudioNode::connectNode(const std::shared_ptr<AudioNode> &node) {
|
|
|
222
222
|
}
|
|
223
223
|
|
|
224
224
|
void AudioNode::disconnectNode(const std::shared_ptr<AudioNode> &node) {
|
|
225
|
-
outputNodes_.
|
|
225
|
+
auto position = outputNodes_.find(node);
|
|
226
|
+
|
|
227
|
+
if (position != outputNodes_.end()) {
|
|
228
|
+
node->onInputDisconnected(this);
|
|
229
|
+
outputNodes_.erase(node);
|
|
230
|
+
}
|
|
226
231
|
}
|
|
227
232
|
|
|
228
233
|
void AudioNode::onInputEnabled() {
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
#include "OfflineAudioContext.h"
|
|
2
|
+
|
|
3
|
+
#include <audioapi/core/AudioContext.h>
|
|
4
|
+
#include <audioapi/core/Constants.h>
|
|
5
|
+
#include <audioapi/core/destinations/AudioDestinationNode.h>
|
|
6
|
+
#include <audioapi/core/sources/AudioBuffer.h>
|
|
7
|
+
#include <audioapi/core/utils/AudioDecoder.h>
|
|
8
|
+
#include <audioapi/core/utils/AudioNodeManager.h>
|
|
9
|
+
#include <audioapi/core/utils/Locker.h>
|
|
10
|
+
#include <audioapi/utils/AudioArray.h>
|
|
11
|
+
#include <audioapi/utils/AudioBus.h>
|
|
12
|
+
|
|
13
|
+
#include <algorithm>
|
|
14
|
+
#include <cassert>
|
|
15
|
+
#include <iostream>
|
|
16
|
+
#include <thread>
|
|
17
|
+
#include <utility>
|
|
18
|
+
|
|
19
|
+
namespace audioapi {
|
|
20
|
+
|
|
21
|
+
OfflineAudioContext::OfflineAudioContext(
|
|
22
|
+
int numberOfChannels,
|
|
23
|
+
size_t length,
|
|
24
|
+
float sampleRate)
|
|
25
|
+
: BaseAudioContext(),
|
|
26
|
+
length_(length),
|
|
27
|
+
numberOfChannels_(numberOfChannels),
|
|
28
|
+
currentSampleFrame_(0) {
|
|
29
|
+
sampleRate_ = sampleRate;
|
|
30
|
+
audioDecoder_ = std::make_shared<AudioDecoder>(sampleRate_);
|
|
31
|
+
resultBus_ = std::make_shared<AudioBus>(
|
|
32
|
+
static_cast<int>(length_), numberOfChannels_, sampleRate_);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
OfflineAudioContext::~OfflineAudioContext() {
|
|
36
|
+
nodeManager_->cleanup();
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
void OfflineAudioContext::resume() {
|
|
40
|
+
Locker locker(mutex_);
|
|
41
|
+
|
|
42
|
+
if (state_ == ContextState::RUNNING) {
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
renderAudio();
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
void OfflineAudioContext::suspend(
|
|
50
|
+
double when,
|
|
51
|
+
const std::function<void()> &callback) {
|
|
52
|
+
Locker locker(mutex_);
|
|
53
|
+
|
|
54
|
+
// we can only suspend once per render quantum at the end of the quantum
|
|
55
|
+
// first quantum is [0, RENDER_QUANTUM_SIZE)
|
|
56
|
+
auto frame = static_cast<size_t>(when * sampleRate_);
|
|
57
|
+
frame = RENDER_QUANTUM_SIZE *
|
|
58
|
+
((frame + RENDER_QUANTUM_SIZE - 1) / RENDER_QUANTUM_SIZE);
|
|
59
|
+
|
|
60
|
+
if (scheduledSuspends_.find(frame) != scheduledSuspends_.end()) {
|
|
61
|
+
throw std::runtime_error(
|
|
62
|
+
"cannot schedule more than one suspend at frame " +
|
|
63
|
+
std::to_string(frame) + " (" + std::to_string(when) + " seconds)");
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
scheduledSuspends_.emplace(frame, callback);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
void OfflineAudioContext::renderAudio() {
|
|
70
|
+
state_ = ContextState::RUNNING;
|
|
71
|
+
std::thread([this]() {
|
|
72
|
+
auto audioBus = std::make_shared<AudioBus>(
|
|
73
|
+
RENDER_QUANTUM_SIZE, numberOfChannels_, sampleRate_);
|
|
74
|
+
|
|
75
|
+
while (currentSampleFrame_ < length_) {
|
|
76
|
+
Locker locker(mutex_);
|
|
77
|
+
int framesToProcess = std::min(
|
|
78
|
+
static_cast<int>(length_ - currentSampleFrame_), RENDER_QUANTUM_SIZE);
|
|
79
|
+
|
|
80
|
+
destination_->renderAudio(audioBus, framesToProcess);
|
|
81
|
+
|
|
82
|
+
for (int i = 0; i < framesToProcess; i++) {
|
|
83
|
+
for (int channel = 0; channel < numberOfChannels_; channel += 1) {
|
|
84
|
+
resultBus_->getChannel(channel)->getData()[currentSampleFrame_ + i] =
|
|
85
|
+
audioBus->getChannel(channel)->getData()[i];
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
currentSampleFrame_ += framesToProcess;
|
|
90
|
+
|
|
91
|
+
// Execute scheduled suspend if exists
|
|
92
|
+
auto suspend = scheduledSuspends_.find(currentSampleFrame_);
|
|
93
|
+
if (suspend != scheduledSuspends_.end()) {
|
|
94
|
+
assert(currentSampleFrame_ < length_);
|
|
95
|
+
auto callback = suspend->second;
|
|
96
|
+
scheduledSuspends_.erase(currentSampleFrame_);
|
|
97
|
+
state_ = ContextState::SUSPENDED;
|
|
98
|
+
callback();
|
|
99
|
+
return;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
// Rendering completed
|
|
104
|
+
auto buffer = std::make_shared<AudioBuffer>(resultBus_);
|
|
105
|
+
resultCallback_(buffer);
|
|
106
|
+
}).detach();
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
void OfflineAudioContext::startRendering(
|
|
110
|
+
OfflineAudioContextResultCallback callback) {
|
|
111
|
+
Locker locker(mutex_);
|
|
112
|
+
|
|
113
|
+
resultCallback_ = std::move(callback);
|
|
114
|
+
renderAudio();
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
} // namespace audioapi
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
#pragma once
|
|
2
|
+
|
|
3
|
+
#include "BaseAudioContext.h"
|
|
4
|
+
|
|
5
|
+
#include <mutex>
|
|
6
|
+
#include <map>
|
|
7
|
+
#include <unordered_map>
|
|
8
|
+
#include <memory>
|
|
9
|
+
|
|
10
|
+
namespace audioapi {
|
|
11
|
+
|
|
12
|
+
using OfflineAudioContextSuspendCallback = std::function<void()>;
|
|
13
|
+
using OfflineAudioContextResultCallback = std::function<void(std::shared_ptr<AudioBuffer>)>;
|
|
14
|
+
|
|
15
|
+
class OfflineAudioContext : public BaseAudioContext {
|
|
16
|
+
public:
|
|
17
|
+
explicit OfflineAudioContext(int numberOfChannels, size_t length, float sampleRate);
|
|
18
|
+
~OfflineAudioContext() override;
|
|
19
|
+
|
|
20
|
+
void resume();
|
|
21
|
+
void suspend(double when, const OfflineAudioContextSuspendCallback& callback);
|
|
22
|
+
|
|
23
|
+
void startRendering(OfflineAudioContextResultCallback callback);
|
|
24
|
+
|
|
25
|
+
private:
|
|
26
|
+
std::mutex mutex_;
|
|
27
|
+
|
|
28
|
+
std::unordered_map<size_t, OfflineAudioContextSuspendCallback> scheduledSuspends_;
|
|
29
|
+
OfflineAudioContextResultCallback resultCallback_;
|
|
30
|
+
|
|
31
|
+
size_t length_;
|
|
32
|
+
int numberOfChannels_;
|
|
33
|
+
size_t currentSampleFrame_;
|
|
34
|
+
|
|
35
|
+
std::shared_ptr<AudioBus> resultBus_;
|
|
36
|
+
|
|
37
|
+
void renderAudio();
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
} // namespace audioapi
|
|
@@ -181,7 +181,7 @@ void AudioBufferSourceNode::processWithoutPitchCorrection(
|
|
|
181
181
|
auto computedPlaybackRate = getComputedPlaybackRateValue();
|
|
182
182
|
updatePlaybackInfo(processingBus, framesToProcess, startOffset, offsetLength);
|
|
183
183
|
|
|
184
|
-
if (computedPlaybackRate == 0.0f || !isPlaying()) {
|
|
184
|
+
if (computedPlaybackRate == 0.0f || (!isPlaying() && !isStopScheduled())) {
|
|
185
185
|
processingBus->zero();
|
|
186
186
|
return;
|
|
187
187
|
}
|
|
@@ -215,7 +215,7 @@ void AudioBufferSourceNode::processWithPitchCorrection(
|
|
|
215
215
|
updatePlaybackInfo(
|
|
216
216
|
playbackRateBus_, framesNeededToStretch, startOffset, offsetLength);
|
|
217
217
|
|
|
218
|
-
if (playbackRate == 0.0f || !isPlaying()) {
|
|
218
|
+
if (playbackRate == 0.0f || (!isPlaying() && !isStopScheduled())) {
|
|
219
219
|
processingBus->zero();
|
|
220
220
|
return;
|
|
221
221
|
}
|
|
@@ -71,7 +71,7 @@ void AudioScheduledSourceNode::updatePlaybackInfo(
|
|
|
71
71
|
std::max(dsp::timeToSampleFrame(startTime_, sampleRate), firstFrame);
|
|
72
72
|
size_t stopFrame = stopTime_ == -1.0
|
|
73
73
|
? std::numeric_limits<size_t>::max()
|
|
74
|
-
:
|
|
74
|
+
: dsp::timeToSampleFrame(stopTime_, sampleRate);
|
|
75
75
|
|
|
76
76
|
if (isUnscheduled() || isFinished()) {
|
|
77
77
|
startOffset = 0;
|
|
@@ -93,7 +93,18 @@ void AudioScheduledSourceNode::updatePlaybackInfo(
|
|
|
93
93
|
startOffset = std::max(startFrame, firstFrame) - firstFrame > 0
|
|
94
94
|
? std::max(startFrame, firstFrame) - firstFrame
|
|
95
95
|
: 0;
|
|
96
|
-
nonSilentFramesToProcess =
|
|
96
|
+
nonSilentFramesToProcess =
|
|
97
|
+
std::max(std::min(lastFrame, stopFrame), startFrame) - startFrame;
|
|
98
|
+
|
|
99
|
+
assert(startOffset <= framesToProcess);
|
|
100
|
+
assert(nonSilentFramesToProcess <= framesToProcess);
|
|
101
|
+
|
|
102
|
+
// stop will happen in the same render quantum
|
|
103
|
+
if (stopFrame < lastFrame && stopFrame >= firstFrame) {
|
|
104
|
+
playbackState_ = PlaybackState::STOP_SCHEDULED;
|
|
105
|
+
processingBus->zero(stopFrame - firstFrame, lastFrame - stopFrame);
|
|
106
|
+
}
|
|
107
|
+
|
|
97
108
|
processingBus->zero(0, startOffset);
|
|
98
109
|
return;
|
|
99
110
|
}
|
|
@@ -106,10 +117,25 @@ void AudioScheduledSourceNode::updatePlaybackInfo(
|
|
|
106
117
|
playbackState_ = PlaybackState::STOP_SCHEDULED;
|
|
107
118
|
startOffset = 0;
|
|
108
119
|
nonSilentFramesToProcess = stopFrame - firstFrame;
|
|
120
|
+
|
|
121
|
+
assert(startOffset <= framesToProcess);
|
|
122
|
+
assert(nonSilentFramesToProcess <= framesToProcess);
|
|
123
|
+
|
|
109
124
|
processingBus->zero(stopFrame - firstFrame, lastFrame - stopFrame);
|
|
110
125
|
return;
|
|
111
126
|
}
|
|
112
127
|
|
|
128
|
+
// mark as finished in first silent render quantum
|
|
129
|
+
if (stopFrame < firstFrame) {
|
|
130
|
+
startOffset = 0;
|
|
131
|
+
nonSilentFramesToProcess = 0;
|
|
132
|
+
|
|
133
|
+
playbackState_ = PlaybackState::STOP_SCHEDULED;
|
|
134
|
+
handleStopScheduled();
|
|
135
|
+
playbackState_ = PlaybackState::FINISHED;
|
|
136
|
+
return;
|
|
137
|
+
}
|
|
138
|
+
|
|
113
139
|
// normal "mid-buffer" playback
|
|
114
140
|
startOffset = 0;
|
|
115
141
|
nonSilentFramesToProcess = framesToProcess;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
#include <audioapi/core/AudioNode.h>
|
|
2
|
+
#include <audioapi/core/utils/AudioNodeDestructor.h>
|
|
3
|
+
#include <audioapi/core/utils/Locker.h>
|
|
4
|
+
|
|
5
|
+
namespace audioapi {
|
|
6
|
+
|
|
7
|
+
AudioNodeDestructor::AudioNodeDestructor()
|
|
8
|
+
: thread_(std::thread(&AudioNodeDestructor::process, this)),
|
|
9
|
+
isExiting_(false) {}
|
|
10
|
+
|
|
11
|
+
AudioNodeDestructor::~AudioNodeDestructor() {
|
|
12
|
+
isExiting_ = true;
|
|
13
|
+
|
|
14
|
+
cv_.notify_one(); // call process for the last time
|
|
15
|
+
if (thread_.joinable()) {
|
|
16
|
+
thread_.join();
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
void AudioNodeDestructor::tryCallWithLock(
|
|
21
|
+
const std::function<void()> &callback) {
|
|
22
|
+
if (auto lock = Locker::tryLock(mutex_)) {
|
|
23
|
+
callback();
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
void AudioNodeDestructor::addNodeForDeconstruction(
|
|
28
|
+
const std::shared_ptr<AudioNode> &node) {
|
|
29
|
+
// NOTE: this method must be called within `tryCallWithLock`
|
|
30
|
+
nodesForDeconstruction_.emplace_back(node);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
void AudioNodeDestructor::notify() {
|
|
34
|
+
cv_.notify_one();
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
void AudioNodeDestructor::process() {
|
|
38
|
+
std::unique_lock<std::mutex> lock(mutex_);
|
|
39
|
+
while (!isExiting_) {
|
|
40
|
+
cv_.wait(lock, [this] {
|
|
41
|
+
return isExiting_ || !nodesForDeconstruction_.empty();
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
if (isExiting_)
|
|
45
|
+
break;
|
|
46
|
+
|
|
47
|
+
if (!isExiting_ && !nodesForDeconstruction_.empty()) {
|
|
48
|
+
nodesForDeconstruction_.clear();
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
} // namespace audioapi
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
#pragma once
|
|
2
|
+
|
|
3
|
+
#include <condition_variable>
|
|
4
|
+
#include <mutex>
|
|
5
|
+
#include <thread>
|
|
6
|
+
#include <atomic>
|
|
7
|
+
#include <vector>
|
|
8
|
+
#include <memory>
|
|
9
|
+
|
|
10
|
+
namespace audioapi {
|
|
11
|
+
|
|
12
|
+
class AudioNode;
|
|
13
|
+
|
|
14
|
+
class AudioNodeDestructor {
|
|
15
|
+
public:
|
|
16
|
+
AudioNodeDestructor();
|
|
17
|
+
~AudioNodeDestructor();
|
|
18
|
+
|
|
19
|
+
void tryCallWithLock(const std::function<void()> &callback);
|
|
20
|
+
void addNodeForDeconstruction(const std::shared_ptr<AudioNode> &node);
|
|
21
|
+
void notify();
|
|
22
|
+
|
|
23
|
+
private:
|
|
24
|
+
mutable std::mutex mutex_;
|
|
25
|
+
std::thread thread_;
|
|
26
|
+
std::condition_variable cv_;
|
|
27
|
+
std::vector<std::shared_ptr<AudioNode>> nodesForDeconstruction_;
|
|
28
|
+
|
|
29
|
+
std::atomic<bool> isExiting_;
|
|
30
|
+
|
|
31
|
+
void process();
|
|
32
|
+
};
|
|
33
|
+
} // namespace audioapi
|
|
@@ -55,17 +55,20 @@ void AudioNodeManager::settlePendingConnections() {
|
|
|
55
55
|
}
|
|
56
56
|
|
|
57
57
|
void AudioNodeManager::prepareNodesForDestruction() {
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
58
|
+
nodeDeconstructor_.tryCallWithLock([this]() {
|
|
59
|
+
auto it = nodes_.begin();
|
|
60
|
+
|
|
61
|
+
while (it != nodes_.end()) {
|
|
62
|
+
if (it->use_count() == 1) {
|
|
63
|
+
nodeDeconstructor_.addNodeForDeconstruction(*it);
|
|
64
|
+
it->get()->cleanup();
|
|
65
|
+
it = nodes_.erase(it);
|
|
66
|
+
} else {
|
|
67
|
+
++it;
|
|
68
|
+
}
|
|
67
69
|
}
|
|
68
|
-
}
|
|
70
|
+
});
|
|
71
|
+
nodeDeconstructor_.notify();
|
|
69
72
|
}
|
|
70
73
|
|
|
71
74
|
void AudioNodeManager::cleanup() {
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
#pragma once
|
|
2
2
|
|
|
3
|
+
#include <audioapi/core/utils/AudioNodeDestructor.h>
|
|
4
|
+
|
|
3
5
|
#include <memory>
|
|
4
6
|
#include <mutex>
|
|
5
7
|
#include <tuple>
|
|
@@ -31,6 +33,7 @@ class AudioNodeManager {
|
|
|
31
33
|
|
|
32
34
|
private:
|
|
33
35
|
std::mutex graphLock_;
|
|
36
|
+
AudioNodeDestructor nodeDeconstructor_;
|
|
34
37
|
|
|
35
38
|
// all nodes created in the context
|
|
36
39
|
std::unordered_set<std::shared_ptr<AudioNode>> nodes_;
|
package/lib/module/api.js
CHANGED
|
@@ -6,7 +6,7 @@ import NativeAudioAPIModule from "./specs/NativeAudioAPIModule.js";
|
|
|
6
6
|
|
|
7
7
|
/* eslint-disable no-var */
|
|
8
8
|
|
|
9
|
-
if (global.createAudioContext == null) {
|
|
9
|
+
if (global.createAudioContext == null || global.createOfflineAudioContext == null) {
|
|
10
10
|
if (!NativeAudioAPIModule) {
|
|
11
11
|
throw new Error(`Failed to install react-native-audio-api: The native module could not be found.`);
|
|
12
12
|
}
|
|
@@ -15,6 +15,7 @@ if (global.createAudioContext == null) {
|
|
|
15
15
|
export { default as AudioBuffer } from "./core/AudioBuffer.js";
|
|
16
16
|
export { default as AudioBufferSourceNode } from "./core/AudioBufferSourceNode.js";
|
|
17
17
|
export { default as AudioContext } from "./core/AudioContext.js";
|
|
18
|
+
export { default as OfflineAudioContext } from "./core/OfflineAudioContext.js";
|
|
18
19
|
export { default as AudioDestinationNode } from "./core/AudioDestinationNode.js";
|
|
19
20
|
export { default as AudioNode } from "./core/AudioNode.js";
|
|
20
21
|
export { default as AnalyserNode } from "./core/AnalyserNode.js";
|
package/lib/module/api.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["NativeAudioAPIModule","global","createAudioContext","Error","install","default","AudioBuffer","AudioBufferSourceNode","AudioContext","AudioDestinationNode","AudioNode","AnalyserNode","AudioParam","AudioScheduledSourceNode","BaseAudioContext","BiquadFilterNode","GainNode","OscillatorNode","StereoPannerNode","OscillatorType","BiquadFilterType","ChannelCountMode","ChannelInterpretation","ContextState","WindowType","PeriodicWaveConstraints","IndexSizeError","InvalidAccessError","InvalidStateError","RangeError","NotSupportedError"],"sourceRoot":"../../src","sources":["api.ts"],"mappings":";;AAAA,OAAOA,oBAAoB,MAAM,iCAA8B;;AAG/D;;
|
|
1
|
+
{"version":3,"names":["NativeAudioAPIModule","global","createAudioContext","createOfflineAudioContext","Error","install","default","AudioBuffer","AudioBufferSourceNode","AudioContext","OfflineAudioContext","AudioDestinationNode","AudioNode","AnalyserNode","AudioParam","AudioScheduledSourceNode","BaseAudioContext","BiquadFilterNode","GainNode","OscillatorNode","StereoPannerNode","OscillatorType","BiquadFilterType","ChannelCountMode","ChannelInterpretation","ContextState","WindowType","PeriodicWaveConstraints","IndexSizeError","InvalidAccessError","InvalidStateError","RangeError","NotSupportedError"],"sourceRoot":"../../src","sources":["api.ts"],"mappings":";;AAAA,OAAOA,oBAAoB,MAAM,iCAA8B;;AAG/D;;AASA;;AAEA,IACEC,MAAM,CAACC,kBAAkB,IAAI,IAAI,IACjCD,MAAM,CAACE,yBAAyB,IAAI,IAAI,EACxC;EACA,IAAI,CAACH,oBAAoB,EAAE;IACzB,MAAM,IAAII,KAAK,CACb,iFACF,CAAC;EACH;EAEAJ,oBAAoB,CAACK,OAAO,CAAC,CAAC;AAChC;AAEA,SAASC,OAAO,IAAIC,WAAW,QAAQ,uBAAoB;AAC3D,SAASD,OAAO,IAAIE,qBAAqB,QAAQ,iCAA8B;AAC/E,SAASF,OAAO,IAAIG,YAAY,QAAQ,wBAAqB;AAC7D,SAASH,OAAO,IAAII,mBAAmB,QAAQ,+BAA4B;AAC3E,SAASJ,OAAO,IAAIK,oBAAoB,QAAQ,gCAA6B;AAC7E,SAASL,OAAO,IAAIM,SAAS,QAAQ,qBAAkB;AACvD,SAASN,OAAO,IAAIO,YAAY,QAAQ,wBAAqB;AAC7D,SAASP,OAAO,IAAIQ,UAAU,QAAQ,sBAAmB;AACzD,SAASR,OAAO,IAAIS,wBAAwB,QAAQ,oCAAiC;AACrF,SAAST,OAAO,IAAIU,gBAAgB,QAAQ,4BAAyB;AACrE,SAASV,OAAO,IAAIW,gBAAgB,QAAQ,4BAAyB;AACrE,SAASX,OAAO,IAAIY,QAAQ,QAAQ,oBAAiB;AACrD,SAASZ,OAAO,IAAIa,cAAc,QAAQ,0BAAuB;AACjE,SAASb,OAAO,IAAIc,gBAAgB,QAAQ,4BAAyB;AAErE,SACEC,cAAc,EACdC,gBAAgB,EAChBC,gBAAgB,EAChBC,qBAAqB,EACrBC,YAAY,EACZC,UAAU,EACVC,uBAAuB,QAClB,YAAS;AAEhB,SACEC,cAAc,EACdC,kBAAkB,EAClBC,iBAAiB,EACjBC,UAAU,EACVC,iBAAiB,QACZ,mBAAU","ignoreList":[]}
|
package/lib/module/api.web.js
CHANGED
|
@@ -3,6 +3,7 @@
|
|
|
3
3
|
export { default as AudioBuffer } from "./web-core/AudioBuffer.js";
|
|
4
4
|
export { default as AudioBufferSourceNode } from "./web-core/AudioBufferSourceNode.js";
|
|
5
5
|
export { default as AudioContext } from "./web-core/AudioContext.js";
|
|
6
|
+
export { default as OfflineAudioContext } from "./web-core/OfflineAudioContext.js";
|
|
6
7
|
export { default as AudioDestinationNode } from "./web-core/AudioDestinationNode.js";
|
|
7
8
|
export { default as AudioNode } from "./web-core/AudioNode.js";
|
|
8
9
|
export { default as AnalyserNode } from "./web-core/AnalyserNode.js";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["default","AudioBuffer","AudioBufferSourceNode","AudioContext","AudioDestinationNode","AudioNode","AnalyserNode","AudioParam","AudioScheduledSourceNode","BaseAudioContext","BiquadFilterNode","GainNode","OscillatorNode","StereoPannerNode","OscillatorType","BiquadFilterType","ChannelCountMode","ChannelInterpretation","ContextState","WindowType","PeriodicWaveConstraints","IndexSizeError","InvalidAccessError","InvalidStateError","RangeError","NotSupportedError"],"sourceRoot":"../../src","sources":["api.web.ts"],"mappings":";;AAAA,SAASA,OAAO,IAAIC,WAAW,QAAQ,2BAAwB;AAC/D,SAASD,OAAO,IAAIE,qBAAqB,QAAQ,qCAAkC;AACnF,SAASF,OAAO,IAAIG,YAAY,QAAQ,4BAAyB;AACjE,SAASH,OAAO,IAAII,oBAAoB,QAAQ,oCAAiC;AACjF,
|
|
1
|
+
{"version":3,"names":["default","AudioBuffer","AudioBufferSourceNode","AudioContext","OfflineAudioContext","AudioDestinationNode","AudioNode","AnalyserNode","AudioParam","AudioScheduledSourceNode","BaseAudioContext","BiquadFilterNode","GainNode","OscillatorNode","StereoPannerNode","OscillatorType","BiquadFilterType","ChannelCountMode","ChannelInterpretation","ContextState","WindowType","PeriodicWaveConstraints","IndexSizeError","InvalidAccessError","InvalidStateError","RangeError","NotSupportedError"],"sourceRoot":"../../src","sources":["api.web.ts"],"mappings":";;AAAA,SAASA,OAAO,IAAIC,WAAW,QAAQ,2BAAwB;AAC/D,SAASD,OAAO,IAAIE,qBAAqB,QAAQ,qCAAkC;AACnF,SAASF,OAAO,IAAIG,YAAY,QAAQ,4BAAyB;AACjE,SAASH,OAAO,IAAII,mBAAmB,QAAQ,mCAAgC;AAC/E,SAASJ,OAAO,IAAIK,oBAAoB,QAAQ,oCAAiC;AACjF,SAASL,OAAO,IAAIM,SAAS,QAAQ,yBAAsB;AAC3D,SAASN,OAAO,IAAIO,YAAY,QAAQ,4BAAyB;AACjE,SAASP,OAAO,IAAIQ,UAAU,QAAQ,0BAAuB;AAC7D,SAASR,OAAO,IAAIS,wBAAwB,QAAQ,wCAAqC;AACzF,SAAST,OAAO,IAAIU,gBAAgB,QAAQ,gCAA6B;AACzE,SAASV,OAAO,IAAIW,gBAAgB,QAAQ,gCAA6B;AACzE,SAASX,OAAO,IAAIY,QAAQ,QAAQ,wBAAqB;AACzD,SAASZ,OAAO,IAAIa,cAAc,QAAQ,8BAA2B;AACrE,SAASb,OAAO,IAAIc,gBAAgB,QAAQ,gCAA6B;AAEzE,cAAc,4BAAmB;AAEjC,SACEC,cAAc,EACdC,gBAAgB,EAChBC,gBAAgB,EAChBC,qBAAqB,EACrBC,YAAY,EACZC,UAAU,EACVC,uBAAuB,QAClB,YAAS;AAEhB,SACEC,cAAc,EACdC,kBAAkB,EAClBC,iBAAiB,EACjBC,UAAU,EACVC,iBAAiB,QACZ,mBAAU","ignoreList":[]}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
import BaseAudioContext from "./BaseAudioContext.js";
|
|
4
|
+
import { InvalidStateError, NotSupportedError } from "../errors/index.js";
|
|
5
|
+
import AudioBuffer from "./AudioBuffer.js";
|
|
6
|
+
export default class OfflineAudioContext extends BaseAudioContext {
|
|
7
|
+
constructor(arg0, arg1, arg2) {
|
|
8
|
+
if (typeof arg0 === 'object') {
|
|
9
|
+
const {
|
|
10
|
+
numberOfChannels,
|
|
11
|
+
length,
|
|
12
|
+
sampleRate
|
|
13
|
+
} = arg0;
|
|
14
|
+
super(global.createOfflineAudioContext(numberOfChannels, length, sampleRate));
|
|
15
|
+
this.duration = length / sampleRate;
|
|
16
|
+
} else if (typeof arg0 === 'number' && typeof arg1 === 'number' && typeof arg2 === 'number') {
|
|
17
|
+
super(global.createOfflineAudioContext(arg0, arg1, arg2));
|
|
18
|
+
this.duration = arg1 / arg2;
|
|
19
|
+
} else {
|
|
20
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
21
|
+
}
|
|
22
|
+
this.isSuspended = false;
|
|
23
|
+
this.isRendering = false;
|
|
24
|
+
}
|
|
25
|
+
async resume() {
|
|
26
|
+
if (!this.isRendering) {
|
|
27
|
+
throw new InvalidStateError('Cannot resume an OfflineAudioContext while rendering');
|
|
28
|
+
}
|
|
29
|
+
if (!this.isSuspended) {
|
|
30
|
+
throw new InvalidStateError('Cannot resume an OfflineAudioContext that is not suspended');
|
|
31
|
+
}
|
|
32
|
+
this.isSuspended = false;
|
|
33
|
+
await this.context.resume();
|
|
34
|
+
}
|
|
35
|
+
async suspend(suspendTime) {
|
|
36
|
+
if (suspendTime < 0) {
|
|
37
|
+
throw new InvalidStateError('suspendTime must be a non-negative number');
|
|
38
|
+
}
|
|
39
|
+
if (suspendTime < this.context.currentTime) {
|
|
40
|
+
throw new InvalidStateError(`suspendTime must be greater than the current time: ${suspendTime}`);
|
|
41
|
+
}
|
|
42
|
+
if (suspendTime > this.duration) {
|
|
43
|
+
throw new InvalidStateError(`suspendTime must be less than the duration of the context: ${suspendTime}`);
|
|
44
|
+
}
|
|
45
|
+
this.isSuspended = true;
|
|
46
|
+
await this.context.suspend(suspendTime);
|
|
47
|
+
}
|
|
48
|
+
async startRendering() {
|
|
49
|
+
if (this.isRendering) {
|
|
50
|
+
throw new InvalidStateError('OfflineAudioContext is already rendering');
|
|
51
|
+
}
|
|
52
|
+
this.isRendering = true;
|
|
53
|
+
const audioBuffer = await this.context.startRendering();
|
|
54
|
+
return new AudioBuffer(audioBuffer);
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
//# sourceMappingURL=OfflineAudioContext.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["BaseAudioContext","InvalidStateError","NotSupportedError","AudioBuffer","OfflineAudioContext","constructor","arg0","arg1","arg2","numberOfChannels","length","sampleRate","global","createOfflineAudioContext","duration","isSuspended","isRendering","resume","context","suspend","suspendTime","currentTime","startRendering","audioBuffer"],"sourceRoot":"../../../src","sources":["core/OfflineAudioContext.ts"],"mappings":";;AACA,OAAOA,gBAAgB,MAAM,uBAAoB;AAEjD,SAASC,iBAAiB,EAAEC,iBAAiB,QAAQ,oBAAW;AAChE,OAAOC,WAAW,MAAM,kBAAe;AAEvC,eAAe,MAAMC,mBAAmB,SAASJ,gBAAgB,CAAC;EAOhEK,WAAWA,CACTC,IAAyC,EACzCC,IAAa,EACbC,IAAa,EACb;IACA,IAAI,OAAOF,IAAI,KAAK,QAAQ,EAAE;MAC5B,MAAM;QAAEG,gBAAgB;QAAEC,MAAM;QAAEC;MAAW,CAAC,GAAGL,IAAI;MACrD,KAAK,CACHM,MAAM,CAACC,yBAAyB,CAACJ,gBAAgB,EAAEC,MAAM,EAAEC,UAAU,CACvE,CAAC;MAED,IAAI,CAACG,QAAQ,GAAGJ,MAAM,GAAGC,UAAU;IACrC,CAAC,MAAM,IACL,OAAOL,IAAI,KAAK,QAAQ,IACxB,OAAOC,IAAI,KAAK,QAAQ,IACxB,OAAOC,IAAI,KAAK,QAAQ,EACxB;MACA,KAAK,CAACI,MAAM,CAACC,yBAAyB,CAACP,IAAI,EAAEC,IAAI,EAAEC,IAAI,CAAC,CAAC;MACzD,IAAI,CAACM,QAAQ,GAAGP,IAAI,GAAGC,IAAI;IAC7B,CAAC,MAAM;MACL,MAAM,IAAIN,iBAAiB,CAAC,+BAA+B,CAAC;IAC9D;IAEA,IAAI,CAACa,WAAW,GAAG,KAAK;IACxB,IAAI,CAACC,WAAW,GAAG,KAAK;EAC1B;EAEA,MAAMC,MAAMA,CAAA,EAAuB;IACjC,IAAI,CAAC,IAAI,CAACD,WAAW,EAAE;MACrB,MAAM,IAAIf,iBAAiB,CACzB,sDACF,CAAC;IACH;IAEA,IAAI,CAAC,IAAI,CAACc,WAAW,EAAE;MACrB,MAAM,IAAId,iBAAiB,CACzB,4DACF,CAAC;IACH;IAEA,IAAI,CAACc,WAAW,GAAG,KAAK;IAExB,MAAO,IAAI,CAACG,OAAO,CAA0BD,MAAM,CAAC,CAAC;EACvD;EAEA,MAAME,OAAOA,CAACC,WAAmB,EAAsB;IACrD,IAAIA,WAAW,GAAG,CAAC,EAAE;MACnB,MAAM,IAAInB,iBAAiB,CAAC,2CAA2C,CAAC;IAC1E;IAEA,IAAImB,WAAW,GAAG,IAAI,CAACF,OAAO,CAACG,WAAW,EAAE;MAC1C,MAAM,IAAIpB,iBAAiB,CACzB,sDAAsDmB,WAAW,EACnE,CAAC;IACH;IAEA,IAAIA,WAAW,GAAG,IAAI,CAACN,QAAQ,EAAE;MAC/B,MAAM,IAAIb,iBAAiB,CACzB,8DAA8DmB,WAAW,EAC3E,CAAC;IACH;IAEA,IAAI,CAACL,WAAW,GAAG,IAAI;IAEvB,MAAO,IAAI,CAACG,OAAO,CAA0BC,OAAO,CAACC,WAAW,CAAC;EACnE;EAEA,MAAME,cAAcA,CAAA,EAAyB;IAC3C,IAAI,IAAI,CAACN,WAAW,EAAE;MACpB,MAAM,IAAIf,iBAAiB
,CAAC,0CAA0C,CAAC;IACzE;IAEA,IAAI,CAACe,WAAW,GAAG,IAAI;IAEvB,MAAMO,WAAW,GAAG,MAClB,IAAI,CAACL,OAAO,CACZI,cAAc,CAAC,CAAC;IAElB,OAAO,IAAInB,WAAW,CAACoB,WAAW,CAAC;EACrC;AACF","ignoreList":[]}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
import { InvalidAccessError, NotSupportedError } from "../errors/index.js";
|
|
4
|
+
import AnalyserNode from "./AnalyserNode.js";
|
|
5
|
+
import AudioDestinationNode from "./AudioDestinationNode.js";
|
|
6
|
+
import AudioBuffer from "./AudioBuffer.js";
|
|
7
|
+
import AudioBufferSourceNode from "./AudioBufferSourceNode.js";
|
|
8
|
+
import BiquadFilterNode from "./BiquadFilterNode.js";
|
|
9
|
+
import GainNode from "./GainNode.js";
|
|
10
|
+
import OscillatorNode from "./OscillatorNode.js";
|
|
11
|
+
import PeriodicWave from "./PeriodicWave.js";
|
|
12
|
+
import StereoPannerNode from "./StereoPannerNode.js";
|
|
13
|
+
import { globalWasmPromise, globalTag } from "./custom/LoadCustomWasm.js";
|
|
14
|
+
export default class OfflineAudioContext {
|
|
15
|
+
constructor(arg0, arg1, arg2) {
|
|
16
|
+
if (typeof arg0 === 'object') {
|
|
17
|
+
this.context = new window.OfflineAudioContext(arg0);
|
|
18
|
+
} else if (typeof arg0 === 'number' && typeof arg1 === 'number' && typeof arg2 === 'number') {
|
|
19
|
+
this.context = new window.OfflineAudioContext(arg0, arg1, arg2);
|
|
20
|
+
} else {
|
|
21
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
22
|
+
}
|
|
23
|
+
this.sampleRate = this.context.sampleRate;
|
|
24
|
+
this.destination = new AudioDestinationNode(this, this.context.destination);
|
|
25
|
+
}
|
|
26
|
+
get currentTime() {
|
|
27
|
+
return this.context.currentTime;
|
|
28
|
+
}
|
|
29
|
+
get state() {
|
|
30
|
+
return this.context.state;
|
|
31
|
+
}
|
|
32
|
+
createOscillator() {
|
|
33
|
+
return new OscillatorNode(this, this.context.createOscillator());
|
|
34
|
+
}
|
|
35
|
+
createGain() {
|
|
36
|
+
return new GainNode(this, this.context.createGain());
|
|
37
|
+
}
|
|
38
|
+
createStereoPanner() {
|
|
39
|
+
return new StereoPannerNode(this, this.context.createStereoPanner());
|
|
40
|
+
}
|
|
41
|
+
createBiquadFilter() {
|
|
42
|
+
return new BiquadFilterNode(this, this.context.createBiquadFilter());
|
|
43
|
+
}
|
|
44
|
+
async createBufferSource(options) {
|
|
45
|
+
if (!options || !options.pitchCorrection) {
|
|
46
|
+
return new AudioBufferSourceNode(this, this.context.createBufferSource(), false);
|
|
47
|
+
}
|
|
48
|
+
await globalWasmPromise;
|
|
49
|
+
const wasmStretch = await window[globalTag](this.context);
|
|
50
|
+
return new AudioBufferSourceNode(this, wasmStretch, true);
|
|
51
|
+
}
|
|
52
|
+
createBuffer(numOfChannels, length, sampleRate) {
|
|
53
|
+
if (numOfChannels < 1 || numOfChannels >= 32) {
|
|
54
|
+
throw new NotSupportedError(`The number of channels provided (${numOfChannels}) is outside the range [1, 32]`);
|
|
55
|
+
}
|
|
56
|
+
if (length <= 0) {
|
|
57
|
+
throw new NotSupportedError(`The number of frames provided (${length}) is less than or equal to the minimum bound (0)`);
|
|
58
|
+
}
|
|
59
|
+
if (sampleRate < 8000 || sampleRate > 96000) {
|
|
60
|
+
throw new NotSupportedError(`The sample rate provided (${sampleRate}) is outside the range [8000, 96000]`);
|
|
61
|
+
}
|
|
62
|
+
return new AudioBuffer(this.context.createBuffer(numOfChannels, length, sampleRate));
|
|
63
|
+
}
|
|
64
|
+
createPeriodicWave(real, imag, constraints) {
|
|
65
|
+
if (real.length !== imag.length) {
|
|
66
|
+
throw new InvalidAccessError(`The lengths of the real (${real.length}) and imaginary (${imag.length}) arrays must match.`);
|
|
67
|
+
}
|
|
68
|
+
return new PeriodicWave(this.context.createPeriodicWave(real, imag, constraints));
|
|
69
|
+
}
|
|
70
|
+
createAnalyser() {
|
|
71
|
+
return new AnalyserNode(this, this.context.createAnalyser());
|
|
72
|
+
}
|
|
73
|
+
async decodeAudioDataSource(source) {
|
|
74
|
+
const arrayBuffer = await fetch(source).then(response => response.arrayBuffer());
|
|
75
|
+
return this.decodeAudioData(arrayBuffer);
|
|
76
|
+
}
|
|
77
|
+
async decodeAudioData(arrayBuffer) {
|
|
78
|
+
return new AudioBuffer(await this.context.decodeAudioData(arrayBuffer));
|
|
79
|
+
}
|
|
80
|
+
async startRendering() {
|
|
81
|
+
return new AudioBuffer(await this.context.startRendering());
|
|
82
|
+
}
|
|
83
|
+
async resume() {
|
|
84
|
+
await this.context.resume();
|
|
85
|
+
}
|
|
86
|
+
async suspend(suspendTime) {
|
|
87
|
+
await this.context.suspend(suspendTime);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
//# sourceMappingURL=OfflineAudioContext.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["InvalidAccessError","NotSupportedError","AnalyserNode","AudioDestinationNode","AudioBuffer","AudioBufferSourceNode","BiquadFilterNode","GainNode","OscillatorNode","PeriodicWave","StereoPannerNode","globalWasmPromise","globalTag","OfflineAudioContext","constructor","arg0","arg1","arg2","context","window","sampleRate","destination","currentTime","state","createOscillator","createGain","createStereoPanner","createBiquadFilter","createBufferSource","options","pitchCorrection","wasmStretch","createBuffer","numOfChannels","length","createPeriodicWave","real","imag","constraints","createAnalyser","decodeAudioDataSource","source","arrayBuffer","fetch","then","response","decodeAudioData","startRendering","resume","suspend","suspendTime"],"sourceRoot":"../../../src","sources":["web-core/OfflineAudioContext.tsx"],"mappings":";;AAMA,SAASA,kBAAkB,EAAEC,iBAAiB,QAAQ,oBAAW;AAEjE,OAAOC,YAAY,MAAM,mBAAgB;AACzC,OAAOC,oBAAoB,MAAM,2BAAwB;AACzD,OAAOC,WAAW,MAAM,kBAAe;AACvC,OAAOC,qBAAqB,MAAM,4BAAyB;AAC3D,OAAOC,gBAAgB,MAAM,uBAAoB;AACjD,OAAOC,QAAQ,MAAM,eAAY;AACjC,OAAOC,cAAc,MAAM,qBAAkB;AAC7C,OAAOC,YAAY,MAAM,mBAAgB;AACzC,OAAOC,gBAAgB,MAAM,uBAAoB;AAEjD,SAASC,iBAAiB,EAAEC,SAAS,QAAQ,4BAAyB;AAEtE,eAAe,MAAMC,mBAAmB,CAA6B;EAQnEC,WAAWA,CACTC,IAAyC,EACzCC,IAAa,EACbC,IAAa,EACb;IACA,IAAI,OAAOF,IAAI,KAAK,QAAQ,EAAE;MAC5B,IAAI,CAACG,OAAO,GAAG,IAAIC,MAAM,CAACN,mBAAmB,CAACE,IAAI,CAAC;IACrD,CAAC,MAAM,IACL,OAAOA,IAAI,KAAK,QAAQ,IACxB,OAAOC,IAAI,KAAK,QAAQ,IACxB,OAAOC,IAAI,KAAK,QAAQ,EACxB;MACA,IAAI,CAACC,OAAO,GAAG,IAAIC,MAAM,CAACN,mBAAmB,CAACE,IAAI,EAAEC,IAAI,EAAEC,IAAI,CAAC;IACjE,CAAC,MAAM;MACL,MAAM,IAAIhB,iBAAiB,CAAC,+BAA+B,CAAC;IAC9D;IAEA,IAAI,CAACmB,UAAU,GAAG,IAAI,CAACF,OAAO,CAACE,UAAU;IACzC,IAAI,CAACC,WAAW,GAAG,IAAIlB,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAACe,OAAO,CAACG,WAAW,CAAC;EAC7E;EAEA,IAAWC,WAAWA,CAAA,EAAW;IAC/B,OAAO,IAAI,CAACJ,OAAO,CAACI,WAAW;EACjC;EAEA,IAAWC,KAAKA,CAAA,EAAiB;IAC/B,OAAO,IAAI,CAACL,OAAO,CAACK,KAAK;EAC3B;EAEAC,gBAAgBA,CAAA,EAAmB;IACjC,OAAO,IAAIhB,cAAc,CAAC,IAAI,EAAE,IAAI,C
AACU,OAAO,CAACM,gBAAgB,CAAC,CAAC,CAAC;EAClE;EAEAC,UAAUA,CAAA,EAAa;IACrB,OAAO,IAAIlB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAACW,OAAO,CAACO,UAAU,CAAC,CAAC,CAAC;EACtD;EAEAC,kBAAkBA,CAAA,EAAqB;IACrC,OAAO,IAAIhB,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAACQ,OAAO,CAACQ,kBAAkB,CAAC,CAAC,CAAC;EACtE;EAEAC,kBAAkBA,CAAA,EAAqB;IACrC,OAAO,IAAIrB,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAACY,OAAO,CAACS,kBAAkB,CAAC,CAAC,CAAC;EACtE;EAEA,MAAMC,kBAAkBA,CACtBC,OAAsC,EACN;IAChC,IAAI,CAACA,OAAO,IAAI,CAACA,OAAO,CAACC,eAAe,EAAE;MACxC,OAAO,IAAIzB,qBAAqB,CAC9B,IAAI,EACJ,IAAI,CAACa,OAAO,CAACU,kBAAkB,CAAC,CAAC,EACjC,KACF,CAAC;IACH;IAEA,MAAMjB,iBAAiB;IAEvB,MAAMoB,WAAW,GAAG,MAAMZ,MAAM,CAACP,SAAS,CAAC,CAAC,IAAI,CAACM,OAAO,CAAC;IAEzD,OAAO,IAAIb,qBAAqB,CAAC,IAAI,EAAE0B,WAAW,EAAE,IAAI,CAAC;EAC3D;EAEAC,YAAYA,CACVC,aAAqB,EACrBC,MAAc,EACdd,UAAkB,EACL;IACb,IAAIa,aAAa,GAAG,CAAC,IAAIA,aAAa,IAAI,EAAE,EAAE;MAC5C,MAAM,IAAIhC,iBAAiB,CACzB,oCAAoCgC,aAAa,gCACnD,CAAC;IACH;IAEA,IAAIC,MAAM,IAAI,CAAC,EAAE;MACf,MAAM,IAAIjC,iBAAiB,CACzB,kCAAkCiC,MAAM,kDAC1C,CAAC;IACH;IAEA,IAAId,UAAU,GAAG,IAAI,IAAIA,UAAU,GAAG,KAAK,EAAE;MAC3C,MAAM,IAAInB,iBAAiB,CACzB,6BAA6BmB,UAAU,sCACzC,CAAC;IACH;IAEA,OAAO,IAAIhB,WAAW,CACpB,IAAI,CAACc,OAAO,CAACc,YAAY,CAACC,aAAa,EAAEC,MAAM,EAAEd,UAAU,CAC7D,CAAC;EACH;EAEAe,kBAAkBA,CAChBC,IAAkB,EAClBC,IAAkB,EAClBC,WAAqC,EACvB;IACd,IAAIF,IAAI,CAACF,MAAM,KAAKG,IAAI,CAACH,MAAM,EAAE;MAC/B,MAAM,IAAIlC,kBAAkB,CAC1B,4BAA4BoC,IAAI,CAACF,MAAM,oBAAoBG,IAAI,CAACH,MAAM,sBACxE,CAAC;IACH;IAEA,OAAO,IAAIzB,YAAY,CACrB,IAAI,CAACS,OAAO,CAACiB,kBAAkB,CAACC,IAAI,EAAEC,IAAI,EAAEC,WAAW,CACzD,CAAC;EACH;EAEAC,cAAcA,CAAA,EAAiB;IAC7B,OAAO,IAAIrC,YAAY,CAAC,IAAI,EAAE,IAAI,CAACgB,OAAO,CAACqB,cAAc,CAAC,CAAC,CAAC;EAC9D;EAEA,MAAMC,qBAAqBA,CAACC,MAAc,EAAwB;IAChE,MAAMC,WAAW,GAAG,MAAMC,KAAK,CAACF,MAAM,CAAC,CAACG,IAAI,CAAEC,QAAQ,IACpDA,QAAQ,CAACH,WAAW,CAAC,CACvB,CAAC;IAED,OAAO,IAAI,CAACI,eAAe,CAACJ,WAAW,CAAC;EAC1C;EAEA,MAAMI,eAAeA,CAACJ,WAAwB,EAAwB;IACpE,OAAO,IAAItC,WAAW,CAAC,MAAM,IAAI,CAACc,OAAO,CAAC4B,eAAe,CAACJ,WAAW,CAAC,CAAC;EACzE;EAEA,MAAMK,cAAcA,CAAA,EAAyB;IAC3C,OAAO,IAA
I3C,WAAW,CAAC,MAAM,IAAI,CAACc,OAAO,CAAC6B,cAAc,CAAC,CAAC,CAAC;EAC7D;EAEA,MAAMC,MAAMA,CAAA,EAAkB;IAC5B,MAAM,IAAI,CAAC9B,OAAO,CAAC8B,MAAM,CAAC,CAAC;EAC7B;EAEA,MAAMC,OAAOA,CAACC,WAAmB,EAAiB;IAChD,MAAM,IAAI,CAAChC,OAAO,CAAC+B,OAAO,CAACC,WAAW,CAAC;EACzC;AACF","ignoreList":[]}
|
package/lib/typescript/api.d.ts
CHANGED
|
@@ -1,10 +1,12 @@
|
|
|
1
|
-
import type { IAudioContext } from './interfaces';
|
|
1
|
+
import type { IAudioContext, IOfflineAudioContext } from './interfaces';
|
|
2
2
|
declare global {
|
|
3
3
|
var createAudioContext: (sampleRate?: number) => IAudioContext;
|
|
4
|
+
var createOfflineAudioContext: (numberOfChannels: number, length: number, sampleRate: number) => IOfflineAudioContext;
|
|
4
5
|
}
|
|
5
6
|
export { default as AudioBuffer } from './core/AudioBuffer';
|
|
6
7
|
export { default as AudioBufferSourceNode } from './core/AudioBufferSourceNode';
|
|
7
8
|
export { default as AudioContext } from './core/AudioContext';
|
|
9
|
+
export { default as OfflineAudioContext } from './core/OfflineAudioContext';
|
|
8
10
|
export { default as AudioDestinationNode } from './core/AudioDestinationNode';
|
|
9
11
|
export { default as AudioNode } from './core/AudioNode';
|
|
10
12
|
export { default as AnalyserNode } from './core/AnalyserNode';
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"api.d.ts","sourceRoot":"","sources":["../../src/api.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;
|
|
1
|
+
{"version":3,"file":"api.d.ts","sourceRoot":"","sources":["../../src/api.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,oBAAoB,EAAE,MAAM,cAAc,CAAC;AAGxE,OAAO,CAAC,MAAM,CAAC;IACb,IAAI,kBAAkB,EAAE,CAAC,UAAU,CAAC,EAAE,MAAM,KAAK,aAAa,CAAC;IAC/D,IAAI,yBAAyB,EAAE,CAC7B,gBAAgB,EAAE,MAAM,EACxB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,KACf,oBAAoB,CAAC;CAC3B;AAgBD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAC5D,OAAO,EAAE,OAAO,IAAI,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AAChF,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAC9D,OAAO,EAAE,OAAO,IAAI,mBAAmB,EAAE,MAAM,4BAA4B,CAAC;AAC5E,OAAO,EAAE,OAAO,IAAI,oBAAoB,EAAE,MAAM,6BAA6B,CAAC;AAC9E,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,kBAAkB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAC9D,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAC1D,OAAO,EAAE,OAAO,IAAI,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AACtF,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAEtE,OAAO,EACL,cAAc,EACd,gBAAgB,EAChB,gBAAgB,EAChB,qBAAqB,EACrB,YAAY,EACZ,UAAU,EACV,uBAAuB,GACxB,MAAM,SAAS,CAAC;AAEjB,OAAO,EACL,cAAc,EACd,kBAAkB,EAClB,iBAAiB,EACjB,UAAU,EACV,iBAAiB,GAClB,MAAM,UAAU,CAAC"}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
export { default as AudioBuffer } from './web-core/AudioBuffer';
|
|
2
2
|
export { default as AudioBufferSourceNode } from './web-core/AudioBufferSourceNode';
|
|
3
3
|
export { default as AudioContext } from './web-core/AudioContext';
|
|
4
|
+
export { default as OfflineAudioContext } from './web-core/OfflineAudioContext';
|
|
4
5
|
export { default as AudioDestinationNode } from './web-core/AudioDestinationNode';
|
|
5
6
|
export { default as AudioNode } from './web-core/AudioNode';
|
|
6
7
|
export { default as AnalyserNode } from './web-core/AnalyserNode';
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"api.web.d.ts","sourceRoot":"","sources":["../../src/api.web.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAChE,OAAO,EAAE,OAAO,IAAI,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACpF,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAClE,OAAO,EAAE,OAAO,IAAI,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AAClF,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAClE,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,uBAAuB,CAAC;AAC9D,OAAO,EAAE,OAAO,IAAI,wBAAwB,EAAE,MAAM,qCAAqC,CAAC;AAC1F,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAC1E,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAC1E,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAC1D,OAAO,EAAE,OAAO,IAAI,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACtE,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAE1E,cAAc,mBAAmB,CAAC;AAElC,OAAO,EACL,cAAc,EACd,gBAAgB,EAChB,gBAAgB,EAChB,qBAAqB,EACrB,YAAY,EACZ,UAAU,EACV,uBAAuB,GACxB,MAAM,SAAS,CAAC;AAEjB,OAAO,EACL,cAAc,EACd,kBAAkB,EAClB,iBAAiB,EACjB,UAAU,EACV,iBAAiB,GAClB,MAAM,UAAU,CAAC"}
|
|
1
|
+
{"version":3,"file":"api.web.d.ts","sourceRoot":"","sources":["../../src/api.web.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAChE,OAAO,EAAE,OAAO,IAAI,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACpF,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAClE,OAAO,EAAE,OAAO,IAAI,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AAChF,OAAO,EAAE,OAAO,IAAI,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AAClF,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAClE,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,uBAAuB,CAAC;AAC9D,OAAO,EAAE,OAAO,IAAI,wBAAwB,EAAE,MAAM,qCAAqC,CAAC;AAC1F,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAC1E,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAC1E,OAAO,EAAE,OAAO,IAAI,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAC1D,OAAO,EAAE,OAAO,IAAI,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACtE,OAAO,EAAE,OAAO,IAAI,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAE1E,cAAc,mBAAmB,CAAC;AAElC,OAAO,EACL,cAAc,EACd,gBAAgB,EAChB,gBAAgB,EAChB,qBAAqB,EACrB,YAAY,EACZ,UAAU,EACV,uBAAuB,GACxB,MAAM,SAAS,CAAC;AAEjB,OAAO,EACL,cAAc,EACd,kBAAkB,EAClB,iBAAiB,EACjB,UAAU,EACV,iBAAiB,GAClB,MAAM,UAAU,CAAC"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
2
|
+
import { OfflineAudioContextOptions } from '../types';
|
|
3
|
+
import AudioBuffer from './AudioBuffer';
|
|
4
|
+
export default class OfflineAudioContext extends BaseAudioContext {
|
|
5
|
+
private isSuspended;
|
|
6
|
+
private isRendering;
|
|
7
|
+
private duration;
|
|
8
|
+
constructor(options: OfflineAudioContextOptions);
|
|
9
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
10
|
+
resume(): Promise<undefined>;
|
|
11
|
+
suspend(suspendTime: number): Promise<undefined>;
|
|
12
|
+
startRendering(): Promise<AudioBuffer>;
|
|
13
|
+
}
|
|
14
|
+
//# sourceMappingURL=OfflineAudioContext.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"OfflineAudioContext.d.ts","sourceRoot":"","sources":["../../../src/core/OfflineAudioContext.ts"],"names":[],"mappings":"AACA,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,0BAA0B,EAAE,MAAM,UAAU,CAAC;AAEtD,OAAO,WAAW,MAAM,eAAe,CAAC;AAExC,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,gBAAgB;IAC/D,OAAO,CAAC,WAAW,CAAU;IAC7B,OAAO,CAAC,WAAW,CAAU;IAC7B,OAAO,CAAC,QAAQ,CAAS;gBAEb,OAAO,EAAE,0BAA0B;gBACnC,gBAAgB,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM;IA4BlE,MAAM,IAAI,OAAO,CAAC,SAAS,CAAC;IAkB5B,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC;IAsBhD,cAAc,IAAI,OAAO,CAAC,WAAW,CAAC;CAa7C"}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { ContextState, BiquadFilterType, OscillatorType, ChannelCountMode, ChannelInterpretation, WindowType } from './types';
|
|
2
2
|
export interface AudioAPIInstaller {
|
|
3
3
|
createAudioContext: (sampleRate?: number) => IAudioContext;
|
|
4
|
+
createOfflineAudioContext: (numberOfChannels: number, length: number, sampleRate: number) => IAudioContext;
|
|
4
5
|
}
|
|
5
6
|
export interface IBaseAudioContext {
|
|
6
7
|
readonly destination: IAudioDestinationNode;
|
|
@@ -23,6 +24,11 @@ export interface IAudioContext extends IBaseAudioContext {
|
|
|
23
24
|
resume(): Promise<void>;
|
|
24
25
|
suspend(): Promise<void>;
|
|
25
26
|
}
|
|
27
|
+
export interface IOfflineAudioContext extends IBaseAudioContext {
|
|
28
|
+
resume(): Promise<void>;
|
|
29
|
+
suspend(suspendTime: number): Promise<void>;
|
|
30
|
+
startRendering(): Promise<IAudioBuffer>;
|
|
31
|
+
}
|
|
26
32
|
export interface IAudioNode {
|
|
27
33
|
readonly context: BaseAudioContext;
|
|
28
34
|
readonly numberOfInputs: number;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"interfaces.d.ts","sourceRoot":"","sources":["../../src/interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,gBAAgB,EAChB,cAAc,EACd,gBAAgB,EAChB,qBAAqB,EACrB,UAAU,EACX,MAAM,SAAS,CAAC;AAEjB,MAAM,WAAW,iBAAiB;IAChC,kBAAkB,EAAE,CAAC,UAAU,CAAC,EAAE,MAAM,KAAK,aAAa,CAAC;
|
|
1
|
+
{"version":3,"file":"interfaces.d.ts","sourceRoot":"","sources":["../../src/interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,gBAAgB,EAChB,cAAc,EACd,gBAAgB,EAChB,qBAAqB,EACrB,UAAU,EACX,MAAM,SAAS,CAAC;AAEjB,MAAM,WAAW,iBAAiB;IAChC,kBAAkB,EAAE,CAAC,UAAU,CAAC,EAAE,MAAM,KAAK,aAAa,CAAC;IAC3D,yBAAyB,EAAE,CACzB,gBAAgB,EAAE,MAAM,EACxB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,KACf,aAAa,CAAC;CACpB;AAED,MAAM,WAAW,iBAAiB;IAChC,QAAQ,CAAC,WAAW,EAAE,qBAAqB,CAAC;IAC5C,QAAQ,CAAC,KAAK,EAAE,YAAY,CAAC;IAC7B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAE7B,gBAAgB,IAAI,eAAe,CAAC;IACpC,UAAU,IAAI,SAAS,CAAC;IACxB,kBAAkB,IAAI,iBAAiB,CAAC;IACxC,kBAAkB,EAAE,MAAM,iBAAiB,CAAC;IAC5C,kBAAkB,EAAE,CAAC,eAAe,EAAE,OAAO,KAAK,sBAAsB,CAAC;IACzE,YAAY,EAAE,CACZ,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,KACf,YAAY,CAAC;IAClB,kBAAkB,EAAE,CAClB,IAAI,EAAE,YAAY,EAClB,IAAI,EAAE,YAAY,EAClB,oBAAoB,EAAE,OAAO,KAC1B,aAAa,CAAC;IACnB,cAAc,EAAE,MAAM,aAAa,CAAC;IACpC,qBAAqB,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,OAAO,CAAC,YAAY,CAAC,CAAC;IACrE,eAAe,EAAE,CAAC,WAAW,EAAE,WAAW,KAAK,OAAO,CAAC,YAAY,CAAC,CAAC;CACtE;AAED,MAAM,WAAW,aAAc,SAAQ,iBAAiB;IACtD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACvB,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CAC1B;AAED,MAAM,WAAW,oBAAqB,SAAQ,iBAAiB;IAC7D,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5C,cAAc,IAAI,OAAO,CAAC,YAAY,CAAC,CAAC;CACzC;AAED,MAAM,WAAW,UAAU;IACzB,QAAQ,CAAC,OAAO,EAAE,gBAAgB,CAAC;IACnC,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,eAAe,EAAE,MAAM,CAAC;IACjC,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;IAC9B,QAAQ,CAAC,gBAAgB,EAAE,gBAAgB,CAAC;IAC5C,QAAQ,CAAC,qBAAqB,EAAE,qBAAqB,CAAC;IAEtD,OAAO,EAAE,CAAC,IAAI,EAAE,UAAU,KAAK,IAAI,CAAC;IACpC,UAAU,EAAE,CAAC,IAAI,CAAC,EAAE,UAAU,KAAK,IAAI,CAAC;CACzC;AAED,MAAM,WAAW,SAAU,SAAQ,UAAU;IAC3C,QAAQ,CAAC,IAAI,EAAE,WAAW,CAAC;CAC5B;AAED,MAAM,WAAW,iBAAkB,SAAQ,UAAU;IACnD,QAAQ,CAAC,GAAG,EAAE,WAAW,CAAC;CAC3B;AAED,MAAM,WAAW,iBAAkB,SAAQ,UAAU;IACnD,QAAQ,CAAC,S
AAS,EAAE,UAAU,CAAC;IAC/B,QAAQ,CAAC,MAAM,EAAE,UAAU,CAAC;IAC5B,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC;IACvB,QAAQ,CAAC,IAAI,EAAE,UAAU,CAAC;IAC1B,IAAI,EAAE,gBAAgB,CAAC;IAEvB,oBAAoB,CAClB,cAAc,EAAE,YAAY,EAC5B,iBAAiB,EAAE,YAAY,EAC/B,mBAAmB,EAAE,YAAY,GAChC,IAAI,CAAC;CACT;AAED,MAAM,WAAW,qBAAsB,SAAQ,UAAU;CAAG;AAE5D,MAAM,WAAW,yBAA0B,SAAQ,UAAU;IAC3D,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,IAAI,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IAC7B,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,MAAM,KAAK,IAAI,GAAG,IAAI,CAAC;CACxC;AAED,MAAM,WAAW,eAAgB,SAAQ,yBAAyB;IAChE,QAAQ,CAAC,SAAS,EAAE,WAAW,CAAC;IAChC,QAAQ,CAAC,MAAM,EAAE,WAAW,CAAC;IAC7B,IAAI,EAAE,cAAc,CAAC;IAErB,eAAe,CAAC,YAAY,EAAE,aAAa,GAAG,IAAI,CAAC;CACpD;AAED,MAAM,WAAW,sBAAuB,SAAQ,yBAAyB;IACvE,MAAM,EAAE,YAAY,GAAG,IAAI,CAAC;IAC5B,IAAI,EAAE,OAAO,CAAC;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,WAAW,CAAC;IACpB,YAAY,EAAE,WAAW,CAAC;IAE1B,KAAK,EAAE,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;CACpE;AAED,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,gBAAgB,EAAE,MAAM,CAAC;IAElC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,YAAY,CAAC;IAC9C,eAAe,CACb,WAAW,EAAE,YAAY,EACzB,aAAa,EAAE,MAAM,EACrB,cAAc,EAAE,MAAM,GACrB,IAAI,CAAC;IACR,aAAa,CACX,MAAM,EAAE,YAAY,EACpB,aAAa,EAAE,MAAM,EACrB,cAAc,EAAE,MAAM,GACrB,IAAI,CAAC;CACT;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,YAAY,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IAEjB,cAAc,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3D,uBAAuB,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IAClE,4BAA4B,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvE,eAAe,EAAE,CACf,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,MAAM,EACjB,YAAY,EAAE,MAAM,KACjB,IAAI,CAAC;IACV,mBAAmB,EAAE,CACnB,MAAM,EAAE,YAAY,EACpB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,MAAM,KACb,IAAI,CAAC;IACV,qBAAqB,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IACpD,mBAAmB,EAAE,CAAC,UAAU,EAAE
,MAAM,KAAK,IAAI,CAAC;CACnD;AAED,MAAM,WAAW,aAAa;CAAG;AAEjC,MAAM,WAAW,aAAc,SAAQ,UAAU;IAC/C,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,iBAAiB,EAAE,MAAM,CAAC;IACnC,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,qBAAqB,EAAE,MAAM,CAAC;IAC9B,MAAM,EAAE,UAAU,CAAC;IAEnB,qBAAqB,EAAE,CAAC,KAAK,EAAE,YAAY,KAAK,IAAI,CAAC;IACrD,oBAAoB,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC;IAClD,sBAAsB,EAAE,CAAC,KAAK,EAAE,YAAY,KAAK,IAAI,CAAC;IACtD,qBAAqB,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC;CACpD"}
|
|
@@ -9,6 +9,11 @@ export interface PeriodicWaveConstraints {
|
|
|
9
9
|
export interface AudioContextOptions {
|
|
10
10
|
sampleRate: number;
|
|
11
11
|
}
|
|
12
|
+
export interface OfflineAudioContextOptions {
|
|
13
|
+
numberOfChannels: number;
|
|
14
|
+
length: number;
|
|
15
|
+
sampleRate: number;
|
|
16
|
+
}
|
|
12
17
|
export type WindowType = 'blackman' | 'hann';
|
|
13
18
|
export interface AudioBufferSourceNodeOptions {
|
|
14
19
|
pitchCorrection: boolean;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,gBAAgB,GAAG,KAAK,GAAG,aAAa,GAAG,UAAU,CAAC;AAElE,MAAM,MAAM,qBAAqB,GAAG,UAAU,GAAG,UAAU,CAAC;AAE5D,MAAM,MAAM,gBAAgB,GACxB,SAAS,GACT,UAAU,GACV,UAAU,GACV,UAAU,GACV,WAAW,GACX,SAAS,GACT,OAAO,GACP,SAAS,CAAC;AAEd,MAAM,MAAM,YAAY,GAAG,SAAS,GAAG,QAAQ,GAAG,WAAW,CAAC;AAE9D,MAAM,MAAM,cAAc,GACtB,MAAM,GACN,QAAQ,GACR,UAAU,GACV,UAAU,GACV,QAAQ,CAAC;AAEb,MAAM,WAAW,uBAAuB;IACtC,oBAAoB,EAAE,OAAO,CAAC;CAC/B;AAED,MAAM,WAAW,mBAAmB;IAClC,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,MAAM,UAAU,GAAG,UAAU,GAAG,MAAM,CAAC;AAE7C,MAAM,WAAW,4BAA4B;IAC3C,eAAe,EAAE,OAAO,CAAC;CAC1B"}
|
|
1
|
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,gBAAgB,GAAG,KAAK,GAAG,aAAa,GAAG,UAAU,CAAC;AAElE,MAAM,MAAM,qBAAqB,GAAG,UAAU,GAAG,UAAU,CAAC;AAE5D,MAAM,MAAM,gBAAgB,GACxB,SAAS,GACT,UAAU,GACV,UAAU,GACV,UAAU,GACV,WAAW,GACX,SAAS,GACT,OAAO,GACP,SAAS,CAAC;AAEd,MAAM,MAAM,YAAY,GAAG,SAAS,GAAG,QAAQ,GAAG,WAAW,CAAC;AAE9D,MAAM,MAAM,cAAc,GACtB,MAAM,GACN,QAAQ,GACR,UAAU,GACV,UAAU,GACV,QAAQ,CAAC;AAEb,MAAM,WAAW,uBAAuB;IACtC,oBAAoB,EAAE,OAAO,CAAC;CAC/B;AAED,MAAM,WAAW,mBAAmB;IAClC,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,0BAA0B;IACzC,gBAAgB,EAAE,MAAM,CAAC;IACzB,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,MAAM,UAAU,GAAG,UAAU,GAAG,MAAM,CAAC;AAE7C,MAAM,WAAW,4BAA4B;IAC3C,eAAe,EAAE,OAAO,CAAC;CAC1B"}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { ContextState, PeriodicWaveConstraints, OfflineAudioContextOptions, AudioBufferSourceNodeOptions } from '../types';
|
|
2
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
3
|
+
import AnalyserNode from './AnalyserNode';
|
|
4
|
+
import AudioDestinationNode from './AudioDestinationNode';
|
|
5
|
+
import AudioBuffer from './AudioBuffer';
|
|
6
|
+
import AudioBufferSourceNode from './AudioBufferSourceNode';
|
|
7
|
+
import BiquadFilterNode from './BiquadFilterNode';
|
|
8
|
+
import GainNode from './GainNode';
|
|
9
|
+
import OscillatorNode from './OscillatorNode';
|
|
10
|
+
import PeriodicWave from './PeriodicWave';
|
|
11
|
+
import StereoPannerNode from './StereoPannerNode';
|
|
12
|
+
export default class OfflineAudioContext implements BaseAudioContext {
|
|
13
|
+
readonly context: globalThis.OfflineAudioContext;
|
|
14
|
+
readonly destination: AudioDestinationNode;
|
|
15
|
+
readonly sampleRate: number;
|
|
16
|
+
constructor(options: OfflineAudioContextOptions);
|
|
17
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
18
|
+
get currentTime(): number;
|
|
19
|
+
get state(): ContextState;
|
|
20
|
+
createOscillator(): OscillatorNode;
|
|
21
|
+
createGain(): GainNode;
|
|
22
|
+
createStereoPanner(): StereoPannerNode;
|
|
23
|
+
createBiquadFilter(): BiquadFilterNode;
|
|
24
|
+
createBufferSource(options?: AudioBufferSourceNodeOptions): Promise<AudioBufferSourceNode>;
|
|
25
|
+
createBuffer(numOfChannels: number, length: number, sampleRate: number): AudioBuffer;
|
|
26
|
+
createPeriodicWave(real: Float32Array, imag: Float32Array, constraints?: PeriodicWaveConstraints): PeriodicWave;
|
|
27
|
+
createAnalyser(): AnalyserNode;
|
|
28
|
+
decodeAudioDataSource(source: string): Promise<AudioBuffer>;
|
|
29
|
+
decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer>;
|
|
30
|
+
startRendering(): Promise<AudioBuffer>;
|
|
31
|
+
resume(): Promise<void>;
|
|
32
|
+
suspend(suspendTime: number): Promise<void>;
|
|
33
|
+
}
|
|
34
|
+
//# sourceMappingURL=OfflineAudioContext.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"OfflineAudioContext.d.ts","sourceRoot":"","sources":["../../../src/web-core/OfflineAudioContext.tsx"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,uBAAuB,EACvB,0BAA0B,EAC1B,4BAA4B,EAC7B,MAAM,UAAU,CAAC;AAElB,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAClD,OAAO,YAAY,MAAM,gBAAgB,CAAC;AAC1C,OAAO,oBAAoB,MAAM,wBAAwB,CAAC;AAC1D,OAAO,WAAW,MAAM,eAAe,CAAC;AACxC,OAAO,qBAAqB,MAAM,yBAAyB,CAAC;AAC5D,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAClD,OAAO,QAAQ,MAAM,YAAY,CAAC;AAClC,OAAO,cAAc,MAAM,kBAAkB,CAAC;AAC9C,OAAO,YAAY,MAAM,gBAAgB,CAAC;AAC1C,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAIlD,MAAM,CAAC,OAAO,OAAO,mBAAoB,YAAW,gBAAgB;IAClE,QAAQ,CAAC,OAAO,EAAE,UAAU,CAAC,mBAAmB,CAAC;IAEjD,QAAQ,CAAC,WAAW,EAAE,oBAAoB,CAAC;IAC3C,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;gBAEhB,OAAO,EAAE,0BAA0B;gBACnC,gBAAgB,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM;IAsBxE,IAAW,WAAW,IAAI,MAAM,CAE/B;IAED,IAAW,KAAK,IAAI,YAAY,CAE/B;IAED,gBAAgB,IAAI,cAAc;IAIlC,UAAU,IAAI,QAAQ;IAItB,kBAAkB,IAAI,gBAAgB;IAItC,kBAAkB,IAAI,gBAAgB;IAIhC,kBAAkB,CACtB,OAAO,CAAC,EAAE,4BAA4B,GACrC,OAAO,CAAC,qBAAqB,CAAC;IAgBjC,YAAY,CACV,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,GACjB,WAAW;IAwBd,kBAAkB,CAChB,IAAI,EAAE,YAAY,EAClB,IAAI,EAAE,YAAY,EAClB,WAAW,CAAC,EAAE,uBAAuB,GACpC,YAAY;IAYf,cAAc,IAAI,YAAY;IAIxB,qBAAqB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC;IAQ3D,eAAe,CAAC,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAI/D,cAAc,IAAI,OAAO,CAAC,WAAW,CAAC;IAItC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAIvB,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAGlD"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "react-native-audio-api",
|
|
3
|
-
"version": "0.5.
|
|
3
|
+
"version": "0.5.6",
|
|
4
4
|
"description": "react-native-audio-api provides system for controlling audio in React Native environment compatible with Web Audio API specification",
|
|
5
5
|
"bin": {
|
|
6
6
|
"setup-rn-audio-api-web": "./scripts/setup-rn-audio-api-web.js"
|
package/src/api.ts
CHANGED
|
@@ -1,13 +1,21 @@
|
|
|
1
1
|
import NativeAudioAPIModule from './specs/NativeAudioAPIModule';
|
|
2
|
-
import type { IAudioContext } from './interfaces';
|
|
2
|
+
import type { IAudioContext, IOfflineAudioContext } from './interfaces';
|
|
3
3
|
|
|
4
4
|
/* eslint-disable no-var */
|
|
5
5
|
declare global {
|
|
6
6
|
var createAudioContext: (sampleRate?: number) => IAudioContext;
|
|
7
|
+
var createOfflineAudioContext: (
|
|
8
|
+
numberOfChannels: number,
|
|
9
|
+
length: number,
|
|
10
|
+
sampleRate: number
|
|
11
|
+
) => IOfflineAudioContext;
|
|
7
12
|
}
|
|
8
13
|
/* eslint-disable no-var */
|
|
9
14
|
|
|
10
|
-
if (
|
|
15
|
+
if (
|
|
16
|
+
global.createAudioContext == null ||
|
|
17
|
+
global.createOfflineAudioContext == null
|
|
18
|
+
) {
|
|
11
19
|
if (!NativeAudioAPIModule) {
|
|
12
20
|
throw new Error(
|
|
13
21
|
`Failed to install react-native-audio-api: The native module could not be found.`
|
|
@@ -20,6 +28,7 @@ if (global.createAudioContext == null) {
|
|
|
20
28
|
export { default as AudioBuffer } from './core/AudioBuffer';
|
|
21
29
|
export { default as AudioBufferSourceNode } from './core/AudioBufferSourceNode';
|
|
22
30
|
export { default as AudioContext } from './core/AudioContext';
|
|
31
|
+
export { default as OfflineAudioContext } from './core/OfflineAudioContext';
|
|
23
32
|
export { default as AudioDestinationNode } from './core/AudioDestinationNode';
|
|
24
33
|
export { default as AudioNode } from './core/AudioNode';
|
|
25
34
|
export { default as AnalyserNode } from './core/AnalyserNode';
|
package/src/api.web.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
export { default as AudioBuffer } from './web-core/AudioBuffer';
|
|
2
2
|
export { default as AudioBufferSourceNode } from './web-core/AudioBufferSourceNode';
|
|
3
3
|
export { default as AudioContext } from './web-core/AudioContext';
|
|
4
|
+
export { default as OfflineAudioContext } from './web-core/OfflineAudioContext';
|
|
4
5
|
export { default as AudioDestinationNode } from './web-core/AudioDestinationNode';
|
|
5
6
|
export { default as AudioNode } from './web-core/AudioNode';
|
|
6
7
|
export { default as AnalyserNode } from './web-core/AnalyserNode';
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import { IOfflineAudioContext } from '../interfaces';
|
|
2
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
3
|
+
import { OfflineAudioContextOptions } from '../types';
|
|
4
|
+
import { InvalidStateError, NotSupportedError } from '../errors';
|
|
5
|
+
import AudioBuffer from './AudioBuffer';
|
|
6
|
+
|
|
7
|
+
export default class OfflineAudioContext extends BaseAudioContext {
|
|
8
|
+
private isSuspended: boolean;
|
|
9
|
+
private isRendering: boolean;
|
|
10
|
+
private duration: number;
|
|
11
|
+
|
|
12
|
+
constructor(options: OfflineAudioContextOptions);
|
|
13
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
14
|
+
constructor(
|
|
15
|
+
arg0: OfflineAudioContextOptions | number,
|
|
16
|
+
arg1?: number,
|
|
17
|
+
arg2?: number
|
|
18
|
+
) {
|
|
19
|
+
if (typeof arg0 === 'object') {
|
|
20
|
+
const { numberOfChannels, length, sampleRate } = arg0;
|
|
21
|
+
super(
|
|
22
|
+
global.createOfflineAudioContext(numberOfChannels, length, sampleRate)
|
|
23
|
+
);
|
|
24
|
+
|
|
25
|
+
this.duration = length / sampleRate;
|
|
26
|
+
} else if (
|
|
27
|
+
typeof arg0 === 'number' &&
|
|
28
|
+
typeof arg1 === 'number' &&
|
|
29
|
+
typeof arg2 === 'number'
|
|
30
|
+
) {
|
|
31
|
+
super(global.createOfflineAudioContext(arg0, arg1, arg2));
|
|
32
|
+
this.duration = arg1 / arg2;
|
|
33
|
+
} else {
|
|
34
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
this.isSuspended = false;
|
|
38
|
+
this.isRendering = false;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
async resume(): Promise<undefined> {
|
|
42
|
+
if (!this.isRendering) {
|
|
43
|
+
throw new InvalidStateError(
|
|
44
|
+
'Cannot resume an OfflineAudioContext while rendering'
|
|
45
|
+
);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
if (!this.isSuspended) {
|
|
49
|
+
throw new InvalidStateError(
|
|
50
|
+
'Cannot resume an OfflineAudioContext that is not suspended'
|
|
51
|
+
);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
this.isSuspended = false;
|
|
55
|
+
|
|
56
|
+
await (this.context as IOfflineAudioContext).resume();
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
async suspend(suspendTime: number): Promise<undefined> {
|
|
60
|
+
if (suspendTime < 0) {
|
|
61
|
+
throw new InvalidStateError('suspendTime must be a non-negative number');
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
if (suspendTime < this.context.currentTime) {
|
|
65
|
+
throw new InvalidStateError(
|
|
66
|
+
`suspendTime must be greater than the current time: ${suspendTime}`
|
|
67
|
+
);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (suspendTime > this.duration) {
|
|
71
|
+
throw new InvalidStateError(
|
|
72
|
+
`suspendTime must be less than the duration of the context: ${suspendTime}`
|
|
73
|
+
);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
this.isSuspended = true;
|
|
77
|
+
|
|
78
|
+
await (this.context as IOfflineAudioContext).suspend(suspendTime);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
async startRendering(): Promise<AudioBuffer> {
|
|
82
|
+
if (this.isRendering) {
|
|
83
|
+
throw new InvalidStateError('OfflineAudioContext is already rendering');
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
this.isRendering = true;
|
|
87
|
+
|
|
88
|
+
const audioBuffer = await (
|
|
89
|
+
this.context as IOfflineAudioContext
|
|
90
|
+
).startRendering();
|
|
91
|
+
|
|
92
|
+
return new AudioBuffer(audioBuffer);
|
|
93
|
+
}
|
|
94
|
+
}
|
package/src/interfaces.ts
CHANGED
|
@@ -9,6 +9,11 @@ import {
|
|
|
9
9
|
|
|
10
10
|
/** Factory surface installed on the JS global by the native module. */
export interface AudioAPIInstaller {
  /** Creates a realtime audio context, optionally at a specific sample rate. */
  createAudioContext: (sampleRate?: number) => IAudioContext;
  // Creates an offline (render-to-buffer) context.
  // NOTE(review): the JS OfflineAudioContext wrapper casts the returned
  // context to IOfflineAudioContext before calling resume/suspend/
  // startRendering — this return type looks like it should be
  // IOfflineAudioContext; confirm against the native installer.
  createOfflineAudioContext: (
    numberOfChannels: number,
    length: number,
    sampleRate: number
  ) => IAudioContext;
}
|
|
13
18
|
|
|
14
19
|
export interface IBaseAudioContext {
|
|
@@ -43,6 +48,12 @@ export interface IAudioContext extends IBaseAudioContext {
|
|
|
43
48
|
suspend(): Promise<void>;
|
|
44
49
|
}
|
|
45
50
|
|
|
51
|
+
/**
 * Native-side surface backing the JS OfflineAudioContext wrapper.
 * Mirrors the offline-specific subset of the Web Audio API.
 */
export interface IOfflineAudioContext extends IBaseAudioContext {
  /** Resumes a suspended offline render. */
  resume(): Promise<void>;
  /** Schedules the render to pause at `suspendTime` (seconds). */
  suspend(suspendTime: number): Promise<void>;
  /** Renders the graph and resolves with the resulting buffer. */
  startRendering(): Promise<IAudioBuffer>;
}
|
|
56
|
+
|
|
46
57
|
export interface IAudioNode {
|
|
47
58
|
readonly context: BaseAudioContext;
|
|
48
59
|
readonly numberOfInputs: number;
|
package/src/types.ts
CHANGED
|
@@ -29,6 +29,12 @@ export interface AudioContextOptions {
|
|
|
29
29
|
sampleRate: number;
|
|
30
30
|
}
|
|
31
31
|
|
|
32
|
+
/** Options-bag form of the OfflineAudioContext constructor arguments. */
export interface OfflineAudioContextOptions {
  /** Number of output channels to render. */
  numberOfChannels: number;
  /** Render length in sample frames. */
  length: number;
  /** Sample rate of the render, in Hz. */
  sampleRate: number;
}
|
|
37
|
+
|
|
32
38
|
export type WindowType = 'blackman' | 'hann';
|
|
33
39
|
|
|
34
40
|
export interface AudioBufferSourceNodeOptions {
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ContextState,
|
|
3
|
+
PeriodicWaveConstraints,
|
|
4
|
+
OfflineAudioContextOptions,
|
|
5
|
+
AudioBufferSourceNodeOptions,
|
|
6
|
+
} from '../types';
|
|
7
|
+
import { InvalidAccessError, NotSupportedError } from '../errors';
|
|
8
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
9
|
+
import AnalyserNode from './AnalyserNode';
|
|
10
|
+
import AudioDestinationNode from './AudioDestinationNode';
|
|
11
|
+
import AudioBuffer from './AudioBuffer';
|
|
12
|
+
import AudioBufferSourceNode from './AudioBufferSourceNode';
|
|
13
|
+
import BiquadFilterNode from './BiquadFilterNode';
|
|
14
|
+
import GainNode from './GainNode';
|
|
15
|
+
import OscillatorNode from './OscillatorNode';
|
|
16
|
+
import PeriodicWave from './PeriodicWave';
|
|
17
|
+
import StereoPannerNode from './StereoPannerNode';
|
|
18
|
+
|
|
19
|
+
import { globalWasmPromise, globalTag } from './custom/LoadCustomWasm';
|
|
20
|
+
|
|
21
|
+
export default class OfflineAudioContext implements BaseAudioContext {
|
|
22
|
+
readonly context: globalThis.OfflineAudioContext;
|
|
23
|
+
|
|
24
|
+
readonly destination: AudioDestinationNode;
|
|
25
|
+
readonly sampleRate: number;
|
|
26
|
+
|
|
27
|
+
constructor(options: OfflineAudioContextOptions);
|
|
28
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
29
|
+
constructor(
|
|
30
|
+
arg0: OfflineAudioContextOptions | number,
|
|
31
|
+
arg1?: number,
|
|
32
|
+
arg2?: number
|
|
33
|
+
) {
|
|
34
|
+
if (typeof arg0 === 'object') {
|
|
35
|
+
this.context = new window.OfflineAudioContext(arg0);
|
|
36
|
+
} else if (
|
|
37
|
+
typeof arg0 === 'number' &&
|
|
38
|
+
typeof arg1 === 'number' &&
|
|
39
|
+
typeof arg2 === 'number'
|
|
40
|
+
) {
|
|
41
|
+
this.context = new window.OfflineAudioContext(arg0, arg1, arg2);
|
|
42
|
+
} else {
|
|
43
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
this.sampleRate = this.context.sampleRate;
|
|
47
|
+
this.destination = new AudioDestinationNode(this, this.context.destination);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
public get currentTime(): number {
|
|
51
|
+
return this.context.currentTime;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
public get state(): ContextState {
|
|
55
|
+
return this.context.state as ContextState;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
createOscillator(): OscillatorNode {
|
|
59
|
+
return new OscillatorNode(this, this.context.createOscillator());
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
createGain(): GainNode {
|
|
63
|
+
return new GainNode(this, this.context.createGain());
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
createStereoPanner(): StereoPannerNode {
|
|
67
|
+
return new StereoPannerNode(this, this.context.createStereoPanner());
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
createBiquadFilter(): BiquadFilterNode {
|
|
71
|
+
return new BiquadFilterNode(this, this.context.createBiquadFilter());
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async createBufferSource(
|
|
75
|
+
options?: AudioBufferSourceNodeOptions
|
|
76
|
+
): Promise<AudioBufferSourceNode> {
|
|
77
|
+
if (!options || !options.pitchCorrection) {
|
|
78
|
+
return new AudioBufferSourceNode(
|
|
79
|
+
this,
|
|
80
|
+
this.context.createBufferSource(),
|
|
81
|
+
false
|
|
82
|
+
);
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
await globalWasmPromise;
|
|
86
|
+
|
|
87
|
+
const wasmStretch = await window[globalTag](this.context);
|
|
88
|
+
|
|
89
|
+
return new AudioBufferSourceNode(this, wasmStretch, true);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
createBuffer(
|
|
93
|
+
numOfChannels: number,
|
|
94
|
+
length: number,
|
|
95
|
+
sampleRate: number
|
|
96
|
+
): AudioBuffer {
|
|
97
|
+
if (numOfChannels < 1 || numOfChannels >= 32) {
|
|
98
|
+
throw new NotSupportedError(
|
|
99
|
+
`The number of channels provided (${numOfChannels}) is outside the range [1, 32]`
|
|
100
|
+
);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
if (length <= 0) {
|
|
104
|
+
throw new NotSupportedError(
|
|
105
|
+
`The number of frames provided (${length}) is less than or equal to the minimum bound (0)`
|
|
106
|
+
);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
if (sampleRate < 8000 || sampleRate > 96000) {
|
|
110
|
+
throw new NotSupportedError(
|
|
111
|
+
`The sample rate provided (${sampleRate}) is outside the range [8000, 96000]`
|
|
112
|
+
);
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
return new AudioBuffer(
|
|
116
|
+
this.context.createBuffer(numOfChannels, length, sampleRate)
|
|
117
|
+
);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
createPeriodicWave(
|
|
121
|
+
real: Float32Array,
|
|
122
|
+
imag: Float32Array,
|
|
123
|
+
constraints?: PeriodicWaveConstraints
|
|
124
|
+
): PeriodicWave {
|
|
125
|
+
if (real.length !== imag.length) {
|
|
126
|
+
throw new InvalidAccessError(
|
|
127
|
+
`The lengths of the real (${real.length}) and imaginary (${imag.length}) arrays must match.`
|
|
128
|
+
);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
return new PeriodicWave(
|
|
132
|
+
this.context.createPeriodicWave(real, imag, constraints)
|
|
133
|
+
);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
createAnalyser(): AnalyserNode {
|
|
137
|
+
return new AnalyserNode(this, this.context.createAnalyser());
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
async decodeAudioDataSource(source: string): Promise<AudioBuffer> {
|
|
141
|
+
const arrayBuffer = await fetch(source).then((response) =>
|
|
142
|
+
response.arrayBuffer()
|
|
143
|
+
);
|
|
144
|
+
|
|
145
|
+
return this.decodeAudioData(arrayBuffer);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
async decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer> {
|
|
149
|
+
return new AudioBuffer(await this.context.decodeAudioData(arrayBuffer));
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
async startRendering(): Promise<AudioBuffer> {
|
|
153
|
+
return new AudioBuffer(await this.context.startRendering());
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
async resume(): Promise<void> {
|
|
157
|
+
await this.context.resume();
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
async suspend(suspendTime: number): Promise<void> {
|
|
161
|
+
await this.context.suspend(suspendTime);
|
|
162
|
+
}
|
|
163
|
+
}
|