react-native-audio-api 0.11.0-alpha.3 → 0.11.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.cpp +34 -6
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.cpp +4 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/ptrs.hpp +8 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/utils.cpp +4 -0
- package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.h +1 -0
- package/common/cpp/audioapi/core/utils/AudioFileWriter.h +1 -0
- package/common/cpp/audioapi/core/utils/AudioRecorderCallback.h +1 -0
- package/ios/audioapi/ios/core/IOSAudioRecorder.mm +8 -7
- package/ios/audioapi/ios/core/NativeAudioPlayer.m +1 -1
- package/ios/audioapi/ios/core/NativeAudioRecorder.m +1 -1
- package/ios/audioapi/ios/system/AudioEngine.h +2 -0
- package/ios/audioapi/ios/system/AudioEngine.mm +47 -4
- package/ios/audioapi/ios/system/NotificationManager.mm +6 -3
- package/package.json +1 -1
package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.cpp
CHANGED

@@ -2,7 +2,11 @@
 #include <audioapi/android/core/AndroidAudioRecorder.h>
 #include <audioapi/android/core/utils/AndroidFileWriterBackend.h>
 #include <audioapi/android/core/utils/AndroidRecorderCallback.h>
+
+#if !RN_AUDIO_API_FFMPEG_DISABLED
 #include <audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.h>
+#endif // RN_AUDIO_API_FFMPEG_DISABLED
+
 #include <audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.h>
 #include <audioapi/core/sources/RecorderAdapterNode.h>
 #include <audioapi/core/utils/Constants.h>
@@ -29,6 +33,25 @@ AndroidAudioRecorder::AndroidAudioRecorder(
 }
 
 AndroidAudioRecorder::~AndroidAudioRecorder() {
+  {
+    std::scoped_lock dtorLock(callbackMutex_, fileWriterMutex_, adapterNodeMutex_);
+
+    if (usesFileOutput()) {
+      fileOutputEnabled_.store(false, std::memory_order_release);
+      fileWriter_->closeFile();
+    }
+
+    if (usesCallback()) {
+      callbackOutputEnabled_.store(false, std::memory_order_release);
+      dataCallback_->cleanup();
+    }
+
+    if (isConnected()) {
+      isConnected_.store(false, std::memory_order_release);
+      adapterNode_->cleanup();
+    }
+  }
+
   nativeAudioRecorder_.release();
 
   if (mStream_) {
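The destructor hunk above takes all three mutexes at once with a single std::scoped_lock before tearing down shared state. A minimal standalone sketch of that pattern, with hypothetical class and member names (not the library's actual API):

#include <atomic>
#include <mutex>

// Illustrative only: lock every mutex the audio thread can hold before
// tearing shared state down, so a late callback cannot race the destructor.
class RecorderLike {
 public:
  ~RecorderLike() {
    // std::scoped_lock acquires both mutexes together with a
    // deadlock-avoidance algorithm (like std::lock + adopt_lock).
    std::scoped_lock dtorLock(callbackMutex_, fileWriterMutex_);
    outputEnabled_.store(false, std::memory_order_release);
    // ... close files / release callbacks while both locks are held ...
  }

 private:
  std::mutex callbackMutex_;
  std::mutex fileWriterMutex_;
  std::atomic<bool> outputEnabled_{true};
};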
@@ -86,7 +109,7 @@ Result<std::string, std::string> AndroidAudioRecorder::start() {
 
   if (usesFileOutput()) {
     auto fileResult =
-        std::
+        std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
             ->openFile(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
 
     if (!fileResult.is_ok()) {
@@ -98,7 +121,7 @@ Result<std::string, std::string> AndroidAudioRecorder::start() {
   }
 
   if (usesCallback()) {
-    std::
+    std::static_pointer_cast<AndroidRecorderCallback>(dataCallback_)
         ->prepare(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
   }
 
@@ -172,13 +195,18 @@ Result<std::string, std::string> AndroidAudioRecorder::enableFileOutput(
   if (properties->format == AudioFileProperties::Format::WAV) {
     fileWriter_ = std::make_shared<MiniAudioFileWriter>(audioEventHandlerRegistry_, properties);
   } else {
+#if !RN_AUDIO_API_FFMPEG_DISABLED
     fileWriter_ = std::make_shared<android::ffmpeg::FFmpegAudioFileWriter>(
         audioEventHandlerRegistry_, properties);
+#else
+    return Result<std::string, std::string>::Err(
+        "FFmpeg backend is disabled. Cannot create file writer for the requested format. Use WAV format instead.");
+#endif
   }
 
   if (!isIdle()) {
     auto fileResult =
-        std::
+        std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
             ->openFile(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
 
     if (!fileResult.is_ok()) {
@@ -227,7 +255,7 @@ Result<NoneType, std::string> AndroidAudioRecorder::setOnAudioReadyCallback(
       audioEventHandlerRegistry_, sampleRate, bufferLength, channelCount, callbackId);
 
   if (!isIdle()) {
-    std::
+    std::static_pointer_cast<AndroidRecorderCallback>(dataCallback_)
         ->prepare(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
   }
 
@@ -271,14 +299,14 @@ oboe::DataCallbackResult AndroidAudioRecorder::onAudioReady(
 
   if (usesFileOutput()) {
     if (auto fileWriterLock = Locker::tryLock(fileWriterMutex_)) {
-      std::
+      std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
           ->writeAudioData(audioData, numFrames);
     }
   }
 
   if (usesCallback()) {
     if (auto callbackLock = Locker::tryLock(callbackMutex_)) {
-      std::
+      std::static_pointer_cast<AndroidRecorderCallback>(dataCallback_)
           ->receiveAudioData(audioData, numFrames);
     }
   }
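The onAudioReady hunk above keeps the real-time thread non-blocking: it writes only when the mutex can be taken without waiting, and downcasts the stored base-class pointer to the platform backend with std::static_pointer_cast. A rough sketch of that pattern under assumed, illustrative names (the Locker helper and concrete writer types are stand-ins, not the library's API):

#include <memory>
#include <mutex>

struct FileWriterBase { virtual ~FileWriterBase() = default; };
struct PlatformFileWriter : FileWriterBase {
  void writeAudioData(const float * /*data*/, int /*numFrames*/) {}
};

// Called on the real-time audio thread: never block, skip the write if the
// lock is contended (e.g. while the writer is being reconfigured or closed).
void onAudioReadyLike(const std::shared_ptr<FileWriterBase> &writer,
                      std::mutex &writerMutex,
                      const float *data,
                      int numFrames) {
  std::unique_lock<std::mutex> lock(writerMutex, std::try_to_lock);
  if (lock.owns_lock() && writer) {
    // static_pointer_cast is safe only because this field always holds the
    // platform-specific writer type.
    std::static_pointer_cast<PlatformFileWriter>(writer)
        ->writeAudioData(data, numFrames);
  }
}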
package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.cpp
CHANGED

@@ -1,3 +1,5 @@
+#if !RN_AUDIO_API_FFMPEG_DISABLED
+
 extern "C" {
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>

@@ -508,3 +510,5 @@ CloseFileResult FFmpegAudioFileWriter::finalizeOutput() {
 }
 
 } // namespace audioapi::android::ffmpeg
+
+#endif // RN_AUDIO_API_FFMPEG_DISABLED
package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/ptrs.hpp
CHANGED

@@ -1,3 +1,7 @@
+#if !RN_AUDIO_API_FFMPEG_DISABLED
+
+#pragma once
+
 extern "C" {
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>

@@ -46,3 +50,7 @@ template<> inline void AvDtor<AVAudioFifo>::operator()(AVAudioFifo* fifo) const
 }
 
 } // namespace audioapi::android::ffmpeg
+
+#else
+
+#endif // RN_AUDIO_API_FFMPEG_DISABLED
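Both FFmpeg source files above are now wrapped in #if !RN_AUDIO_API_FFMPEG_DISABLED, and enableFileOutput() rejects non-WAV formats when the backend is compiled out. A simplified sketch of that conditional-compilation dispatch (factory and type names are hypothetical, and the real code returns a Result error rather than throwing):

#include <memory>
#include <stdexcept>

enum class Format { WAV, AAC };

struct Writer { virtual ~Writer() = default; };
struct WavWriter : Writer {};
#if !RN_AUDIO_API_FFMPEG_DISABLED
struct FfmpegWriter : Writer {};
#endif

// When the build defines RN_AUDIO_API_FFMPEG_DISABLED, no FFmpeg-backed type
// is referenced at all; non-WAV requests fail at the call site instead.
std::shared_ptr<Writer> makeWriter(Format format) {
  if (format == Format::WAV) {
    return std::make_shared<WavWriter>();
  }
#if !RN_AUDIO_API_FFMPEG_DISABLED
  return std::make_shared<FfmpegWriter>();
#else
  throw std::runtime_error("FFmpeg backend is disabled; use WAV instead");
#endif
}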
package/ios/audioapi/ios/core/IOSAudioRecorder.mm
CHANGED

@@ -31,14 +31,14 @@ IOSAudioRecorder::IOSAudioRecorder(
   AudioReceiverBlock receiverBlock = ^(const AudioBufferList *inputBuffer, int numFrames) {
     if (usesFileOutput()) {
       if (auto lock = Locker::tryLock(fileWriterMutex_)) {
-        std::
+        std::static_pointer_cast<IOSFileWriter>(fileWriter_)
             ->writeAudioData(inputBuffer, numFrames);
       }
     }
 
     if (usesCallback()) {
       if (auto lock = Locker::tryLock(callbackMutex_)) {
-        std::
+        std::static_pointer_cast<IOSRecorderCallback>(dataCallback_)
             ->receiveAudioData(inputBuffer, numFrames);
       }
     }
@@ -76,7 +76,7 @@ Result<std::string, std::string> IOSAudioRecorder::start()
     return Result<std::string, std::string>::Err("Microphone permissions are not granted");
   }
 
-  // TODO: recorder should probably request
+  // TODO: recorder should probably request the options if not set by user
   // but lets handle that in another PR
   if (![audioSessionManager isSessionActive]) {
     return Result<std::string, std::string>::Err("Audio session is not active");
@@ -90,11 +90,12 @@ Result<std::string, std::string> IOSAudioRecorder::start()
   // Engine will be started again once the native recorder starts
   [AudioEngine.sharedInstance stopIfNecessary];
 
+  // Estimate the maximum input buffer lengths that can be expected from the sink node
   size_t maxInputBufferLength = [nativeRecorder_ getBufferSize];
   auto inputFormat = [nativeRecorder_ getInputFormat];
 
   if (usesFileOutput()) {
-    auto fileResult = std::
+    auto fileResult = std::static_pointer_cast<IOSFileWriter>(fileWriter_)
         ->openFile(inputFormat, maxInputBufferLength);
 
     if (fileResult.is_err()) {
@@ -106,7 +107,7 @@ Result<std::string, std::string> IOSAudioRecorder::start()
   }
 
   if (usesCallback()) {
-    auto callbackResult = std::
+    auto callbackResult = std::static_pointer_cast<IOSRecorderCallback>(dataCallback_)
         ->prepare(inputFormat, maxInputBufferLength);
 
     if (callbackResult.is_err()) {
@@ -172,7 +173,7 @@ Result<std::string, std::string> IOSAudioRecorder::enableFileOutput(
   fileWriter_ = std::make_shared<IOSFileWriter>(audioEventHandlerRegistry_, properties);
 
   if (!isIdle()) {
-    auto result = std::
+    auto result = std::static_pointer_cast<IOSFileWriter>(fileWriter_)
         ->openFile([nativeRecorder_ getInputFormat], [nativeRecorder_ getBufferSize]);
 
     if (result.is_err()) {
@@ -273,7 +274,7 @@ Result<NoneType, std::string> IOSAudioRecorder::setOnAudioReadyCallback(
       audioEventHandlerRegistry_, sampleRate, bufferLength, channelCount, callbackId);
 
   if (!isIdle()) {
-    auto result = std::
+    auto result = std::static_pointer_cast<IOSRecorderCallback>(dataCallback_)
        ->prepare([nativeRecorder_ getInputFormat], [nativeRecorder_ getBufferSize]);
 
     if (result.is_err()) {
package/ios/audioapi/ios/core/NativeAudioRecorder.m
CHANGED

@@ -95,7 +95,7 @@ static inline uint32_t nextPowerOfTwo(uint32_t x)
 {
   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
-  [audioEngine
+  [audioEngine stopIfPossible];
   [audioEngine detachInputNode];
   [audioEngine restartAudioEngine];
 }
package/ios/audioapi/ios/system/AudioEngine.mm
CHANGED

@@ -57,8 +57,6 @@ static AudioEngine *_sharedInstance = nil;
 
 - (void)detachSourceNodeWithId:(NSString *)sourceNodeId
 {
-  NSLog(@"[AudioEngine] detaching source node with ID: %@", sourceNodeId);
-
   AVAudioSourceNode *sourceNode = [self.sourceNodes valueForKey:sourceNodeId];
 
   if (sourceNode == nil) {
@@ -75,7 +73,6 @@ static AudioEngine *_sharedInstance = nil;
 - (void)attachInputNode:(AVAudioSinkNode *)inputNode
 {
   self.inputNode = inputNode;
-
   AVAudioFormat *format = [self.audioEngine.inputNode inputFormatForBus:0];
 
   [self.audioEngine attachNode:inputNode];
@@ -105,11 +102,39 @@ static AudioEngine *_sharedInstance = nil;
 
 - (void)onInterruptionEnd:(bool)shouldResume
 {
+  NSError *error = nil;
+
   if (self.state != AudioEngineState::AudioEngineStateInterrupted) {
+    // If engine was not interrupted, do nothing
+    // Not a real condition, but better be safe than sorry :shrug:
+    return;
+  }
+
+  // Stop just in case, reset the engine and build it from scratch
+  [self stopIfNecessary];
+  [self.audioEngine reset];
+  [self rebuildAudioEngine];
+
+  // If shouldResume is false, mark the engine as paused and wait
+  // for JS-side resume command
+  // TODO: this should be notified to the user f.e. via Event Emitter
+  if (!shouldResume) {
+    self.state = AudioEngineState::AudioEngineStatePaused;
+    return;
+  }
+
+  [self.audioEngine prepare];
+  [self.audioEngine startAndReturnError:&error];
+
+  if (error != nil) {
+    NSLog(
+        @"Error while restarting the audio engine after interruption: %@",
+        [error debugDescription]);
+    self.state = AudioEngineState::AudioEngineStateIdle;
     return;
   }
 
-
+  self.state = AudioEngineState::AudioEngineStateRunning;
 }
 
 - (AudioEngineState)getState
@@ -117,6 +142,7 @@ static AudioEngine *_sharedInstance = nil;
   return self.state;
 }
 
+/// @brief Rebuilds the audio engine by re-attaching and re-connecting all source nodes and input node.
 - (void)rebuildAudioEngine
 {
   self.audioEngine = [[AVAudioEngine alloc] init];
@@ -135,6 +161,7 @@ static AudioEngine *_sharedInstance = nil;
   }
 }
 
+// @brief Starts the audio engine if not already running.
 - (bool)startEngine
 {
   NSError *error = nil;
@@ -208,6 +235,22 @@ static AudioEngine *_sharedInstance = nil;
   [self stopEngine];
 }
 
+- (void)stopIfPossible
+{
+  if (self.state == AudioEngineState::AudioEngineStateIdle) {
+    return;
+  }
+
+  bool hasInput = self.inputNode != nil;
+  bool hasSources = [self.sourceNodes count] > 0;
+
+  if (hasInput || hasSources) {
+    return;
+  }
+
+  [self stopEngine];
+}
+
 - (void)restartAudioEngine
 {
   if ([self.audioEngine isRunning]) {
package/ios/audioapi/ios/system/NotificationManager.mm
CHANGED

@@ -129,11 +129,12 @@ static NSString *NotificationManagerContext = @"NotificationManagerContext";
   }
 
   bool shouldResume = interruptionOption == AVAudioSessionInterruptionOptionShouldResume;
-  [audioEngine onInterruptionEnd:shouldResume];
 
   if (self.audioInterruptionsObserved) {
     NSDictionary *body = @{@"type" : @"ended", @"shouldResume" : @(shouldResume)};
     [self.audioAPIModule invokeHandlerWithEventName:@"interruption" eventBody:body];
+  } else {
+    [audioEngine onInterruptionEnd:shouldResume];
   }
 }
 
@@ -156,11 +157,12 @@ static NSString *NotificationManagerContext = @"NotificationManagerContext";
   }
 
   bool shouldResume = secondaryAudioType == AVAudioSessionSilenceSecondaryAudioHintTypeEnd;
-  [audioEngine onInterruptionEnd:shouldResume];
 
   if (self.audioInterruptionsObserved) {
     NSDictionary *body = @{@"type" : @"ended", @"shouldResume" : @(shouldResume)};
     [self.audioAPIModule invokeHandlerWithEventName:@"interruption" eventBody:body];
+  } else {
+    [audioEngine onInterruptionEnd:shouldResume];
   }
 }
 
@@ -273,11 +275,12 @@ static NSString *NotificationManagerContext = @"NotificationManagerContext";
     return;
   }
 
-  [audioEngine onInterruptionEnd:true];
   NSDictionary *body = @{@"type" : @"ended", @"shouldResume" : @true};
 
   if (self.audioInterruptionsObserved) {
     [self.audioAPIModule invokeHandlerWithEventName:@"interruption" eventBody:body];
+  } else {
+    [audioEngine onInterruptionEnd:true];
   }
 }
 
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "react-native-audio-api",
-  "version": "0.11.0-alpha.3",
+  "version": "0.11.0-alpha.4",
   "description": "react-native-audio-api provides system for controlling audio in React Native environment compatible with Web Audio API specification",
   "bin": {
     "setup-rn-audio-api-web": "./scripts/setup-rn-audio-api-web.js"