react-native-audio-api 0.11.0-nightly-95f9c99-20251215 → 0.11.0-nightly-dd83923-20251216
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.cpp +382 -39
- package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.h +45 -18
- package/android/src/main/cpp/audioapi/android/core/NativeAudioRecorder.hpp +9 -9
- package/android/src/main/cpp/audioapi/android/core/utils/AndroidFileWriterBackend.h +33 -0
- package/android/src/main/cpp/audioapi/android/core/utils/AndroidRecorderCallback.cpp +170 -0
- package/android/src/main/cpp/audioapi/android/core/utils/AndroidRecorderCallback.h +46 -0
- package/android/src/main/cpp/audioapi/android/core/utils/AudioDecoder.cpp +0 -1
- package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.cpp +83 -0
- package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.h +22 -0
- package/android/src/main/cpp/audioapi/android/core/utils/MiniaudioImplementation.cpp +8 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.cpp +493 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.h +70 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/ptrs.hpp +56 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/utils.cpp +114 -0
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/utils.h +34 -0
- package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.cpp +296 -0
- package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.h +40 -0
- package/android/src/main/cpp/audioapi/android/system/NativeFileInfo.hpp +32 -0
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt +2 -0
- package/android/src/main/java/com/swmansion/audioapi/system/AudioFocusListener.kt +7 -3
- package/android/src/main/java/com/swmansion/audioapi/system/CentralizedForegroundService.kt +1 -0
- package/android/src/main/java/com/swmansion/audioapi/system/NativeFileInfo.kt +18 -0
- package/android/src/main/java/com/swmansion/audioapi/system/notification/PlaybackNotification.kt +1 -0
- package/android/src/main/java/com/swmansion/audioapi/system/notification/RecordingNotificationReceiver.kt +2 -0
- package/android/src/oldarch/NativeAudioAPIModuleSpec.java +100 -80
- package/common/cpp/audioapi/AudioAPIModuleInstaller.h +3 -11
- package/common/cpp/audioapi/HostObjects/inputs/AudioRecorderHostObject.cpp +145 -16
- package/common/cpp/audioapi/HostObjects/inputs/AudioRecorderHostObject.h +21 -6
- package/common/cpp/audioapi/core/inputs/AudioRecorder.cpp +43 -60
- package/common/cpp/audioapi/core/inputs/AudioRecorder.h +53 -33
- package/common/cpp/audioapi/core/sources/RecorderAdapterNode.cpp +42 -14
- package/common/cpp/audioapi/core/sources/RecorderAdapterNode.h +12 -9
- package/common/cpp/audioapi/core/utils/AudioFileWriter.cpp +41 -0
- package/common/cpp/audioapi/core/utils/AudioFileWriter.h +44 -0
- package/common/cpp/audioapi/core/utils/AudioRecorderCallback.cpp +101 -0
- package/common/cpp/audioapi/core/utils/AudioRecorderCallback.h +52 -0
- package/common/cpp/audioapi/utils/AudioFileProperties.cpp +92 -0
- package/common/cpp/audioapi/utils/AudioFileProperties.h +76 -0
- package/common/cpp/audioapi/utils/Result.hpp +323 -0
- package/common/cpp/audioapi/utils/UnitConversion.h +9 -0
- package/ios/audioapi/ios/AudioAPIModule.mm +9 -14
- package/ios/audioapi/ios/core/IOSAudioPlayer.h +1 -1
- package/ios/audioapi/ios/core/IOSAudioPlayer.mm +7 -6
- package/ios/audioapi/ios/core/IOSAudioRecorder.h +39 -13
- package/ios/audioapi/ios/core/IOSAudioRecorder.mm +302 -26
- package/ios/audioapi/ios/core/NativeAudioPlayer.m +7 -11
- package/ios/audioapi/ios/core/NativeAudioRecorder.h +8 -9
- package/ios/audioapi/ios/core/NativeAudioRecorder.m +70 -76
- package/ios/audioapi/ios/core/utils/AudioDecoder.mm +1 -0
- package/ios/audioapi/ios/core/utils/FileOptions.h +33 -0
- package/ios/audioapi/ios/core/utils/FileOptions.mm +195 -0
- package/ios/audioapi/ios/core/utils/IOSFileWriter.h +53 -0
- package/ios/audioapi/ios/core/utils/IOSFileWriter.mm +239 -0
- package/ios/audioapi/ios/core/utils/IOSRecorderCallback.h +47 -0
- package/ios/audioapi/ios/core/utils/IOSRecorderCallback.mm +185 -0
- package/ios/audioapi/ios/system/AudioEngine.h +21 -16
- package/ios/audioapi/ios/system/AudioEngine.mm +138 -130
- package/ios/audioapi/ios/system/AudioSessionManager.h +19 -9
- package/ios/audioapi/ios/system/AudioSessionManager.mm +250 -215
- package/ios/audioapi/ios/system/NotificationManager.mm +24 -42
- package/lib/commonjs/api.js +82 -109
- package/lib/commonjs/api.js.map +1 -1
- package/lib/commonjs/core/AudioRecorder.js +159 -13
- package/lib/commonjs/core/AudioRecorder.js.map +1 -1
- package/lib/commonjs/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/commonjs/system/notification/PlaybackNotificationManager.js +17 -14
- package/lib/commonjs/system/notification/PlaybackNotificationManager.js.map +1 -1
- package/lib/commonjs/system/notification/RecordingNotificationManager.js +22 -19
- package/lib/commonjs/system/notification/RecordingNotificationManager.js.map +1 -1
- package/lib/commonjs/system/notification/SimpleNotificationManager.js +16 -13
- package/lib/commonjs/system/notification/SimpleNotificationManager.js.map +1 -1
- package/lib/commonjs/types.js +39 -0
- package/lib/commonjs/types.js.map +1 -1
- package/lib/commonjs/utils/filePresets.js +43 -0
- package/lib/commonjs/utils/filePresets.js.map +1 -0
- package/lib/commonjs/web-system/notification/PlaybackNotificationManager.js +6 -3
- package/lib/commonjs/web-system/notification/PlaybackNotificationManager.js.map +1 -1
- package/lib/commonjs/web-system/notification/RecordingNotificationManager.js +6 -3
- package/lib/commonjs/web-system/notification/RecordingNotificationManager.js.map +1 -1
- package/lib/module/api.js +5 -4
- package/lib/module/api.js.map +1 -1
- package/lib/module/core/AudioRecorder.js +159 -13
- package/lib/module/core/AudioRecorder.js.map +1 -1
- package/lib/module/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/module/system/notification/PlaybackNotificationManager.js +17 -14
- package/lib/module/system/notification/PlaybackNotificationManager.js.map +1 -1
- package/lib/module/system/notification/RecordingNotificationManager.js +22 -19
- package/lib/module/system/notification/RecordingNotificationManager.js.map +1 -1
- package/lib/module/system/notification/SimpleNotificationManager.js +16 -13
- package/lib/module/system/notification/SimpleNotificationManager.js.map +1 -1
- package/lib/module/types.js +38 -1
- package/lib/module/types.js.map +1 -1
- package/lib/module/utils/filePresets.js +39 -0
- package/lib/module/utils/filePresets.js.map +1 -0
- package/lib/module/web-system/notification/PlaybackNotificationManager.js +6 -3
- package/lib/module/web-system/notification/PlaybackNotificationManager.js.map +1 -1
- package/lib/module/web-system/notification/RecordingNotificationManager.js +6 -3
- package/lib/module/web-system/notification/RecordingNotificationManager.js.map +1 -1
- package/lib/typescript/api.d.ts +5 -4
- package/lib/typescript/api.d.ts.map +1 -1
- package/lib/typescript/core/AudioRecorder.d.ts +69 -7
- package/lib/typescript/core/AudioRecorder.d.ts.map +1 -1
- package/lib/typescript/events/types.d.ts +36 -2
- package/lib/typescript/events/types.d.ts.map +1 -1
- package/lib/typescript/interfaces.d.ts +24 -4
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts +1 -1
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts.map +1 -1
- package/lib/typescript/system/notification/PlaybackNotificationManager.d.ts +4 -3
- package/lib/typescript/system/notification/PlaybackNotificationManager.d.ts.map +1 -1
- package/lib/typescript/system/notification/RecordingNotificationManager.d.ts +4 -3
- package/lib/typescript/system/notification/RecordingNotificationManager.d.ts.map +1 -1
- package/lib/typescript/system/notification/SimpleNotificationManager.d.ts +3 -2
- package/lib/typescript/system/notification/SimpleNotificationManager.d.ts.map +1 -1
- package/lib/typescript/system/notification/types.d.ts.map +1 -1
- package/lib/typescript/types.d.ts +79 -3
- package/lib/typescript/types.d.ts.map +1 -1
- package/lib/typescript/utils/filePresets.d.ts +9 -0
- package/lib/typescript/utils/filePresets.d.ts.map +1 -0
- package/lib/typescript/web-system/notification/PlaybackNotificationManager.d.ts +4 -3
- package/lib/typescript/web-system/notification/PlaybackNotificationManager.d.ts.map +1 -1
- package/lib/typescript/web-system/notification/RecordingNotificationManager.d.ts +4 -3
- package/lib/typescript/web-system/notification/RecordingNotificationManager.d.ts.map +1 -1
- package/package.json +4 -4
- package/src/AudioAPIModule/globals.d.ts +1 -2
- package/src/api.ts +8 -29
- package/src/core/AudioRecorder.ts +195 -23
- package/src/events/types.ts +40 -2
- package/src/interfaces.ts +34 -5
- package/src/specs/NativeAudioAPIModule.ts +2 -2
- package/src/system/notification/PlaybackNotificationManager.ts +20 -16
- package/src/system/notification/RecordingNotificationManager.ts +26 -21
- package/src/system/notification/SimpleNotificationManager.ts +18 -13
- package/src/system/notification/types.ts +1 -0
- package/src/types.ts +89 -3
- package/src/utils/filePresets.ts +47 -0
- package/src/web-system/notification/PlaybackNotificationManager.ts +9 -5
- package/src/web-system/notification/RecordingNotificationManager.ts +9 -5
package/ios/audioapi/ios/core/utils/IOSRecorderCallback.mm
@@ -0,0 +1,185 @@
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#include <audioapi/HostObjects/sources/AudioBufferHostObject.h>
+#include <audioapi/core/utils/Constants.h>
+#include <audioapi/dsp/VectorMath.h>
+#include <audioapi/events/AudioEventHandlerRegistry.h>
+#include <audioapi/ios/core/utils/IOSRecorderCallback.h>
+#include <audioapi/utils/AudioArray.h>
+#include <audioapi/utils/AudioBus.h>
+#include <audioapi/utils/CircularAudioArray.h>
+#include <audioapi/utils/Result.hpp>
+#include <algorithm>
+
+namespace audioapi {
+
+IOSRecorderCallback::IOSRecorderCallback(
+    const std::shared_ptr<AudioEventHandlerRegistry> &audioEventHandlerRegistry,
+    float sampleRate,
+    size_t bufferLength,
+    int channelCount,
+    uint64_t callbackId)
+    : AudioRecorderCallback(
+          audioEventHandlerRegistry,
+          sampleRate,
+          bufferLength,
+          channelCount,
+          callbackId)
+{
+}
+
+IOSRecorderCallback::~IOSRecorderCallback()
+{
+  @autoreleasepool {
+    converter_ = nil;
+    bufferFormat_ = nil;
+    callbackFormat_ = nil;
+    converterInputBuffer_ = nil;
+    converterOutputBuffer_ = nil;
+
+    for (int i = 0; i < channelCount_; ++i) {
+      circularBus_[i]->zero();
+    }
+  }
+}
+
+/// @brief Prepares the IOSRecorderCallback for receiving audio data.
+/// This involves setting up the audio converter and buffers based on the provided input format.
+/// This method should be called from the JS thread only.
+/// @param bufferFormat The format of the incoming audio data.
+/// @param maxInputBufferLength The maximum length of the input buffer in frames.
+/// @returns Result indicating success or error with message.
+Result<NoneType, std::string> IOSRecorderCallback::prepare(
+    AVAudioFormat *bufferFormat,
+    size_t maxInputBufferLength)
+{
+  @autoreleasepool {
+    bufferFormat_ = bufferFormat;
+    converterInputBufferSize_ = maxInputBufferLength;
+
+    if (bufferFormat.sampleRate <= 0 || bufferFormat.channelCount == 0) {
+      return Result<NoneType, std::string>::Err(
+          "Invalid input format: sampleRate and channelCount must be greater than 0");
+    }
+
+    if (sampleRate_ <= 0 || channelCount_ == 0) {
+      return Result<NoneType, std::string>::Err(
+          "Invalid callback format: sampleRate and channelCount must be greater than 0");
+    }
+
+    converterOutputBufferSize_ = std::max(
+        (double)maxInputBufferLength, sampleRate_ / bufferFormat.sampleRate * maxInputBufferLength);
+
+    callbackFormat_ = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
+                                                       sampleRate:sampleRate_
+                                                         channels:channelCount_
+                                                      interleaved:NO];
+
+    converter_ = [[AVAudioConverter alloc] initFromFormat:bufferFormat toFormat:callbackFormat_];
+    converter_.sampleRateConverterAlgorithm = AVSampleRateConverterAlgorithm_Normal;
+    converter_.sampleRateConverterQuality = AVAudioQualityMax;
+    converter_.primeMethod = AVAudioConverterPrimeMethod_None;
+
+    converterInputBuffer_ =
+        [[AVAudioPCMBuffer alloc] initWithPCMFormat:bufferFormat_
+                                      frameCapacity:(AVAudioFrameCount)converterInputBufferSize_];
+    converterOutputBuffer_ =
+        [[AVAudioPCMBuffer alloc] initWithPCMFormat:callbackFormat_
+                                      frameCapacity:(AVAudioFrameCount)converterOutputBufferSize_];
+  }
+
+  return Result<NoneType, std::string>::Ok(None);
+}
+
+/// @brief Cleans up resources used by the IOSRecorderCallback.
+/// This method should be called from the JS thread only.
+void IOSRecorderCallback::cleanup()
+{
+  @autoreleasepool {
+    emitAudioData(true);
+
+    converter_ = nil;
+    bufferFormat_ = nil;
+    callbackFormat_ = nil;
+    converterInputBuffer_ = nil;
+    converterOutputBuffer_ = nil;
+
+    for (int i = 0; i < channelCount_; ++i) {
+      circularBus_[i]->zero();
+    }
+  }
+}
+
+/// @brief Receives audio data from the recorder, processes it, and stores it in the circular buffer.
+/// The data is converted using AVAudioConverter if the input format differs from the user desired callback format.
+/// This method runs on the audio thread.
+/// @param inputBuffer Pointer to the AudioBufferList containing the incoming audio data.
+/// @param numFrames Number of frames in the input buffer.
+void IOSRecorderCallback::receiveAudioData(const AudioBufferList *inputBuffer, int numFrames)
+{
+  if (!isInitialized_.load(std::memory_order_acquire)) {
+    return;
+  }
+
+  @autoreleasepool {
+    NSError *error = nil;
+
+    if (bufferFormat_.sampleRate == sampleRate_ && bufferFormat_.channelCount == channelCount_ &&
+        !bufferFormat_.isInterleaved) {
+      // Directly write to circular buffer
+      for (int i = 0; i < channelCount_; ++i) {
+        auto *inputChannel = static_cast<float *>(inputBuffer->mBuffers[i].mData);
+        circularBus_[i]->push_back(inputChannel, numFrames);
+      }
+
+      emitAudioData();
+      return;
+    }
+
+    size_t outputFrameCount = ceil(numFrames * (sampleRate_ / bufferFormat_.sampleRate));
+
+    for (size_t i = 0; i < bufferFormat_.channelCount; ++i) {
+      memcpy(
+          converterInputBuffer_.mutableAudioBufferList->mBuffers[i].mData,
+          inputBuffer->mBuffers[i].mData,
+          inputBuffer->mBuffers[i].mDataByteSize);
+    }
+
+    converterInputBuffer_.frameLength = numFrames;
+
+    __block BOOL handedOff = false;
+    AVAudioConverterInputBlock inputBlock = ^AVAudioBuffer *_Nullable(
+        AVAudioPacketCount inNumberOfPackets, AVAudioConverterInputStatus *outStatus)
+    {
+      if (handedOff) {
+        *outStatus = AVAudioConverterInputStatus_NoDataNow;
+        return nil;
+      }
+
+      handedOff = true;
+      *outStatus = AVAudioConverterInputStatus_HaveData;
+      return converterInputBuffer_;
+    };
+
+    [converter_ convertToBuffer:converterOutputBuffer_ error:&error withInputFromBlock:inputBlock];
+    converterOutputBuffer_.frameLength = sampleRate_ / bufferFormat_.sampleRate * numFrames;
+
+    if (error != nil) {
+      invokeOnErrorCallback(
+          std::string("Error during audio conversion, native error: ") +
+          [[error debugDescription] UTF8String]);
+      return;
+    }
+
+    for (int i = 0; i < channelCount_; ++i) {
+      auto *inputChannel =
+          static_cast<float *>(converterOutputBuffer_.audioBufferList->mBuffers[i].mData);
+      circularBus_[i]->push_back(inputChannel, outputFrameCount);
+    }
+
+    emitAudioData();
+  }
+}
+
+} // namespace audioapi
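To make the resampling path in receiveAudioData above easier to follow, here is a small, self-contained C++ sketch of the buffer-sizing arithmetic it relies on. The function names are illustrative and not part of the package: the converter output capacity is the larger of the input length and the ratio-scaled input length, and each incoming chunk yields roughly ceil(numFrames * outputRate / inputRate) frames.

#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstddef>

// Frames produced when a chunk of `numFrames` recorded at `inputRate`
// is resampled to `outputRate` (mirrors the ceil(...) expression above).
size_t resampledFrameCount(size_t numFrames, double inputRate, double outputRate) {
  return static_cast<size_t>(std::ceil(numFrames * (outputRate / inputRate)));
}

// Capacity needed for the converter output buffer: never smaller than the
// input chunk, and scaled up when the callback rate exceeds the input rate.
size_t converterOutputCapacity(size_t maxInputFrames, double inputRate, double outputRate) {
  return static_cast<size_t>(std::max(
      static_cast<double>(maxInputFrames), (outputRate / inputRate) * maxInputFrames));
}

int main() {
  // Example: 48 kHz hardware input, 16 kHz requested callback format, 1024-frame chunks.
  assert(resampledFrameCount(1024, 48000.0, 16000.0) == 342);    // ceil(1024 / 3)
  assert(converterOutputCapacity(1024, 48000.0, 16000.0) == 1024);
  return 0;
}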
package/ios/audioapi/ios/system/AudioEngine.h
@@ -5,30 +5,26 @@
 
 @class AudioSessionManager;
 
+typedef NS_ENUM(NSInteger, AudioEngineState) {
+  AudioEngineStateIdle = 0,
+  AudioEngineStateRunning,
+  AudioEngineStatePaused,
+  AudioEngineStateInterrupted
+};
+
 @interface AudioEngine : NSObject
 
-@property (nonatomic, assign)
-@property (nonatomic, assign) bool isSupposedToBeRunning;
+@property (nonatomic, assign) AudioEngineState state;
 @property (nonatomic, strong) AVAudioEngine *audioEngine;
 @property (nonatomic, strong) NSMutableDictionary *sourceNodes;
 @property (nonatomic, strong) NSMutableDictionary *sourceFormats;
-@property (nonatomic, strong) NSMutableDictionary *sourceStates;
 @property (nonatomic, strong) AVAudioSinkNode *inputNode;
 @property (nonatomic, weak) AudioSessionManager *sessionManager;
 
-- (instancetype)
-
+- (instancetype)init;
 + (instancetype)sharedInstance;
+
 - (void)cleanup;
-- (bool)rebuildAudioEngineAndStartIfNecessary;
-- (bool)restartAudioEngine;
-- (bool)startEngine;
-- (void)stopEngine;
-- (void)pauseEngine:(NSString *)sourceNodeId;
-- (bool)isRunning;
-- (void)markAsInterrupted;
-- (void)unmarkAsInterrupted;
-- (bool)isSupposedToRun;
 
 - (NSString *)attachSourceNode:(AVAudioSourceNode *)sourceNode format:(AVAudioFormat *)format;
 - (void)detachSourceNodeWithId:(NSString *)sourceNodeId;
@@ -36,10 +32,19 @@
 - (void)attachInputNode:(AVAudioSinkNode *)inputNode;
 - (void)detachInputNode;
 
-- (void)
+- (void)onInterruptionBegin;
+- (void)onInterruptionEnd:(bool)shouldResume;
+
+- (AudioEngineState)getState;
 
 - (bool)startIfNecessary;
-- (void)stopIfNecessary;
 - (void)pauseIfNecessary;
+- (void)stopIfNecessary;
+
+- (void)stopIfPossible;
+
+- (void)restartAudioEngine;
+
+- (void)logAudioEngineState;
 
 @end
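The header above replaces the old isSupposedToBeRunning flag and markAsInterrupted/unmarkAsInterrupted methods with an explicit AudioEngineState. As a reading aid, the following C++ sketch models the interruption-related transitions implied by onInterruptionBegin and onInterruptionEnd:(bool)shouldResume. It is an illustrative model, not library code, and it omits the restart-error fallback to the idle state seen in the implementation below.

#include <cassert>

// Illustrative state model for the new AudioEngine lifecycle.
enum class EngineState { Idle, Running, Paused, Interrupted };

struct EngineModel {
  EngineState state = EngineState::Idle;

  void onInterruptionBegin() {
    // Only a running engine gets marked as interrupted; otherwise do nothing.
    if (state == EngineState::Running) {
      state = EngineState::Interrupted;
    }
  }

  void onInterruptionEnd(bool shouldResume) {
    if (state != EngineState::Interrupted) {
      return;  // nothing to recover from
    }
    // Either resume immediately or stay paused until an explicit JS-side resume.
    state = shouldResume ? EngineState::Running : EngineState::Paused;
  }
};

int main() {
  EngineModel m;
  m.state = EngineState::Running;
  m.onInterruptionBegin();
  assert(m.state == EngineState::Interrupted);
  m.onInterruptionEnd(false);
  assert(m.state == EngineState::Paused);
  return 0;
}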
package/ios/audioapi/ios/system/AudioEngine.mm
@@ -10,29 +10,25 @@ static AudioEngine *_sharedInstance = nil;
   return _sharedInstance;
 }
 
-- (instancetype)
+- (instancetype)init
 {
   if (self = [super init]) {
-    self.
-    self.isSupposedToBeRunning = true;
+    self.state = AudioEngineState::AudioEngineStateIdle;
     self.audioEngine = [[AVAudioEngine alloc] init];
     self.inputNode = nil;
 
     self.sourceNodes = [[NSMutableDictionary alloc] init];
     self.sourceFormats = [[NSMutableDictionary alloc] init];
-    self.sourceStates = [[NSMutableDictionary alloc] init];
 
-    self.sessionManager =
+    self.sessionManager = [AudioSessionManager sharedInstance];
   }
 
   _sharedInstance = self;
-
   return self;
 }
 
 - (void)cleanup
 {
-  NSLog(@"[AudioEngine] cleanup");
   if ([self.audioEngine isRunning]) {
     [self.audioEngine stop];
   }
@@ -40,212 +36,229 @@ static AudioEngine *_sharedInstance = nil;
   self.audioEngine = nil;
   self.sourceNodes = nil;
   self.sourceFormats = nil;
-  self.sourceStates = nil;
   self.inputNode = nil;
 
   [self.sessionManager setActive:false];
   self.sessionManager = nil;
 }
 
-- (
+- (NSString *)attachSourceNode:(AVAudioSourceNode *)sourceNode format:(AVAudioFormat *)format
 {
-
-    AVAudioSourceNode *sourceNode = [self.sourceNodes valueForKey:sourceNodeId];
-    AVAudioFormat *format = [self.sourceFormats valueForKey:sourceNodeId];
-
-    [self.audioEngine attachNode:sourceNode];
-    [self.audioEngine connect:sourceNode to:self.audioEngine.mainMixerNode format:format];
-  }
-
-  if (self.inputNode) {
-    [self.audioEngine attachNode:self.inputNode];
-    [self.audioEngine connect:self.audioEngine.inputNode to:self.inputNode format:nil];
-  }
-}
+  NSString *sourceNodeId = [[NSUUID UUID] UUIDString];
 
-
-
-  [self rebuildAudioEngine];
-  return [self startIfNecessary];
-}
+  [self.sourceNodes setValue:sourceNode forKey:sourceNodeId];
+  [self.sourceFormats setValue:format forKey:sourceNodeId];
 
-
-
-  if ([self.audioEngine isRunning]) {
-    [self.audioEngine stop];
-  }
+  [self.audioEngine attachNode:sourceNode];
+  [self.audioEngine connect:sourceNode to:self.audioEngine.mainMixerNode format:format];
 
-
-  return [self rebuildAudioEngineAndStartIfNecessary];
+  return sourceNodeId;
 }
 
-- (
+- (void)detachSourceNodeWithId:(NSString *)sourceNodeId
 {
-
-  NSError *error = nil;
-  self.isSupposedToBeRunning = true;
-
-  if ([self.audioEngine isRunning] && ![self isInterrupted]) {
-    NSLog(@"[AudioEngine] Engine is already running");
-    return true;
-  }
+  AVAudioSourceNode *sourceNode = [self.sourceNodes valueForKey:sourceNodeId];
 
-  if (
-
+  if (sourceNode == nil) {
+    NSLog(@"[AudioEngine] No source node found with ID: %@", sourceNodeId);
+    return;
   }
 
-
-  NSLog(@"[AudioEngine] rebuilding after interruption");
-  [self.audioEngine stop];
-  [self.audioEngine reset];
+  [self.audioEngine detachNode:sourceNode];
 
-
+  [self.sourceNodes removeObjectForKey:sourceNodeId];
+  [self.sourceFormats removeObjectForKey:sourceNodeId];
+}
 
-
-
+- (void)attachInputNode:(AVAudioSinkNode *)inputNode
+{
+  self.inputNode = inputNode;
+  AVAudioFormat *format = [self.audioEngine.inputNode inputFormatForBus:0];
 
-  [self.audioEngine
-  [self.audioEngine
+  [self.audioEngine attachNode:inputNode];
+  [self.audioEngine connect:self.audioEngine.inputNode to:inputNode format:format];
+}
 
-
-
-
+- (void)detachInputNode
+{
+  if (self.inputNode == nil) {
+    return;
   }
 
-
+  [self.audioEngine detachNode:self.inputNode];
+  self.inputNode = nil;
 }
 
-- (void)
+- (void)onInterruptionBegin
 {
-
-
+  if (self.state != AudioEngineState::AudioEngineStateRunning) {
+    // If engine was not active, do nothing
     return;
   }
 
-
-
+  // If engine was active or paused (or interrupted :)) mark as interrupted
+  self.state = AudioEngineState::AudioEngineStateInterrupted;
 }
 
-- (void)
+- (void)onInterruptionEnd:(bool)shouldResume
 {
-
-  self.isSupposedToBeRunning = false;
+  NSError *error = nil;
 
-  if (
+  if (self.state != AudioEngineState::AudioEngineStateInterrupted) {
+    // If engine was not interrupted, do nothing
+    // Not a real condition, but better be safe than sorry :shrug:
     return;
   }
 
-
-  [self
-
+  // Stop just in case, reset the engine and build it from scratch
+  [self stopIfNecessary];
+  [self.audioEngine reset];
+  [self rebuildAudioEngine];
 
-
-
-
-
+  // If shouldResume is false, mark the engine as paused and wait
+  // for JS-side resume command
+  // TODO: this should be notified to the user f.e. via Event Emitter
+  if (!shouldResume) {
+    self.state = AudioEngineState::AudioEngineStatePaused;
+    return;
+  }
 
-
-
-
-
+  [self.audioEngine prepare];
+  [self.audioEngine startAndReturnError:&error];
+
+  if (error != nil) {
+    NSLog(
+        @"Error while restarting the audio engine after interruption: %@",
+        [error debugDescription]);
+    self.state = AudioEngineState::AudioEngineStateIdle;
+    return;
+  }
+
+  self.state = AudioEngineState::AudioEngineStateRunning;
 }
 
-- (
+- (AudioEngineState)getState
 {
-  self.
+  return self.state;
 }
 
--
+/// @brief Rebuilds the audio engine by re-attaching and re-connecting all source nodes and input node.
+- (void)rebuildAudioEngine
 {
-
-  NSLog(@"[AudioEngine] attaching new source node with ID: %@", sourceNodeId);
+  self.audioEngine = [[AVAudioEngine alloc] init];
 
-
-
-
+  for (id sourceNodeId in self.sourceNodes) {
+    AVAudioSourceNode *sourceNode = [self.sourceNodes valueForKey:sourceNodeId];
+    AVAudioFormat *format = [self.sourceFormats valueForKey:sourceNodeId];
 
-
-
+    [self.audioEngine attachNode:sourceNode];
+    [self.audioEngine connect:sourceNode to:self.audioEngine.mainMixerNode format:format];
+  }
 
-
+  if (self.inputNode) {
+    [self.audioEngine attachNode:self.inputNode];
+    [self.audioEngine connect:self.audioEngine.inputNode to:self.inputNode format:nil];
+  }
 }
 
-
+// @brief Starts the audio engine if not already running.
+- (bool)startEngine
 {
-
+  NSError *error = nil;
 
-
+  if ([self.audioEngine isRunning] && self.state == AudioEngineState::AudioEngineStateRunning) {
+    return true;
+  }
 
-  if (
-
-    return;
+  if (![self.sessionManager setActive:true]) {
+    return false;
   }
 
-
+  if (self.state == AudioEngineState::AudioEngineStateInterrupted) {
+    NSLog(@"[AudioEngine] rebuilding after interruption");
+    [self.audioEngine stop];
+    [self.audioEngine reset];
+    [self rebuildAudioEngine];
+  }
 
-  [self.
-  [self.
-  [self.sourceStates removeObjectForKey:sourceNodeId];
-}
+  [self.audioEngine prepare];
+  [self.audioEngine startAndReturnError:&error];
 
-
-
-
+  if (error != nil) {
+    NSLog(@"Error while starting the audio engine: %@", [error debugDescription]);
+    return false;
+  }
 
-
-
+  self.state = AudioEngineState::AudioEngineStateRunning;
+  return true;
 }
 
-- (void)
+- (void)stopEngine
 {
-  if (self.
+  if (self.state == AudioEngineState::AudioEngineStateIdle) {
     return;
   }
 
-  [self.audioEngine
-  self.
-  [self restartAudioEngine];
+  [self.audioEngine stop];
+  self.state = AudioEngineState::AudioEngineStateIdle;
 }
 
 - (bool)startIfNecessary
 {
-  if (
+  if (self.state == AudioEngineState::AudioEngineStateRunning) {
     return true;
   }
 
-  if ([self.sourceNodes count] > 0 || self.inputNode != nil) {
+  if (([self.sourceNodes count] > 0) || self.inputNode != nil) {
     return [self startEngine];
   }
 
   return false;
 }
 
-- (void)
+- (void)pauseIfNecessary
 {
-  if (
+  if (self.state == AudioEngineState::AudioEngineStatePaused) {
     return;
   }
 
-
-
+  [self.audioEngine pause];
+  self.state = AudioEngineState::AudioEngineStatePaused;
+}
+
+- (void)stopIfNecessary
+{
+  if (self.state == AudioEngineState::AudioEngineStateIdle) {
+    return;
   }
+
+  [self stopEngine];
 }
 
-- (void)
+- (void)stopIfPossible
 {
-  if (
+  if (self.state == AudioEngineState::AudioEngineStateIdle) {
     return;
   }
 
-
-
-
-
-
+  bool hasInput = self.inputNode != nil;
+  bool hasSources = [self.sourceNodes count] > 0;
+
+  if (hasInput || hasSources) {
+    return;
+  }
 
-
-
+  [self stopEngine];
+}
+
+- (void)restartAudioEngine
+{
+  if ([self.audioEngine isRunning]) {
+    [self.audioEngine stop];
+  }
+
+  self.audioEngine = [[AVAudioEngine alloc] init];
+  [self rebuildAudioEngine];
 }
 
 - (void)logAudioEngineState
@@ -286,9 +299,4 @@ static AudioEngine *_sharedInstance = nil;
   NSLog(@"=======================================================");
 }
 
-- (bool)isSupposedToRun
-{
-  return self.isSupposedToBeRunning;
-}
-
 @end
package/ios/audioapi/ios/system/AudioSessionManager.h
@@ -8,34 +8,44 @@
 
 @property (nonatomic, weak) AVAudioSession *audioSession;
 
+// State tracking
 @property (nonatomic, assign) bool isActive;
-@property (nonatomic, assign) bool hasDirtySettings;
-@property (nonatomic, assign) AVAudioSessionMode sessionMode;
-@property (nonatomic, assign) AVAudioSessionCategory sessionCategory;
-@property (nonatomic, assign) AVAudioSessionCategoryOptions sessionOptions;
-@property (nonatomic, assign) bool allowHapticsAndSystemSoundsDuringRecording;
 @property (nonatomic, assign) bool shouldManageSession;
 
+// Session configuration options (desired by user)
+@property (nonatomic, assign) AVAudioSessionMode desiredMode;
+@property (nonatomic, assign) AVAudioSessionCategory desiredCategory;
+@property (nonatomic, assign) AVAudioSessionCategoryOptions desiredOptions;
+@property (nonatomic, assign) bool allowHapticsAndSounds;
+
 - (instancetype)init;
++ (instancetype)sharedInstance;
+
 - (void)cleanup;
-- (bool)configureAudioSession;
-- (bool)reconfigureAudioSession;
-- (void)markSettingsAsDirty;
 
-- (NSNumber *)getDevicePreferredSampleRate;
 - (void)setAudioSessionOptions:(NSString *)category
                           mode:(NSString *)mode
                        options:(NSArray *)options
                   allowHaptics:(BOOL)allowHaptics;
+
 - (bool)setActive:(bool)active;
+- (void)markInactive;
 - (void)disableSessionManagement;
 
+- (NSNumber *)getDevicePreferredSampleRate;
+- (NSNumber *)getDevicePreferredInputChannelCount;
+
 - (void)requestRecordingPermissions:(RCTPromiseResolveBlock)resolve
                              reject:(RCTPromiseRejectBlock)reject;
+- (NSString *)requestRecordingPermissions;
+
 - (void)checkRecordingPermissions:(RCTPromiseResolveBlock)resolve
                            reject:(RCTPromiseRejectBlock)reject;
+- (NSString *)checkRecordingPermissions;
 
 - (void)getDevicesInfo:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
 - (NSArray<NSDictionary *> *)parseDeviceList:(NSArray<AVAudioSessionPortDescription *> *)devices;
 
+- (bool)isSessionActive;
+
 @end
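The reworked AudioSessionManager header separates state tracking (isActive, shouldManageSession) from the configuration the caller asked for (desiredCategory, desiredMode, desiredOptions, allowHapticsAndSounds). The C++ sketch below illustrates the general "remember the desired configuration, apply it when the session becomes active" pattern those property names suggest; all names are illustrative and this is an assumption about intent, not the actual AudioSessionManager.mm implementation.

#include <optional>
#include <string>

// Configuration the caller requests, independent of whether the session is active yet.
struct DesiredSessionConfig {
  std::string category = "playback";
  std::string mode = "default";
  bool allowHapticsAndSounds = false;
};

class SessionManagerModel {
 public:
  void setAudioSessionOptions(const DesiredSessionConfig &config) {
    desired_ = config;      // record the request
    if (isActive_) {
      applied_ = desired_;  // already active: reapply right away
    }
  }

  bool setActive(bool active) {
    isActive_ = active;
    if (active) {
      applied_ = desired_;  // activation is where the desired config takes effect
    }
    return true;
  }

  bool isSessionActive() const { return isActive_; }

 private:
  bool isActive_ = false;
  DesiredSessionConfig desired_;
  std::optional<DesiredSessionConfig> applied_;
};

int main() {
  SessionManagerModel manager;
  manager.setAudioSessionOptions({"playAndRecord", "measurement", true});
  manager.setActive(true);  // the stored configuration is applied here
  return 0;
}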