react-native-audio-api 0.7.0-nightly-74078ac-20250730 → 0.7.0-nightly-cec6d21-20250801
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/cpp/audioapi/android/core/AudioDecoder.cpp +4 -2
- package/android/src/main/cpp/audioapi/android/core/AudioPlayer.cpp +14 -4
- package/android/src/main/cpp/audioapi/android/core/AudioPlayer.h +4 -2
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt +4 -0
- package/android/src/main/java/com/swmansion/audioapi/system/MediaSessionManager.kt +4 -0
- package/android/src/oldarch/NativeAudioAPIModuleSpec.java +4 -0
- package/common/cpp/audioapi/HostObjects/AudioContextHostObject.h +0 -10
- package/common/cpp/audioapi/core/AudioContext.cpp +17 -10
- package/common/cpp/audioapi/core/AudioContext.h +3 -0
- package/common/cpp/audioapi/core/AudioParam.cpp +4 -5
- package/common/cpp/audioapi/core/BaseAudioContext.cpp +11 -3
- package/common/cpp/audioapi/core/BaseAudioContext.h +2 -0
- package/common/cpp/audioapi/core/OfflineAudioContext.cpp +4 -0
- package/common/cpp/audioapi/core/OfflineAudioContext.h +2 -0
- package/common/cpp/audioapi/core/effects/BiquadFilterNode.cpp +0 -1
- package/ios/audioapi/ios/AudioAPIModule.mm +13 -11
- package/ios/audioapi/ios/core/IOSAudioPlayer.h +5 -2
- package/ios/audioapi/ios/core/IOSAudioPlayer.mm +20 -10
- package/ios/audioapi/ios/core/NativeAudioPlayer.h +2 -2
- package/ios/audioapi/ios/core/NativeAudioPlayer.m +7 -2
- package/ios/audioapi/ios/core/NativeAudioRecorder.m +2 -0
- package/ios/audioapi/ios/system/AudioEngine.h +13 -2
- package/ios/audioapi/ios/system/AudioEngine.mm +108 -32
- package/ios/audioapi/ios/system/AudioSessionManager.h +1 -0
- package/ios/audioapi/ios/system/AudioSessionManager.mm +6 -0
- package/ios/audioapi/ios/system/NotificationManager.h +3 -0
- package/ios/audioapi/ios/system/NotificationManager.mm +145 -41
- package/lib/commonjs/core/AudioContext.js +3 -3
- package/lib/commonjs/core/AudioContext.js.map +1 -1
- package/lib/commonjs/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/commonjs/system/AudioManager.js +17 -0
- package/lib/commonjs/system/AudioManager.js.map +1 -1
- package/lib/module/core/AudioContext.js +3 -3
- package/lib/module/core/AudioContext.js.map +1 -1
- package/lib/module/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/module/system/AudioManager.js +17 -0
- package/lib/module/system/AudioManager.js.map +1 -1
- package/lib/typescript/core/AudioContext.d.ts +3 -3
- package/lib/typescript/core/AudioContext.d.ts.map +1 -1
- package/lib/typescript/interfaces.d.ts +3 -3
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts +1 -0
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts.map +1 -1
- package/lib/typescript/system/AudioManager.d.ts +14 -0
- package/lib/typescript/system/AudioManager.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/core/AudioContext.ts +6 -6
- package/src/interfaces.ts +3 -3
- package/src/specs/NativeAudioAPIModule.ts +1 -0
- package/src/system/AudioManager.ts +17 -0
package/android/src/main/cpp/audioapi/android/core/AudioDecoder.cpp
@@ -38,7 +38,8 @@ std::shared_ptr<AudioBus> AudioDecoder::decodeWithFilePath(
       &decoder, buffer.data(), totalFrameCount, &framesDecoded);

   if (framesDecoded == 0) {
-    // __android_log_print(ANDROID_LOG_ERROR, "AudioDecoder", "Failed to decode");
+    // __android_log_print(ANDROID_LOG_ERROR, "AudioDecoder", "Failed to
+    // decode");

     ma_decoder_uninit(&decoder);
     return nullptr;
@@ -88,7 +89,8 @@ std::shared_ptr<AudioBus> AudioDecoder::decodeWithMemoryBlock(
       &decoder, buffer.data(), totalFrameCount, &framesDecoded);

   if (framesDecoded == 0) {
-    // __android_log_print(ANDROID_LOG_ERROR, "AudioDecoder", "Failed to decode");
+    // __android_log_print(ANDROID_LOG_ERROR, "AudioDecoder", "Failed to
+    // decode");

     ma_decoder_uninit(&decoder);
     return nullptr;

package/android/src/main/cpp/audioapi/android/core/AudioPlayer.cpp
@@ -45,10 +45,13 @@ bool AudioPlayer::openAudioStream() {
   return true;
 }

-void AudioPlayer::start() {
+bool AudioPlayer::start() {
   if (mStream_) {
-    mStream_->requestStart();
+    auto result = mStream_->requestStart();
+    return result == oboe::Result::OK;
   }
+
+  return false;
 }

 void AudioPlayer::stop() {
@@ -57,10 +60,13 @@ void AudioPlayer::stop() {
   }
 }

-void AudioPlayer::resume() {
+bool AudioPlayer::resume() {
   if (mStream_) {
-    mStream_->requestStart();
+    auto result = mStream_->requestStart();
+    return result == oboe::Result::OK;
   }
+
+  return false;
 }

 void AudioPlayer::suspend() {
@@ -78,6 +84,10 @@ void AudioPlayer::cleanup() {
   }
 }

+bool AudioPlayer::isRunning() const {
+  return mStream_ && mStream_->getState() == oboe::StreamState::Started;
+}
+
 DataCallbackResult AudioPlayer::onAudioReady(
     AudioStream *oboeStream,
     void *audioData,

package/android/src/main/cpp/audioapi/android/core/AudioPlayer.h
@@ -19,12 +19,14 @@ class AudioPlayer : public AudioStreamDataCallback, AudioStreamErrorCallback {
       float sampleRate,
       int channelCount);

-  void start();
+  bool start();
   void stop();
-  void resume();
+  bool resume();
   void suspend();
   void cleanup();

+  bool isRunning() const;
+
   DataCallbackResult onAudioReady(
       AudioStream *oboeStream,
       void *audioData,

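The Android player changes above replace fire-and-forget calls with ones that report whether oboe actually started the stream, and add an isRunning() check against the stream state. A minimal standalone sketch of that pattern follows; the helper names are illustrative and not part of the package.

// Sketch of the result-checking pattern used by the new start()/resume();
// helper names here are illustrative, not react-native-audio-api symbols.
#include <oboe/Oboe.h>
#include <memory>

bool startStream(const std::shared_ptr<oboe::AudioStream> &stream) {
  if (stream) {
    // requestStart() reports failure (e.g. a disconnected device) as a Result.
    return stream->requestStart() == oboe::Result::OK;
  }
  return false; // no stream has been opened yet
}

bool streamIsStarted(const std::shared_ptr<oboe::AudioStream> &stream) {
  // Started is the state an oboe stream reaches after a successful requestStart().
  return stream && stream->getState() == oboe::StreamState::Started;
}
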
package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt
@@ -91,6 +91,10 @@ class AudioAPIModule(
     MediaSessionManager.observeAudioInterruptions(enabled)
   }

+  override fun activelyReclaimSession(enabled: Boolean) {
+    MediaSessionManager.activelyReclaimSession(enabled)
+  }
+
   override fun observeVolumeChanges(enabled: Boolean) {
     MediaSessionManager.observeVolumeChanges(enabled)
   }

package/android/src/oldarch/NativeAudioAPIModuleSpec.java
@@ -66,6 +66,10 @@
   @DoNotStrip
   public abstract void observeAudioInterruptions(boolean enabled);

+  @ReactMethod
+  @DoNotStrip
+  public abstract void activelyReclaimSession(boolean enabled);
+
   @ReactMethod
   @DoNotStrip
   public abstract void observeVolumeChanges(boolean enabled);

package/common/cpp/audioapi/HostObjects/AudioContextHostObject.h
@@ -45,11 +45,6 @@ class AudioContextHostObject : public BaseAudioContextHostObject {
     auto audioContext = std::static_pointer_cast<AudioContext>(context_);
     auto result = audioContext->resume();

-    if (!result) {
-      promise->reject("Failed to resume audio context because it is already closed.");
-      return;
-    }
-
     promise->resolve([result](jsi::Runtime &runtime) {
       return jsi::Value(result);
     });
@@ -65,11 +60,6 @@ class AudioContextHostObject : public BaseAudioContextHostObject {
     auto audioContext = std::static_pointer_cast<AudioContext>(context_);
     auto result = audioContext->suspend();

-    if (!result) {
-      promise->reject("Failed to suspend audio context because it is already closed.");
-      return;
-    }
-
     promise->resolve([result](jsi::Runtime &runtime) {
       return jsi::Value(result);
     });

package/common/cpp/audioapi/core/AudioContext.cpp
@@ -63,14 +63,21 @@ bool AudioContext::resume() {
   }

   if (!playerHasBeenStarted_) {
-
-
-
-
+    if (audioPlayer_->start()) {
+      playerHasBeenStarted_ = true;
+      state_ = ContextState::RUNNING;
+      return true;
+    }
+
+    return false;
   }

-
-
+  if (audioPlayer_->resume()) {
+    state_ = ContextState::RUNNING;
+    return true;
+  }
+
+  return false;
 }

 bool AudioContext::suspend() {
@@ -90,13 +97,13 @@ bool AudioContext::suspend() {

 std::function<void(std::shared_ptr<AudioBus>, int)>
 AudioContext::renderAudio() {
-  if (!isRunning() || !destination_) {
-    return [](const std::shared_ptr<AudioBus> &, int) {};
-  }
-
   return [this](const std::shared_ptr<AudioBus> &data, int frames) {
     destination_->renderAudio(data, frames);
   };
 }

+bool AudioContext::isDriverRunning() const {
+  return audioPlayer_->isRunning();
+}
+
 } // namespace audioapi

package/common/cpp/audioapi/core/AudioContext.h
@@ -21,6 +21,7 @@ class AudioContext : public BaseAudioContext {
   bool resume();
   bool suspend();

+
  private:
 #ifdef ANDROID
   std::shared_ptr<AudioPlayer> audioPlayer_;
@@ -29,6 +30,8 @@ class AudioContext : public BaseAudioContext {
 #endif
   bool playerHasBeenStarted_;

+  bool isDriverRunning() const override;
+
   std::function<void(std::shared_ptr<AudioBus>, int)> renderAudio();
 };

package/common/cpp/audioapi/core/AudioParam.cpp
@@ -18,11 +18,10 @@ AudioParam::AudioParam(
       minValue_(minValue),
       maxValue_(maxValue),
       context_(context),
-      audioBus_(
-
-
-
-          context->getSampleRate())) {
+      audioBus_(std::make_shared<AudioBus>(
+          RENDER_QUANTUM_SIZE,
+          1,
+          context->getSampleRate())) {
   startTime_ = 0;
   endTime_ = 0;
   startValue_ = value_;

package/common/cpp/audioapi/core/BaseAudioContext.cpp
@@ -28,7 +28,15 @@ BaseAudioContext::BaseAudioContext(
 }

 std::string BaseAudioContext::getState() {
-  return BaseAudioContext::toString(state_);
+  if (isDriverRunning()) {
+    return BaseAudioContext::toString(state_);
+  }
+
+  if (state_ == ContextState::CLOSED) {
+    return BaseAudioContext::toString(ContextState::CLOSED);
+  }
+
+  return BaseAudioContext::toString(ContextState::SUSPENDED);
 }

 float BaseAudioContext::getSampleRate() const {
@@ -155,11 +163,11 @@ AudioNodeManager *BaseAudioContext::getNodeManager() {
 }

 bool BaseAudioContext::isRunning() const {
-  return state_ == ContextState::RUNNING;
+  return state_ == ContextState::RUNNING && isDriverRunning();
 }

 bool BaseAudioContext::isSuspended() const {
-  return state_ == ContextState::SUSPENDED;
+  return state_ == ContextState::SUSPENDED || !isDriverRunning();
 }

 bool BaseAudioContext::isClosed() const {

package/common/cpp/audioapi/core/BaseAudioContext.h
@@ -87,6 +87,8 @@ class BaseAudioContext {
   std::shared_ptr<PeriodicWave> cachedSawtoothWave_ = nullptr;
   std::shared_ptr<PeriodicWave> cachedTriangleWave_ = nullptr;

+  virtual bool isDriverRunning() const = 0;
+
  public:
   std::shared_ptr<IAudioEventHandlerRegistry> audioEventHandlerRegistry_;
 };

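Taken together, the BaseAudioContext changes stop trusting the stored state_ when the platform driver is not actually running: getState() then reports a non-closed context as suspended, and isRunning()/isSuspended() fold the driver check in. A self-contained sketch of that reporting rule, using standalone types rather than the package's classes:

// Standalone illustration of the state-reporting rule introduced above;
// the enum and functions are local to this sketch, not the library's API.
#include <iostream>
#include <string>

enum class ContextState { SUSPENDED, RUNNING, CLOSED };

std::string toString(ContextState s) {
  switch (s) {
    case ContextState::RUNNING:   return "running";
    case ContextState::SUSPENDED: return "suspended";
    case ContextState::CLOSED:    return "closed";
  }
  return "unknown";
}

std::string reportedState(ContextState state, bool driverRunning) {
  if (driverRunning) {
    return toString(state);                 // driver alive: stored state is trusted
  }
  if (state == ContextState::CLOSED) {
    return toString(ContextState::CLOSED);  // closed stays closed
  }
  return toString(ContextState::SUSPENDED); // otherwise report suspended
}

int main() {
  std::cout << reportedState(ContextState::RUNNING, true) << "\n";  // running
  std::cout << reportedState(ContextState::RUNNING, false) << "\n"; // suspended
  return 0;
}
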
package/ios/audioapi/ios/AudioAPIModule.mm
@@ -85,9 +85,8 @@ RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(getDevicePreferredSampleRate)
   return [self.audioSessionManager getDevicePreferredSampleRate];
 }

-RCT_EXPORT_METHOD(
-
-    reject)
+RCT_EXPORT_METHOD(setAudioSessionActivity : (BOOL)enabled resolve : (RCTPromiseResolveBlock)
+                      resolve reject : (RCTPromiseRejectBlock)reject)
 {
   if ([self.audioSessionManager setActive:enabled]) {
     resolve(@"true");
@@ -97,9 +96,8 @@ RCT_EXPORT_METHOD(
     resolve(@"false");
 }

-RCT_EXPORT_METHOD(
-
-    options allowHaptics : (BOOL)allowHaptics)
+RCT_EXPORT_METHOD(setAudioSessionOptions : (NSString *)category mode : (NSString *)mode options : (NSArray *)
+                      options allowHaptics : (BOOL)allowHaptics)
 {
   [self.audioSessionManager setAudioSessionOptions:category mode:mode options:options allowHaptics:allowHaptics];
 }
@@ -124,20 +122,24 @@ RCT_EXPORT_METHOD(observeAudioInterruptions : (BOOL)enabled)
   [self.notificationManager observeAudioInterruptions:enabled];
 }

+RCT_EXPORT_METHOD(activelyReclaimSession : (BOOL)enabled)
+{
+  [self.notificationManager activelyReclaimSession:enabled];
+}
+
 RCT_EXPORT_METHOD(observeVolumeChanges : (BOOL)enabled)
 {
   [self.notificationManager observeVolumeChanges:(BOOL)enabled];
 }

-RCT_EXPORT_METHOD(
-
-    reject)
+RCT_EXPORT_METHOD(requestRecordingPermissions : (nonnull RCTPromiseResolveBlock)
+                      resolve reject : (nonnull RCTPromiseRejectBlock)reject)
 {
   [self.audioSessionManager requestRecordingPermissions:resolve reject:reject];
 }

-RCT_EXPORT_METHOD(
-
+RCT_EXPORT_METHOD(checkRecordingPermissions : (nonnull RCTPromiseResolveBlock)
+                      resolve reject : (nonnull RCTPromiseRejectBlock)reject)
 {
   [self.audioSessionManager checkRecordingPermissions:resolve reject:reject];
 }

package/ios/audioapi/ios/core/IOSAudioPlayer.h
@@ -21,12 +21,14 @@ class IOSAudioPlayer {
       int channelCount);
   ~IOSAudioPlayer();

-  void start();
+  bool start();
   void stop();
-  void resume();
+  bool resume();
   void suspend();
   void cleanup();

+  bool isRunning() const;
+
 protected:
   std::shared_ptr<AudioBus> audioBus_;
   NativeAudioPlayer *audioPlayer_;
@@ -34,4 +36,5 @@ class IOSAudioPlayer {
   int channelCount_;
   std::atomic<bool> isRunning_;
 };
+
 } // namespace audioapi

package/ios/audioapi/ios/core/IOSAudioPlayer.mm
@@ -3,6 +3,7 @@
 #include <audioapi/core/Constants.h>
 #include <audioapi/dsp/VectorMath.h>
 #include <audioapi/ios/core/IOSAudioPlayer.h>
+#include <audioapi/ios/system/AudioEngine.h>
 #include <audioapi/utils/AudioArray.h>
 #include <audioapi/utils/AudioBus.h>

@@ -49,14 +50,15 @@ IOSAudioPlayer::~IOSAudioPlayer()
   cleanup();
 }

-void IOSAudioPlayer::start()
+bool IOSAudioPlayer::start()
 {
-  if (
-    return;
+  if (isRunning()) {
+    return true;
   }

-  [audioPlayer_ start];
-  isRunning_.store(
+  bool success = [audioPlayer_ start];
+  isRunning_.store(success);
+  return success;
 }

 void IOSAudioPlayer::stop()
@@ -65,14 +67,15 @@ void IOSAudioPlayer::stop()
   [audioPlayer_ stop];
 }

-void IOSAudioPlayer::resume()
+bool IOSAudioPlayer::resume()
 {
-  if (
-    return;
+  if (isRunning()) {
+    return true;
   }

-  [audioPlayer_ resume];
-  isRunning_.store(
+  bool success = [audioPlayer_ resume];
+  isRunning_.store(success);
+  return success;
 }

 void IOSAudioPlayer::suspend()
@@ -81,6 +84,13 @@ void IOSAudioPlayer::suspend()
   [audioPlayer_ suspend];
 }

+bool IOSAudioPlayer::isRunning() const
+{
+  AudioEngine *audioEngine = [AudioEngine sharedInstance];
+
+  return isRunning_.load() && [audioEngine isRunning];
+}
+
 void IOSAudioPlayer::cleanup()
 {
   stop();

package/ios/audioapi/ios/core/NativeAudioPlayer.h
@@ -19,11 +19,11 @@ typedef void (^RenderAudioBlock)(AudioBufferList *outputBuffer, int numFrames);
                     sampleRate:(float)sampleRate
                   channelCount:(int)channelCount;

-- (void)start;
+- (bool)start;

 - (void)stop;

-- (void)resume;
+- (bool)resume;

 - (void)suspend;

package/ios/audioapi/ios/core/NativeAudioPlayer.m
@@ -32,13 +32,15 @@
   return self;
 }

-- (void)start
+- (bool)start
 {
   NSLog(@"[AudioPlayer] start");

   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
   self.sourceNodeId = [audioEngine attachSourceNode:self.sourceNode format:self.format];
+
+  return [audioEngine startIfNecessary];
 }

 - (void)stop
@@ -48,15 +50,18 @@
   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
   [audioEngine detachSourceNodeWithId:self.sourceNodeId];
+  [audioEngine stopIfNecessary];
   self.sourceNodeId = nil;
 }

-- (void)resume
+- (bool)resume
 {
   NSLog(@"[AudioPlayer] resume");
   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
   [audioEngine startEngine];
+
+  return [audioEngine startEngine];
 }

 - (void)suspend

package/ios/audioapi/ios/core/NativeAudioRecorder.m
@@ -101,6 +101,7 @@
   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
   [audioEngine attachInputNode:self.sinkNode];
+  [audioEngine startIfNecessary];
 }

 - (void)stop
@@ -108,6 +109,7 @@
   AudioEngine *audioEngine = [AudioEngine sharedInstance];
   assert(audioEngine != nil);
   [audioEngine detachInputNode];
+  [audioEngine stopIfNecessary];
 }

 - (void)cleanup

package/ios/audioapi/ios/system/AudioEngine.h
@@ -7,6 +7,8 @@

 @interface AudioEngine : NSObject

+@property (nonatomic, assign) bool isInterrupted;
+@property (nonatomic, assign) bool isSupposedToBeRunning;
 @property (nonatomic, strong) AVAudioEngine *audioEngine;
 @property (nonatomic, strong) NSMutableDictionary *sourceNodes;
 @property (nonatomic, strong) NSMutableDictionary *sourceFormats;
@@ -18,12 +20,15 @@

 + (instancetype)sharedInstance;
 - (void)cleanup;
-- (bool)
+- (bool)rebuildAudioEngineAndStartIfNecessary;
 - (bool)restartAudioEngine;
-- (void)startEngine;
+- (bool)startEngine;
 - (void)stopEngine;
 - (void)pauseEngine:(NSString *)sourceNodeId;
 - (bool)isRunning;
+- (void)markAsInterrupted;
+- (void)unmarkAsInterrupted;
+- (bool)isSupposedToRun;

 - (NSString *)attachSourceNode:(AVAudioSourceNode *)sourceNode format:(AVAudioFormat *)format;
 - (void)detachSourceNodeWithId:(NSString *)sourceNodeId;
@@ -31,4 +36,10 @@
 - (void)attachInputNode:(AVAudioSinkNode *)inputNode;
 - (void)detachInputNode;

+- (void)logAudioEngineState;
+
+- (bool)startIfNecessary;
+- (void)stopIfNecessary;
+- (void)pauseIfNecessary;
+
 @end

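The new AudioEngine.h surface (startIfNecessary/stopIfNecessary, isSupposedToBeRunning, interruption markers) together with the player and recorder call sites suggests demand-driven engine start/stop: start when a source or input node is attached and no interruption is active, stop once nothing is attached. AudioEngine.mm is not part of this section, so the following is only a hypothetical C++ sketch of that lifecycle with invented names, not the package's implementation.

// Hypothetical sketch of a demand-driven start/stop lifecycle; every name here
// is invented for illustration and does not mirror AudioEngine.mm.
#include <string>
#include <unordered_set>

class EngineLifecycle {
 public:
  void attach(const std::string &nodeId) {
    attached_.insert(nodeId);
    startIfNecessary();
  }

  void detach(const std::string &nodeId) {
    attached_.erase(nodeId);
    stopIfNecessary();
  }

  void markAsInterrupted() { interrupted_ = true; }

  void unmarkAsInterrupted() {
    interrupted_ = false;
    startIfNecessary(); // try to come back once the interruption ends
  }

  bool startIfNecessary() {
    supposedToBeRunning_ = !attached_.empty();
    if (supposedToBeRunning_ && !interrupted_ && !running_) {
      running_ = true; // stands in for actually starting the engine
    }
    return running_;
  }

  void stopIfNecessary() {
    supposedToBeRunning_ = !attached_.empty();
    if (!supposedToBeRunning_ && running_) {
      running_ = false; // stands in for actually stopping the engine
    }
  }

  bool isRunning() const { return running_; }

 private:
  std::unordered_set<std::string> attached_;
  bool running_ = false;
  bool interrupted_ = false;
  bool supposedToBeRunning_ = false;
};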