react-native-audio-api 0.11.1 → 0.11.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.cpp +8 -5
- package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.h +1 -1
- package/android/src/main/cpp/audioapi/android/core/utils/AndroidFileWriterBackend.h +1 -1
- package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.cpp +7 -3
- package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.h +1 -1
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.cpp +4 -2
- package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.h +1 -1
- package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.cpp +5 -4
- package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.h +2 -2
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt +9 -0
- package/android/src/main/java/com/swmansion/audioapi/system/MediaSessionManager.kt +2 -0
- package/android/src/oldarch/NativeAudioAPIModuleSpec.java +4 -0
- package/common/cpp/audioapi/HostObjects/inputs/AudioRecorderHostObject.cpp +3 -1
- package/common/cpp/audioapi/core/inputs/AudioRecorder.h +1 -1
- package/common/cpp/audioapi/core/sources/StreamerNode.h +6 -0
- package/common/cpp/audioapi/jsi/JsiUtils.cpp +21 -0
- package/common/cpp/audioapi/jsi/JsiUtils.h +17 -0
- package/ios/audioapi/ios/AudioAPIModule.mm +9 -0
- package/ios/audioapi/ios/core/IOSAudioRecorder.h +1 -1
- package/ios/audioapi/ios/core/IOSAudioRecorder.mm +5 -4
- package/ios/audioapi/ios/core/utils/FileOptions.h +3 -1
- package/ios/audioapi/ios/core/utils/FileOptions.mm +9 -3
- package/ios/audioapi/ios/core/utils/IOSFileWriter.h +2 -1
- package/ios/audioapi/ios/core/utils/IOSFileWriter.mm +5 -2
- package/ios/audioapi/ios/system/AudioEngine.mm +3 -1
- package/ios/audioapi/ios/system/AudioSessionManager.h +3 -0
- package/ios/audioapi/ios/system/AudioSessionManager.mm +38 -2
- package/lib/commonjs/api.js +8 -8
- package/lib/commonjs/api.js.map +1 -1
- package/lib/commonjs/core/AudioDecoder.js +2 -2
- package/lib/commonjs/core/AudioDecoder.js.map +1 -1
- package/lib/commonjs/core/AudioRecorder.js +4 -4
- package/lib/commonjs/core/AudioRecorder.js.map +1 -1
- package/lib/commonjs/hooks/index.js +21 -0
- package/lib/commonjs/hooks/index.js.map +1 -0
- package/lib/commonjs/hooks/useAudioInput.js +70 -0
- package/lib/commonjs/hooks/useAudioInput.js.map +1 -0
- package/lib/commonjs/hooks/useSystemVolume.js +4 -5
- package/lib/commonjs/hooks/useSystemVolume.js.map +1 -1
- package/lib/commonjs/index.js +11 -0
- package/lib/commonjs/index.js.map +1 -1
- package/lib/commonjs/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/commonjs/specs/NativeAudioAPIModule.web.js +1 -0
- package/lib/commonjs/specs/NativeAudioAPIModule.web.js.map +1 -1
- package/lib/commonjs/system/AudioManager.js +3 -0
- package/lib/commonjs/system/AudioManager.js.map +1 -1
- package/lib/commonjs/web-system/AudioManager.js +1 -0
- package/lib/commonjs/web-system/AudioManager.js.map +1 -1
- package/lib/module/api.js +2 -3
- package/lib/module/api.js.map +1 -1
- package/lib/module/core/AudioDecoder.js +2 -2
- package/lib/module/core/AudioDecoder.js.map +1 -1
- package/lib/module/core/AudioRecorder.js +4 -5
- package/lib/module/core/AudioRecorder.js.map +1 -1
- package/lib/module/hooks/index.js +5 -0
- package/lib/module/hooks/index.js.map +1 -0
- package/lib/module/hooks/useAudioInput.js +66 -0
- package/lib/module/hooks/useAudioInput.js.map +1 -0
- package/lib/module/hooks/useSystemVolume.js +1 -1
- package/lib/module/hooks/useSystemVolume.js.map +1 -1
- package/lib/module/index.js +5 -0
- package/lib/module/index.js.map +1 -1
- package/lib/module/specs/NativeAudioAPIModule.js.map +1 -1
- package/lib/module/specs/NativeAudioAPIModule.web.js +1 -0
- package/lib/module/specs/NativeAudioAPIModule.web.js.map +1 -1
- package/lib/module/system/AudioManager.js +3 -0
- package/lib/module/system/AudioManager.js.map +1 -1
- package/lib/module/web-system/AudioManager.js +1 -0
- package/lib/module/web-system/AudioManager.js.map +1 -1
- package/lib/typescript/api.d.ts +2 -3
- package/lib/typescript/api.d.ts.map +1 -1
- package/lib/typescript/core/AudioDecoder.d.ts.map +1 -1
- package/lib/typescript/core/AudioRecorder.d.ts +2 -2
- package/lib/typescript/core/AudioRecorder.d.ts.map +1 -1
- package/lib/typescript/events/types.d.ts +6 -4
- package/lib/typescript/events/types.d.ts.map +1 -1
- package/lib/typescript/hooks/index.d.ts +3 -0
- package/lib/typescript/hooks/index.d.ts.map +1 -0
- package/lib/typescript/hooks/useAudioInput.d.ts +28 -0
- package/lib/typescript/hooks/useAudioInput.d.ts.map +1 -0
- package/lib/typescript/hooks/useSystemVolume.d.ts.map +1 -1
- package/lib/typescript/index.d.ts +1 -0
- package/lib/typescript/index.d.ts.map +1 -1
- package/lib/typescript/interfaces.d.ts +1 -1
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts +2 -1
- package/lib/typescript/specs/NativeAudioAPIModule.d.ts.map +1 -1
- package/lib/typescript/specs/NativeAudioAPIModule.web.d.ts +1 -0
- package/lib/typescript/specs/NativeAudioAPIModule.web.d.ts.map +1 -1
- package/lib/typescript/system/AudioManager.d.ts +1 -0
- package/lib/typescript/system/AudioManager.d.ts.map +1 -1
- package/lib/typescript/system/types.d.ts +3 -1
- package/lib/typescript/system/types.d.ts.map +1 -1
- package/lib/typescript/types.d.ts +3 -0
- package/lib/typescript/types.d.ts.map +1 -1
- package/lib/typescript/web-system/AudioManager.d.ts +1 -0
- package/lib/typescript/web-system/AudioManager.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/api.ts +5 -4
- package/src/core/AudioDecoder.ts +2 -2
- package/src/core/AudioRecorder.ts +5 -4
- package/src/events/types.ts +16 -12
- package/src/hooks/index.ts +2 -0
- package/src/hooks/useAudioInput.ts +93 -0
- package/src/hooks/useSystemVolume.ts +3 -2
- package/src/index.ts +5 -0
- package/src/interfaces.ts +1 -1
- package/src/specs/NativeAudioAPIModule.ts +2 -1
- package/src/specs/NativeAudioAPIModule.web.ts +2 -0
- package/src/system/AudioManager.ts +4 -0
- package/src/system/types.ts +3 -1
- package/src/types.ts +4 -0
- package/src/web-system/AudioManager.ts +1 -0

package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.cpp
CHANGED

@@ -102,7 +102,7 @@ Result<NoneType, std::string> AndroidAudioRecorder::openAudioStream() {
 /// RN side requires their "file://" prefix, but sometimes it returned raw path.
 /// Most likely this was due to alpha version mistakes, but in case of problems leaving this here. (ㆆ _ ㆆ)
 /// @returns On success, returns the file URI where the recording is being saved (if file output is enabled).
-Result<std::string, std::string> AndroidAudioRecorder::start() {
+Result<std::string, std::string> AndroidAudioRecorder::start(const std::string &fileNameOverride) {
   std::scoped_lock startLock(callbackMutex_, fileWriterMutex_, adapterNodeMutex_);
 
   if (!isIdle()) {
@@ -120,9 +120,12 @@ Result<std::string, std::string> AndroidAudioRecorder::start() {
   }
 
   if (usesFileOutput()) {
-    auto fileResult =
-        std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
-            ->openFile(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
+    auto fileResult = std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
+                          ->openFile(
+                              streamSampleRate_,
+                              streamChannelCount_,
+                              streamMaxBufferSizeInFrames_,
+                              fileNameOverride);
 
     if (!fileResult.is_ok()) {
       return Result<std::string, std::string>::Err(
@@ -228,7 +231,7 @@ Result<std::string, std::string> AndroidAudioRecorder::enableFileOutput(
   if (!isIdle()) {
     auto fileResult =
         std::static_pointer_cast<AndroidFileWriterBackend>(fileWriter_)
-            ->openFile(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_);
+            ->openFile(streamSampleRate_, streamChannelCount_, streamMaxBufferSizeInFrames_, "");
 
     if (!fileResult.is_ok()) {
       return Result<std::string, std::string>::Err(

package/android/src/main/cpp/audioapi/android/core/AndroidAudioRecorder.h
CHANGED

@@ -26,7 +26,7 @@ class AndroidAudioRecorder : public oboe::AudioStreamCallback, public AudioRecor
   ~AndroidAudioRecorder() override;
   void cleanup();
 
-  Result<std::string, std::string> start() override;
+  Result<std::string, std::string> start(const std::string &fileNameOverride) override;
   Result<std::tuple<std::string, double, double>, std::string> stop() override;
 
   Result<std::string, std::string> enableFileOutput(std::shared_ptr<AudioFileProperties> properties) override;

package/android/src/main/cpp/audioapi/android/core/utils/AndroidFileWriterBackend.h
CHANGED

@@ -17,7 +17,7 @@ class AndroidFileWriterBackend : public AudioFileWriter {
       const std::shared_ptr<AudioFileProperties> &fileProperties)
       : AudioFileWriter(audioEventHandlerRegistry, fileProperties) {}
 
-  virtual OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize) = 0;
+  virtual OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize, const std::string &fileNameOverride) = 0;
   virtual bool writeAudioData(void *data, int numFrames) = 0;
 
   std::string getFilePath() const override { return filePath_; }

package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.cpp
CHANGED

@@ -63,7 +63,8 @@ std::string getFileExtension(const std::shared_ptr<AudioFileProperties> &propert
 }
 
 Result<std::string, std::string> getFilePath(
-    const std::shared_ptr<AudioFileProperties> &properties) {
+    const std::shared_ptr<AudioFileProperties> &properties,
+    const std::string &fileNameOverride) {
   std::string directory = getDirectory(properties);
   std::string subDirectory = std::format("{}/{}", directory, properties->subDirectory);
   std::string fileTimestamp = getTimestampString();
@@ -75,8 +76,11 @@ Result<std::string, std::string> getFilePath(
     return Result<std::string, std::string>::Err(result.unwrap_err());
  }
 
-  auto filePath = std::format(
-      "{}/{}_{}.{}", subDirectory, properties->fileNamePrefix, fileTimestamp, extension);
+  auto filePath = fileNameOverride.length() > 0
+      ? std::format("{}/{}.{}", subDirectory, fileNameOverride, extension)
+      : std::format(
+            "{}/{}_{}.{}", subDirectory, properties->fileNamePrefix, fileTimestamp, extension);
+
   return Result<std::string, std::string>::Ok(filePath);
 }
 
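The effect of the new `fileNameOverride` argument on the generated path is easiest to see side by side. Below is a minimal TypeScript sketch of the same naming rule; the helper name and the sample prefix/timestamp values are illustrative only, not library code.

```ts
// Sketch of the naming rule above (illustrative, not part of the package).
function resolveRecordingFileName(
  fileNamePrefix: string,
  timestamp: string,
  extension: string,
  fileNameOverride = ''
): string {
  // A non-empty override replaces the whole "<prefix>_<timestamp>" scheme.
  return fileNameOverride.length > 0
    ? `${fileNameOverride}.${extension}`
    : `${fileNamePrefix}_${timestamp}.${extension}`;
}

// resolveRecordingFileName('recording', '2025-01-01_12-00-00', 'm4a')           -> 'recording_2025-01-01_12-00-00.m4a'
// resolveRecordingFileName('recording', '2025-01-01_12-00-00', 'm4a', 'take-1') -> 'take-1.m4a'
```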

package/android/src/main/cpp/audioapi/android/core/utils/FileOptions.h
CHANGED

@@ -15,7 +15,7 @@ std::string getTimestampString();
 
 std::string getDirectory(const std::shared_ptr<AudioFileProperties> &properties);
 std::string getFileExtension(const std::shared_ptr<AudioFileProperties> &properties);
-Result<std::string, std::string> getFilePath(const std::shared_ptr<AudioFileProperties> &properties);
+Result<std::string, std::string> getFilePath(const std::shared_ptr<AudioFileProperties> &properties, const std::string &fileNameOverride);
 
 } // namespace android::fileoptions
 

package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.cpp
CHANGED

@@ -53,14 +53,16 @@ FFmpegAudioFileWriter::~FFmpegAudioFileWriter() {
 OpenFileResult FFmpegAudioFileWriter::openFile(
     float streamSampleRate,
     int32_t streamChannelCount,
-    int32_t streamMaxBufferSize) {
+    int32_t streamMaxBufferSize,
+    const std::string &fileNameOverride) {
   streamSampleRate_ = streamSampleRate;
   streamChannelCount_ = streamChannelCount;
   streamMaxBufferSize_ = streamMaxBufferSize;
   framesWritten_.store(0, std::memory_order_release);
   nextPts_ = 0;
   Result<NoneType, std::string> result = Result<NoneType, std::string>::Ok(None);
-  Result<std::string, std::string> filePathResult = fileoptions::getFilePath(fileProperties_);
+  Result<std::string, std::string> filePathResult =
+      fileoptions::getFilePath(fileProperties_, fileNameOverride);
 
   if (!filePathResult.is_ok()) {
     return OpenFileResult::Err(filePathResult.unwrap_err());

package/android/src/main/cpp/audioapi/android/core/utils/ffmpegBackend/FFmpegFileWriter.h
CHANGED

@@ -29,7 +29,7 @@ class FFmpegAudioFileWriter : public AndroidFileWriterBackend {
       const std::shared_ptr<AudioFileProperties> &fileProperties);
   ~FFmpegAudioFileWriter();
 
-  OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize) override;
+  OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize, const std::string &fileNameOverride) override;
   CloseFileResult closeFile() override;
 
   bool writeAudioData(void *data, int numFrames) override;

package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.cpp
CHANGED

@@ -75,7 +75,8 @@ MiniAudioFileWriter::~MiniAudioFileWriter() {
 OpenFileResult MiniAudioFileWriter::openFile(
     float streamSampleRate,
     int32_t streamChannelCount,
-    int32_t streamMaxBufferSize) {
+    int32_t streamMaxBufferSize,
+    const std::string &fileNameOverride) {
   streamSampleRate_ = streamSampleRate;
   streamChannelCount_ = streamChannelCount;
   streamMaxBufferSize_ = streamMaxBufferSize;
@@ -95,7 +96,7 @@ OpenFileResult MiniAudioFileWriter::openFile(
         "Failed to initialize converter" + std::string(ma_result_description(result)));
   }
 
-  result = initializeEncoder();
+  result = initializeEncoder(fileNameOverride);
 
   if (result != MA_SUCCESS) {
     return OpenFileResult ::Err(
@@ -266,10 +267,10 @@ ma_result MiniAudioFileWriter::initializeConverterIfNeeded() {
 /// This method sets up the audio encoder for writing to the file,
 /// it should be called only on the JS thread. (during file opening)
 /// @return MA_SUCCESS if initialization was successful, otherwise an error code.
-ma_result MiniAudioFileWriter::initializeEncoder() {
+ma_result MiniAudioFileWriter::initializeEncoder(const std::string &fileNameOverride) {
   ma_result result;
   Result<std::string, std::string> filePathResult =
-      android::fileoptions::getFilePath(fileProperties_);
+      android::fileoptions::getFilePath(fileProperties_, fileNameOverride);
 
   if (!filePathResult.is_ok()) {
     return MA_ERROR;

package/android/src/main/cpp/audioapi/android/core/utils/miniaudioBackend/MiniAudioFileWriter.h
CHANGED

@@ -17,7 +17,7 @@ class MiniAudioFileWriter : public AndroidFileWriterBackend {
       const std::shared_ptr<AudioFileProperties> &fileProperties);
   ~MiniAudioFileWriter();
 
-  OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize) override;
+  OpenFileResult openFile(float streamSampleRate, int32_t streamChannelCount, int32_t streamMaxBufferSize, const std::string &fileNameOverride) override;
   CloseFileResult closeFile() override;
 
   bool writeAudioData(void *data, int numFrames) override;
@@ -31,7 +31,7 @@ class MiniAudioFileWriter : public AndroidFileWriterBackend {
   ma_uint64 processingBufferLength_{0};
 
   ma_result initializeConverterIfNeeded();
-  ma_result initializeEncoder();
+  ma_result initializeEncoder(const std::string &fileNameOverride);
   ma_uint64 convertBuffer(void *data, int numFrames);
 
   bool isConverterRequired();

package/android/src/main/java/com/swmansion/audioapi/AudioAPIModule.kt
CHANGED

@@ -149,6 +149,15 @@ class AudioAPIModule(
     promise.resolve(MediaSessionManager.getDevicesInfo())
   }
 
+  override fun setInputDevice(
+    deviceId: String?,
+    promise: Promise?,
+  ) {
+    // TODO: noop for now, but it should be moved to upcoming
+    // audio engine implementation for android (duplex stream)
+    promise?.resolve(true)
+  }
+
   // Notification system methods
   @RequiresPermission(android.Manifest.permission.POST_NOTIFICATIONS)
   override fun showNotification(

package/android/src/main/java/com/swmansion/audioapi/system/MediaSessionManager.kt
CHANGED

@@ -209,6 +209,7 @@ object MediaSessionManager {
 
     for (inputDevice in this.audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
       val deviceInfo = Arguments.createMap()
+      deviceInfo.putString("id", inputDevice.getId().toString())
       deviceInfo.putString("name", inputDevice.productName.toString())
       deviceInfo.putString("type", parseDeviceType(inputDevice))
 
@@ -217,6 +218,7 @@ object MediaSessionManager {
 
     for (outputDevice in this.audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS)) {
       val deviceInfo = Arguments.createMap()
+      deviceInfo.putString("id", outputDevice.getId().toString())
       deviceInfo.putString("name", outputDevice.productName.toString())
       deviceInfo.putString("type", parseDeviceType(outputDevice))
 

package/android/src/oldarch/NativeAudioAPIModuleSpec.java
CHANGED

@@ -86,6 +86,10 @@ public abstract class NativeAudioAPIModuleSpec extends ReactContextBaseJavaModul
   @DoNotStrip
   public abstract void getDevicesInfo(Promise promise);
 
+  @ReactMethod
+  @DoNotStrip
+  public abstract void setInputDevice(String deviceId, Promise promise);
+
   @ReactMethod
   @DoNotStrip
   public abstract void showNotification(String type, String key, ReadableMap options, Promise promise);
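The new `setInputDevice` module method is a deliberate no-op on Android for now (it simply resolves, per the TODO above), while the iOS implementation further down in this diff forwards the id to `AVAudioSession setPreferredInput:`. Together with the `id` field now attached to each `getDevicesInfo()` entry, a JS caller can pick a specific microphone roughly as below; the `AudioManager` method names mirror the native spec, but the payload shape and device-type strings are assumptions, since the TypeScript wrapper changes are not shown in this excerpt.

```ts
import { AudioManager } from 'react-native-audio-api';

async function preferExternalMic() {
  // Each device entry now carries an "id" (port UID on iOS, AudioDeviceInfo id on Android).
  const devices: any = await AudioManager.getDevicesInfo();
  const externalMic = devices.inputDevices?.find((d: any) => d.type !== 'builtInMic'); // keys assumed

  if (externalMic) {
    // iOS: resolves once setPreferredInput succeeds, rejects if the id is unknown.
    // Android: currently resolves true without switching the route.
    await AudioManager.setInputDevice(externalMic.id);
  }
}
```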

package/common/cpp/audioapi/HostObjects/inputs/AudioRecorderHostObject.cpp
CHANGED

@@ -6,6 +6,7 @@
 #include <audioapi/core/sources/AudioBuffer.h>
 #include <audioapi/events/AudioEventHandlerRegistry.h>
 #include <audioapi/utils/AudioFileProperties.h>
+#include <audioapi/jsi/JsiUtils.h>
 #ifdef ANDROID
 #include <audioapi/android/core/AndroidAudioRecorder.h>
 #else
@@ -41,7 +42,8 @@ AudioRecorderHostObject::AudioRecorderHostObject(
 }
 
 JSI_HOST_FUNCTION_IMPL(AudioRecorderHostObject, start) {
-  auto result = audioRecorder_->start();
+  auto fileNameOverride = jsiutils::argToString(runtime, args, count, 0, "");
+  auto result = audioRecorder_->start(fileNameOverride);
   auto jsResult = jsi::Object(runtime);
 
   jsResult.setProperty(
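On the JS side this surfaces as an optional first argument to `AudioRecorder.start()`: the host function reads argument 0 with the new `argToString` helper and falls back to an empty string, which keeps the old prefix-plus-timestamp naming. A hedged sketch, assuming the TypeScript wrapper in `src/core/AudioRecorder.ts` (changed in this release but not shown here) forwards the string unchanged; the recorder options follow the library docs rather than this diff.

```ts
import { AudioRecorder } from 'react-native-audio-api';

const recorder = new AudioRecorder({
  sampleRate: 44100,
  bufferLengthInSamples: 44100,
});

// recorder.start()                    -> file output (if enabled) keeps the "<prefix>_<timestamp>.<ext>" name.
// recorder.start('interview-take-1')  -> recording is written as "interview-take-1.<ext>".
// Per the native @returns docs, the result carries the file URI when file output is enabled.
recorder.start('interview-take-1');
```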

package/common/cpp/audioapi/core/inputs/AudioRecorder.h
CHANGED

@@ -25,7 +25,7 @@ class AudioRecorder {
       : audioEventHandlerRegistry_(audioEventHandlerRegistry) {}
   virtual ~AudioRecorder() = default;
 
-  virtual Result<std::string, std::string> start() = 0;
+  virtual Result<std::string, std::string> start(const std::string &fileNameOverride) = 0;
   virtual Result<std::tuple<std::string, double, double>, std::string> stop() = 0;
 
   virtual Result<std::string, std::string> enableFileOutput(

package/common/cpp/audioapi/core/sources/StreamerNode.h
CHANGED

@@ -70,12 +70,18 @@ class StreamerNode : public AudioScheduledSourceNode {
   */
  bool initialize(const std::string &inputUrl);
 
+  std::string getStreamPath() const {
+    return streamPath_;
+  }
+
 protected:
  std::shared_ptr<AudioBus> processNode(
      const std::shared_ptr<AudioBus> &processingBus,
      int framesToProcess) override;
 
 private:
+  std::string streamPath_;
+
 #if !RN_AUDIO_API_FFMPEG_DISABLED
  AVFormatContext *fmtCtx_;
  AVCodecContext *codecCtx_;

package/common/cpp/audioapi/jsi/JsiUtils.cpp
ADDED

@@ -0,0 +1,21 @@
+#include <audioapi/jsi/JsiUtils.h>
+#include <string>
+
+namespace audioapi::jsiutils {
+
+using namespace facebook;
+
+std::string argToString(
+    jsi::Runtime &runtime,
+    const jsi::Value *args,
+    size_t count,
+    size_t index,
+    const std::string &defaultValue) {
+  if (index < count && args[index].isString()) {
+    return args[index].asString(runtime).utf8(runtime);
+  }
+
+  return defaultValue;
+}
+
+} // namespace audioapi::jsiutils

package/common/cpp/audioapi/jsi/JsiUtils.h
ADDED

@@ -0,0 +1,17 @@
+#pragma once
+
+#include <jsi/jsi.h>
+#include <string>
+
+namespace audioapi::jsiutils {
+
+using namespace facebook;
+
+std::string argToString(
+    jsi::Runtime &runtime,
+    const jsi::Value *args,
+    size_t count,
+    size_t index,
+    const std::string &defaultValue = "");
+
+} // namespace audioapi::jsiutils

package/ios/audioapi/ios/AudioAPIModule.mm
CHANGED

@@ -224,6 +224,15 @@ RCT_EXPORT_METHOD(
   });
 }
 
+RCT_EXPORT_METHOD(
+    setInputDevice : (NSString *)deviceId resolve : (RCTPromiseResolveBlock)
+        resolve reject : (RCTPromiseRejectBlock)reject)
+{
+  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+    [self.audioSessionManager setInputDevice:deviceId resolve:resolve reject:reject];
+  });
+}
+
 RCT_EXPORT_METHOD(disableSessionManagement)
 {
   [self.audioSessionManager disableSessionManagement];

package/ios/audioapi/ios/core/IOSAudioRecorder.h
CHANGED

@@ -27,7 +27,7 @@ class IOSAudioRecorder : public AudioRecorder {
   IOSAudioRecorder(const std::shared_ptr<AudioEventHandlerRegistry> &audioEventHandlerRegistry);
   ~IOSAudioRecorder() override;
 
-  Result<std::string, std::string> start() override;
+  Result<std::string, std::string> start(const std::string &fileNameOverride = "") override;
   Result<std::tuple<std::string, double, double>, std::string> stop() override;
 
   Result<std::string, std::string> enableFileOutput(

package/ios/audioapi/ios/core/IOSAudioRecorder.mm
CHANGED

@@ -71,7 +71,7 @@ IOSAudioRecorder::~IOSAudioRecorder()
 /// @brief Starts the audio recording process and prepares necessary resources.
 /// This method should be called from the JS thread only.
 /// @returns Result containing the file path if recording started successfully, or an error message.
-Result<std::string, std::string> IOSAudioRecorder::start()
+Result<std::string, std::string> IOSAudioRecorder::start(const std::string &fileNameOverride)
 {
   if (!isIdle()) {
     return Result<std::string, std::string>::Err("Recorder is already recording");
@@ -104,7 +104,7 @@ Result<std::string, std::string> IOSAudioRecorder::start()
 
   if (usesFileOutput()) {
     auto fileResult = std::static_pointer_cast<IOSFileWriter>(fileWriter_)
-                          ->openFile(inputFormat, maxInputBufferLength);
+                          ->openFile(inputFormat, maxInputBufferLength, fileNameOverride);
 
     if (fileResult.is_err()) {
       return Result<std::string, std::string>::Err(
@@ -191,8 +191,9 @@ Result<std::string, std::string> IOSAudioRecorder::enableFileOutput(
   fileWriter_ = std::make_shared<IOSFileWriter>(audioEventHandlerRegistry_, properties);
 
   if (!isIdle()) {
-    auto result = std::static_pointer_cast<IOSFileWriter>(fileWriter_)
-                      ->openFile([nativeRecorder_ getInputFormat], [nativeRecorder_ getBufferSize]);
+    auto result =
+        std::static_pointer_cast<IOSFileWriter>(fileWriter_)
+            ->openFile([nativeRecorder_ getInputFormat], [nativeRecorder_ getBufferSize], "");
 
     if (result.is_err()) {
       return Result<std::string, std::string>::Err(

package/ios/audioapi/ios/core/utils/FileOptions.h
CHANGED

@@ -22,7 +22,9 @@ NSInteger getBitDepth(const std::shared_ptr<AudioFileProperties> &properties);
 float getSampleRate(const std::shared_ptr<AudioFileProperties> &properties);
 
 NSDictionary *getFileSettings(const std::shared_ptr<AudioFileProperties> &properties);
-NSURL *getFileURL(const std::shared_ptr<AudioFileProperties> &properties);
+NSURL *getFileURL(
+    const std::shared_ptr<AudioFileProperties> &properties,
+    const std::string &fileNameOverride);
 NSSearchPathDirectory getDirectory(const std::shared_ptr<AudioFileProperties> &properties);
 
 NSString *getDateString();

package/ios/audioapi/ios/core/utils/FileOptions.mm
CHANGED

@@ -139,7 +139,9 @@ NSDictionary *getFileSettings(const std::shared_ptr<AudioFileProperties> &proper
   return settings;
 }
 
-NSURL *getFileURL(const std::shared_ptr<AudioFileProperties> &properties)
+NSURL *getFileURL(
+    const std::shared_ptr<AudioFileProperties> &properties,
+    const std::string &fileNameOverride)
 {
   NSError *error = nil;
 
@@ -164,8 +166,12 @@ NSURL *getFileURL(const std::shared_ptr<AudioFileProperties> &properties)
   NSString *timestamp = getTimestampString();
   NSString *fileExtension = getFileExtension(properties);
 
-  NSString *fileName =
-      [NSString stringWithFormat:@"%@_%@.%@", fileNamePrefix, timestamp, fileExtension];
+  NSString *fileName = fileNameOverride.length() > 0
+      ? [NSString stringWithFormat:@"%@.%@",
+                                   [NSString stringWithUTF8String:fileNameOverride.c_str()],
+                                   fileExtension]
+      : [NSString stringWithFormat:@"%@_%@.%@", fileNamePrefix, timestamp, fileExtension];
+
   return [directoryURL URLByAppendingPathComponent:fileName];
 }
 

package/ios/audioapi/ios/core/utils/IOSFileWriter.h
CHANGED

@@ -29,7 +29,8 @@ class IOSFileWriter : public AudioFileWriter {
 
   Result<std::string, std::string> openFile(
       AVAudioFormat *bufferFormat,
-      size_t maxInputBufferLength);
+      size_t maxInputBufferLength,
+      const std::string &fileNameOverride);
   Result<std::tuple<double, double>, std::string> closeFile() override;
 
   bool writeAudioData(const AudioBufferList *audioBufferList, int numFrames);

package/ios/audioapi/ios/core/utils/IOSFileWriter.mm
CHANGED

@@ -32,7 +32,10 @@ IOSFileWriter::~IOSFileWriter()
 /// @param bufferFormat The audio format of the input buffer.
 /// @param maxInputBufferLength The maximum length of the input buffer in frames.
 /// @returns An OpenFileResult indicating success with the file path or an error message.
-OpenFileResult IOSFileWriter::openFile(AVAudioFormat *bufferFormat, size_t maxInputBufferLength)
+OpenFileResult IOSFileWriter::openFile(
+    AVAudioFormat *bufferFormat,
+    size_t maxInputBufferLength,
+    const std::string &fileNameOverride)
 {
   @autoreleasepool {
     if (audioFile_ != nil) {
@@ -44,7 +47,7 @@ OpenFileResult IOSFileWriter::openFile(AVAudioFormat *bufferFormat, size_t maxIn
 
     NSError *error = nil;
     NSDictionary *settings = ios::fileoptions::getFileSettings(fileProperties_);
-    fileURL_ = ios::fileoptions::getFileURL(fileProperties_);
+    fileURL_ = ios::fileoptions::getFileURL(fileProperties_, fileNameOverride);
 
     if (fileProperties_->sampleRate == 0 || fileProperties_->channelCount == 0) {
       return OpenFileResult::Err(

package/ios/audioapi/ios/system/AudioEngine.mm
CHANGED

@@ -270,8 +270,10 @@ static AudioEngine *_sharedInstance = nil;
     [self.audioEngine stop];
   }
 
-  self.audioEngine = [[AVAudioEngine alloc] init];
   [self rebuildAudioEngine];
+  if (self.state == AudioEngineState::AudioEngineStateRunning) {
+    [self startEngine];
+  }
 }
 
 - (void)logAudioEngineState

package/ios/audioapi/ios/system/AudioSessionManager.h
CHANGED

@@ -45,6 +45,9 @@
 
 - (void)getDevicesInfo:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
 - (NSArray<NSDictionary *> *)parseDeviceList:(NSArray<AVAudioSessionPortDescription *> *)devices;
+- (void)setInputDevice:(NSString *)deviceId
+               resolve:(RCTPromiseResolveBlock)resolve
+                reject:(RCTPromiseRejectBlock)reject;
 
 - (bool)isSessionActive;
 

package/ios/audioapi/ios/system/AudioSessionManager.mm
CHANGED

@@ -291,12 +291,48 @@ static AudioSessionManager *_sharedInstance = nil;
     [deviceList addObject:@{
       @"name" : device.portName,
       @"category" : device.portType,
+      @"id" : device.UID,
     }];
   }
 
   return deviceList;
 }
 
+- (void)setInputDevice:(NSString *)deviceId
+               resolve:(RCTPromiseResolveBlock)resolve
+                reject:(RCTPromiseRejectBlock)reject
+{
+  NSError *error = nil;
+  NSArray<AVAudioSessionPortDescription *> *availableInputs = [self.audioSession availableInputs];
+
+  AVAudioSessionPortDescription *selectedInput = nil;
+
+  for (AVAudioSessionPortDescription *input in availableInputs) {
+    if ([input.UID isEqualToString:deviceId]) {
+      selectedInput = input;
+      break;
+    }
+  }
+
+  if (selectedInput == nil) {
+    reject(nil, [NSString stringWithFormat:@"Input device with id %@ not found", deviceId], nil);
+    return;
+  }
+
+  [self.audioSession setPreferredInput:selectedInput error:&error];
+
+  if (error != nil) {
+    reject(
+        nil,
+        [NSString
+            stringWithFormat:@"Error while setting preferred input: %@", [error debugDescription]],
+        error);
+    return;
+  }
+
+  resolve(@(true));
+}
+
 - (AVAudioSessionCategory)categoryFromString:(NSString *)categorySTR
 {
   AVAudioSessionCategory category = 0;
@@ -362,8 +398,8 @@ static AudioSessionManager *_sharedInstance = nil;
       options |= AVAudioSessionCategoryOptionMixWithOthers;
     }
 
-    if ([option isEqualToString:@"allowBluetooth"]) {
-      options |= AVAudioSessionCategoryOptionAllowBluetooth;
+    if ([option isEqualToString:@"allowBluetoothHFP"]) {
+      options |= AVAudioSessionCategoryOptionAllowBluetoothHFP;
     }
 
     if ([option isEqualToString:@"defaultToSpeaker"]) {

package/lib/commonjs/api.js
CHANGED

@@ -25,6 +25,7 @@ var _exportNames = {
   GainNode: true,
   OfflineAudioContext: true,
   OscillatorNode: true,
+  PeriodicWave: true,
   RecorderAdapterNode: true,
   StereoPannerNode: true,
   StreamerNode: true,
@@ -32,7 +33,6 @@ var _exportNames = {
   WorkletNode: true,
   WorkletProcessingNode: true,
   WorkletSourceNode: true,
-  useSystemVolume: true,
   AudioManager: true,
   FilePreset: true,
   PlaybackNotificationManager: true,
@@ -168,6 +168,12 @@ Object.defineProperty(exports, "OscillatorNode", {
     return _OscillatorNode.default;
   }
 });
+Object.defineProperty(exports, "PeriodicWave", {
+  enumerable: true,
+  get: function () {
+    return _PeriodicWave.default;
+  }
+});
 Object.defineProperty(exports, "PlaybackControlName", {
   enumerable: true,
   get: function () {
@@ -258,12 +264,6 @@ Object.defineProperty(exports, "decodePCMInBase64", {
     return _AudioDecoder.decodePCMInBase64;
   }
 });
-Object.defineProperty(exports, "useSystemVolume", {
-  enumerable: true,
-  get: function () {
-    return _useSystemVolume.default;
-  }
-});
 require("./AudioAPIModule");
 var _AnalyserNode = _interopRequireDefault(require("./core/AnalyserNode"));
 var _AudioBuffer = _interopRequireDefault(require("./core/AudioBuffer"));
@@ -285,6 +285,7 @@ var _DelayNode = _interopRequireDefault(require("./core/DelayNode"));
 var _GainNode = _interopRequireDefault(require("./core/GainNode"));
 var _OfflineAudioContext = _interopRequireDefault(require("./core/OfflineAudioContext"));
 var _OscillatorNode = _interopRequireDefault(require("./core/OscillatorNode"));
+var _PeriodicWave = _interopRequireDefault(require("./core/PeriodicWave"));
 var _RecorderAdapterNode = _interopRequireDefault(require("./core/RecorderAdapterNode"));
 var _StereoPannerNode = _interopRequireDefault(require("./core/StereoPannerNode"));
 var _StreamerNode = _interopRequireDefault(require("./core/StreamerNode"));
@@ -292,7 +293,6 @@ var _WaveShaperNode = _interopRequireDefault(require("./core/WaveShaperNode"));
 var _WorkletNode = _interopRequireDefault(require("./core/WorkletNode"));
 var _WorkletProcessingNode = _interopRequireDefault(require("./core/WorkletProcessingNode"));
 var _WorkletSourceNode = _interopRequireDefault(require("./core/WorkletSourceNode"));
-var _useSystemVolume = _interopRequireDefault(require("./hooks/useSystemVolume"));
 var _system = _interopRequireDefault(require("./system"));
 var _errors = require("./errors");
 Object.keys(_errors).forEach(function (key) {

package/lib/commonjs/api.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"names":[…],"sourceRoot":"../../src","sources":["api.ts"],"mappings":"…"}
+{"version":3,"names":[…],"sourceRoot":"../../src","sources":["api.ts"],"mappings":"…"}
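The next entry, `core/AudioDecoder.js`, hoists the `_errors` require and tightens the `typeof stringSource` guard inside `decodeAudioData`, whose accepted inputs are spelled out in the bundled source: an `ArrayBuffer`, a `require`d asset/module, or a uri string, with base64/data-blob strings rejected via `AudioApiError`. A usage sketch (file paths are placeholders) before that diff:

```ts
import { decodeAudioData } from 'react-native-audio-api';

async function loadClips() {
  // From a file or remote uri (base64 / data-blob strings are rejected with AudioApiError).
  const fromUri = await decodeAudioData('file:///tmp/clip.mp3');

  // From raw bytes already in memory, with an optional sample rate.
  const bytes = await fetch('https://example.com/clip.mp3').then((r) => r.arrayBuffer());
  const fromBuffer = await decodeAudioData(bytes, 44100);

  return [fromUri, fromBuffer];
}
```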

package/lib/commonjs/core/AudioDecoder.js
CHANGED

@@ -6,9 +6,9 @@ Object.defineProperty(exports, "__esModule", {
 exports.decodeAudioData = decodeAudioData;
 exports.decodePCMInBase64 = decodePCMInBase64;
 var _reactNative = require("react-native");
+var _errors = require("../errors");
 var _paths = require("../utils/paths");
 var _AudioBuffer = _interopRequireDefault(require("./AudioBuffer"));
-var _errors = require("../errors");
 function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
 class AudioDecoder {
   static instance = null;
@@ -38,7 +38,7 @@ class AudioDecoder {
       const buffer = await this.decoder.decodeWithMemoryBlock(new Uint8Array(arrayBuffer), sampleRate ?? 0);
       return new _AudioBuffer.default(buffer);
     }
-    if (!(typeof
+    if (!(typeof stringSource === 'string')) {
       throw new TypeError('Input must be a module, uri or ArrayBuffer');
     }
 

package/lib/commonjs/core/AudioDecoder.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"names":[…],"sourceRoot":"../../../src","sources":["core/AudioDecoder.ts"],"mappings":"…"}
+{"version":3,"names":[…],"sourceRoot":"../../../src","sources":["core/AudioDecoder.ts"],"mappings":"…"}