react-native-audio-api 0.11.0-nightly-0a987bc-20251120 → 0.11.0-nightly-6dcac64-20251122

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70):
  1. package/RNAudioAPI.podspec +7 -5
  2. package/android/build.gradle +8 -2
  3. package/android/src/main/cpp/audioapi/android/core/utils/AudioDecoder.cpp +18 -1
  4. package/common/cpp/audioapi/HostObjects/BaseAudioContextHostObject.cpp +32 -0
  5. package/common/cpp/audioapi/HostObjects/BaseAudioContextHostObject.h +1 -0
  6. package/common/cpp/audioapi/HostObjects/effects/IIRFilterNodeHostObject.cpp +33 -0
  7. package/common/cpp/audioapi/HostObjects/effects/IIRFilterNodeHostObject.h +20 -0
  8. package/common/cpp/audioapi/HostObjects/sources/StreamerNodeHostObject.cpp +4 -0
  9. package/common/cpp/audioapi/core/BaseAudioContext.cpp +15 -2
  10. package/common/cpp/audioapi/core/BaseAudioContext.h +4 -0
  11. package/common/cpp/audioapi/core/effects/IIRFilterNode.cpp +166 -0
  12. package/common/cpp/audioapi/core/effects/IIRFilterNode.h +75 -0
  13. package/common/cpp/audioapi/core/sources/StreamerNode.h +5 -5
  14. package/common/cpp/audioapi/core/utils/Constants.h +1 -0
  15. package/common/cpp/audioapi/libs/ffmpeg/FFmpegDecoding.cpp +2 -0
  16. package/common/cpp/test/CMakeLists.txt +1 -0
  17. package/common/cpp/test/src/IIRFilterTest.cpp +153 -0
  18. package/ios/audioapi/ios/core/utils/AudioDecoder.mm +12 -0
  19. package/lib/commonjs/core/BaseAudioContext.js +23 -1
  20. package/lib/commonjs/core/BaseAudioContext.js.map +1 -1
  21. package/lib/commonjs/core/IIRFilterNode.js +19 -0
  22. package/lib/commonjs/core/IIRFilterNode.js.map +1 -0
  23. package/lib/commonjs/plugin/withAudioAPI.js +46 -0
  24. package/lib/commonjs/plugin/withAudioAPI.js.map +1 -1
  25. package/lib/commonjs/web-core/AudioContext.js +4 -0
  26. package/lib/commonjs/web-core/AudioContext.js.map +1 -1
  27. package/lib/commonjs/web-core/IIRFilterNode.js +19 -0
  28. package/lib/commonjs/web-core/IIRFilterNode.js.map +1 -0
  29. package/lib/commonjs/web-core/OfflineAudioContext.js +4 -0
  30. package/lib/commonjs/web-core/OfflineAudioContext.js.map +1 -1
  31. package/lib/module/core/BaseAudioContext.js +24 -2
  32. package/lib/module/core/BaseAudioContext.js.map +1 -1
  33. package/lib/module/core/IIRFilterNode.js +13 -0
  34. package/lib/module/core/IIRFilterNode.js.map +1 -0
  35. package/lib/module/plugin/withAudioAPI.js +47 -1
  36. package/lib/module/plugin/withAudioAPI.js.map +1 -1
  37. package/lib/module/web-core/AudioContext.js +4 -0
  38. package/lib/module/web-core/AudioContext.js.map +1 -1
  39. package/lib/module/web-core/IIRFilterNode.js +13 -0
  40. package/lib/module/web-core/IIRFilterNode.js.map +1 -0
  41. package/lib/module/web-core/OfflineAudioContext.js +4 -0
  42. package/lib/module/web-core/OfflineAudioContext.js.map +1 -1
  43. package/lib/typescript/core/BaseAudioContext.d.ts +3 -1
  44. package/lib/typescript/core/BaseAudioContext.d.ts.map +1 -1
  45. package/lib/typescript/core/IIRFilterNode.d.ts +5 -0
  46. package/lib/typescript/core/IIRFilterNode.d.ts.map +1 -0
  47. package/lib/typescript/interfaces.d.ts +5 -1
  48. package/lib/typescript/interfaces.d.ts.map +1 -1
  49. package/lib/typescript/plugin/withAudioAPI.d.ts +1 -0
  50. package/lib/typescript/plugin/withAudioAPI.d.ts.map +1 -1
  51. package/lib/typescript/types.d.ts +4 -0
  52. package/lib/typescript/types.d.ts.map +1 -1
  53. package/lib/typescript/web-core/AudioContext.d.ts +3 -1
  54. package/lib/typescript/web-core/AudioContext.d.ts.map +1 -1
  55. package/lib/typescript/web-core/BaseAudioContext.d.ts +3 -1
  56. package/lib/typescript/web-core/BaseAudioContext.d.ts.map +1 -1
  57. package/lib/typescript/web-core/IIRFilterNode.d.ts +5 -0
  58. package/lib/typescript/web-core/IIRFilterNode.d.ts.map +1 -0
  59. package/lib/typescript/web-core/OfflineAudioContext.d.ts +3 -1
  60. package/lib/typescript/web-core/OfflineAudioContext.d.ts.map +1 -1
  61. package/package.json +1 -1
  62. package/src/core/BaseAudioContext.ts +44 -2
  63. package/src/core/IIRFilterNode.ts +25 -0
  64. package/src/interfaces.ts +13 -1
  65. package/src/plugin/withAudioAPI.ts +61 -0
  66. package/src/types.ts +5 -0
  67. package/src/web-core/AudioContext.tsx +9 -0
  68. package/src/web-core/BaseAudioContext.tsx +7 -1
  69. package/src/web-core/IIRFilterNode.tsx +25 -0
  70. package/src/web-core/OfflineAudioContext.tsx +9 -0
@@ -4,12 +4,14 @@ require_relative './scripts/rnaa_utils'
4
4
  package_json = JSON.parse(File.read(File.join(__dir__, "package.json")))
5
5
 
6
6
  $new_arch_enabled = ENV['RCT_NEW_ARCH_ENABLED'] == '1'
7
+ $RN_AUDIO_API_FFMPEG_DISABLED = ENV['DISABLE_AUDIOAPI_FFMPEG'].nil? ? false : ENV['DISABLE_AUDIOAPI_FFMPEG'] == '1' # false by default
7
8
 
8
9
  folly_flags = "-DFOLLY_MOBILE=1 -DFOLLY_USE_LIBCPP=1 -Wno-comma -Wno-shorten-64-to-32"
9
10
  fabric_flags = $new_arch_enabled ? '-DRCT_NEW_ARCH_ENABLED' : ''
10
11
  version_flag = "-DAUDIOAPI_VERSION=#{package_json['version']}"
11
12
 
12
13
  worklets_preprocessor_flag = check_if_worklets_enabled() ? '-DRN_AUDIO_API_ENABLE_WORKLETS=1' : ''
14
+ ffmpeg_flag = $RN_AUDIO_API_FFMPEG_DISABLED ? '-DRN_AUDIO_API_FFMPEG_DISABLED=1' : ''
13
15
 
14
16
  Pod::Spec.new do |s|
15
17
  s.name = "RNAudioAPI"
@@ -24,6 +26,7 @@ Pod::Spec.new do |s|
24
26
 
25
27
  s.subspec "audioapi" do |ss|
26
28
  ss.source_files = "common/cpp/audioapi/**/*.{cpp,c,h,hpp}"
29
+ ss.exclude_files = $RN_AUDIO_API_FFMPEG_DISABLED ? ["common/cpp/audioapi/libs/ffmpeg/**"] : []
27
30
  ss.header_dir = "audioapi"
28
31
  ss.header_mappings_dir = "common/cpp/audioapi"
29
32
 
@@ -57,7 +60,7 @@ Pod::Spec.new do |s|
57
60
  external_dir_relative = "common/cpp/audioapi/external"
58
61
  lib_dir = "$(PROJECT_DIR)/#{rn_audio_dir_relative}/#{external_dir_relative}/$(PLATFORM_NAME)"
59
62
 
60
- s.ios.vendored_frameworks = [
63
+ s.ios.vendored_frameworks = $RN_AUDIO_API_FFMPEG_DISABLED ? [] : [
61
64
  'common/cpp/audioapi/external/ffmpeg_ios/libavcodec.xcframework',
62
65
  'common/cpp/audioapi/external/ffmpeg_ios/libavformat.xcframework',
63
66
  'common/cpp/audioapi/external/ffmpeg_ios/libavutil.xcframework',
@@ -75,10 +78,9 @@ s.pod_target_xcconfig = {
75
78
  $(PODS_TARGET_SRCROOT)/#{external_dir_relative}/include
76
79
  $(PODS_TARGET_SRCROOT)/#{external_dir_relative}/include/opus
77
80
  $(PODS_TARGET_SRCROOT)/#{external_dir_relative}/include/vorbis
78
- $(PODS_TARGET_SRCROOT)/#{external_dir_relative}/ffmpeg_include
79
- ].join(" "),
80
- 'OTHER_CFLAGS' => "$(inherited) #{folly_flags} #{fabric_flags} #{version_flag} #{worklets_preprocessor_flag}",
81
- 'OTHER_CPLUSPLUSFLAGS' => "$(inherited) #{folly_flags} #{fabric_flags} #{version_flag} #{worklets_preprocessor_flag}",
81
+ ].concat($RN_AUDIO_API_FFMPEG_DISABLED ? [] : ["$(PODS_TARGET_SRCROOT)/#{external_dir_relative}/ffmpeg_include"]).join(" "),
82
+ 'OTHER_CFLAGS' => "$(inherited) #{folly_flags} #{fabric_flags} #{version_flag} #{worklets_preprocessor_flag} #{ffmpeg_flag}",
83
+ 'OTHER_CPLUSPLUSFLAGS' => "$(inherited) #{folly_flags} #{fabric_flags} #{version_flag} #{worklets_preprocessor_flag} #{ffmpeg_flag}",
82
84
  }
83
85
 
84
86
  s.user_target_xcconfig = {
@@ -21,6 +21,10 @@ def isNewArchitectureEnabled() {
21
21
  return rootProject.hasProperty("newArchEnabled") && rootProject.getProperty("newArchEnabled") == "true"
22
22
  }
23
23
 
24
+ def isFFmpegDisabled() {
25
+ return rootProject.hasProperty("disableAudioapiFFmpeg") && rootProject.getProperty("disableAudioapiFFmpeg") == "true"
26
+ }
27
+
24
28
  apply plugin: "com.android.library"
25
29
  apply plugin: 'org.jetbrains.kotlin.android'
26
30
 
@@ -124,6 +128,7 @@ file("$reactNativeRootDir/ReactAndroid/gradle.properties").withInputStream { rea
124
128
  def REACT_NATIVE_VERSION = reactProperties.getProperty("VERSION_NAME")
125
129
  def REACT_NATIVE_MINOR_VERSION = REACT_NATIVE_VERSION.startsWith("0.0.0-") ? 1000 : REACT_NATIVE_VERSION.split("\\.")[1].toInteger()
126
130
  def IS_NEW_ARCHITECTURE_ENABLED = isNewArchitectureEnabled()
131
+ def IS_RN_AUDIO_API_FFMPEG_DISABLED = isFFmpegDisabled()
127
132
 
128
133
  android {
129
134
  sourceSets {
@@ -163,8 +168,9 @@ android {
163
168
  "-DANDROID_TOOLCHAIN=clang",
164
169
  "-DREACT_NATIVE_DIR=${toPlatformFileString(reactNativeRootDir.path)}",
165
170
  "-DRN_AUDIO_API_WORKLETS_ENABLED=${isWorkletsAvailable}",
166
- "-DIS_NEW_ARCHITECTURE_ENABLED=${IS_NEW_ARCHITECTURE_ENABLED}",
167
- "-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON"
171
+ "-DIS_NEW_ARCHITECTURE_ENABLED=${IS_NEW_ARCHITECTURE_ENABLED}",
172
+ "-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON",
173
+ "-DRN_AUDIO_API_FFMPEG_DISABLED=${IS_RN_AUDIO_API_FFMPEG_DISABLED}"
168
174
  ]
169
175
 
170
176
  if (isWorkletsAvailable) {
@@ -12,8 +12,10 @@
12
12
 
13
13
  #ifndef AUDIO_API_TEST_SUITE
14
14
  #include <android/log.h>
15
+ #endif // AUDIO_API_TEST_SUITE
16
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
15
17
  #include <audioapi/libs/ffmpeg/FFmpegDecoding.h>
16
- #endif
18
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
17
19
 
18
20
  #include <memory>
19
21
  #include <string>
@@ -73,6 +75,7 @@ std::shared_ptr<AudioBuffer> AudioDecoder::decodeWithFilePath(
73
75
  float sampleRate) {
74
76
  #ifndef AUDIO_API_TEST_SUITE
75
77
  if (AudioDecoder::pathHasExtension(path, {".mp4", ".m4a", ".aac"})) {
78
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
76
79
  auto buffer = ffmpegdecoder::decodeWithFilePath(path, static_cast<int>(sampleRate));
77
80
  if (buffer == nullptr) {
78
81
  __android_log_print(
@@ -80,6 +83,14 @@ std::shared_ptr<AudioBuffer> AudioDecoder::decodeWithFilePath(
80
83
  return nullptr;
81
84
  }
82
85
  return buffer;
86
+ #else
87
+ __android_log_print(
88
+ ANDROID_LOG_ERROR,
89
+ "AudioDecoder",
90
+ "FFmpeg is disabled, cannot decode file: %s",
91
+ path.c_str());
92
+ return nullptr;
93
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
83
94
  }
84
95
  ma_decoder decoder;
85
96
  ma_decoder_config config = ma_decoder_config_init(ma_format_f32, 0, static_cast<int>(sampleRate));
@@ -115,12 +126,18 @@ AudioDecoder::decodeWithMemoryBlock(const void *data, size_t size, float sampleR
115
126
  #ifndef AUDIO_API_TEST_SUITE
116
127
  const AudioFormat format = AudioDecoder::detectAudioFormat(data, size);
117
128
  if (format == AudioFormat::MP4 || format == AudioFormat::M4A || format == AudioFormat::AAC) {
129
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
118
130
  auto buffer = ffmpegdecoder::decodeWithMemoryBlock(data, size, static_cast<int>(sampleRate));
119
131
  if (buffer == nullptr) {
120
132
  __android_log_print(ANDROID_LOG_ERROR, "AudioDecoder", "Failed to decode with FFmpeg");
121
133
  return nullptr;
122
134
  }
123
135
  return buffer;
136
+ #else
137
+ __android_log_print(
138
+ ANDROID_LOG_ERROR, "AudioDecoder", "FFmpeg is disabled, cannot decode memory block");
139
+ return nullptr;
140
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
124
141
  }
125
142
  ma_decoder decoder;
126
143
  ma_decoder_config config = ma_decoder_config_init(ma_format_f32, 0, static_cast<int>(sampleRate));
@@ -7,6 +7,7 @@
7
7
  #include <audioapi/HostObjects/effects/BiquadFilterNodeHostObject.h>
8
8
  #include <audioapi/HostObjects/effects/ConvolverNodeHostObject.h>
9
9
  #include <audioapi/HostObjects/effects/GainNodeHostObject.h>
10
+ #include <audioapi/HostObjects/effects/IIRFilterNodeHostObject.h>
10
11
  #include <audioapi/HostObjects/effects/PeriodicWaveHostObject.h>
11
12
  #include <audioapi/HostObjects/effects/StereoPannerNodeHostObject.h>
12
13
  #include <audioapi/HostObjects/sources/AudioBufferHostObject.h>
@@ -48,6 +49,7 @@ BaseAudioContextHostObject::BaseAudioContextHostObject(
48
49
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createGain),
49
50
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createStereoPanner),
50
51
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createBiquadFilter),
52
+ JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createIIRFilter),
51
53
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createBufferSource),
52
54
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createBufferQueueSource),
53
55
  JSI_EXPORT_FUNCTION(BaseAudioContextHostObject, createBuffer),
@@ -159,11 +161,15 @@ JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createOscillator) {
159
161
  }
160
162
 
161
163
  JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createStreamer) {
164
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
162
165
  auto streamer = context_->createStreamer();
163
166
  auto streamerHostObject = std::make_shared<StreamerNodeHostObject>(streamer);
164
167
  auto object = jsi::Object::createFromHostObject(runtime, streamerHostObject);
165
168
  object.setExternalMemoryPressure(runtime, StreamerNodeHostObject::getSizeInBytes());
166
169
  return object;
170
+ #else
171
+ return jsi::Value::undefined();
172
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
167
173
  }
168
174
 
169
175
  JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createConstantSource) {
@@ -190,6 +196,32 @@ JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createBiquadFilter) {
190
196
  return jsi::Object::createFromHostObject(runtime, biquadFilterHostObject);
191
197
  }
192
198
 
199
+ JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createIIRFilter) {
200
+ auto feedforwardArray = args[0].asObject(runtime).asArray(runtime);
201
+ auto feedbackArray = args[1].asObject(runtime).asArray(runtime);
202
+
203
+ size_t feedforwardLength = feedforwardArray.length(runtime);
204
+ size_t feedbackLength = feedbackArray.length(runtime);
205
+
206
+ std::vector<float> feedforward;
207
+ std::vector<float> feedback;
208
+
209
+ feedforward.reserve(feedforwardLength);
210
+ feedback.reserve(feedbackLength);
211
+
212
+ for (size_t i = 0; i < feedforwardLength; ++i) {
213
+ feedforward.push_back(feedforwardArray.getValueAtIndex(runtime, i).asNumber());
214
+ }
215
+
216
+ for (size_t i = 0; i < feedbackLength; ++i) {
217
+ feedback.push_back(feedbackArray.getValueAtIndex(runtime, i).asNumber());
218
+ }
219
+
220
+ auto iirFilter = context_->createIIRFilter(feedforward, feedback);
221
+ auto iirFilterHostObject = std::make_shared<IIRFilterNodeHostObject>(iirFilter);
222
+ return jsi::Object::createFromHostObject(runtime, iirFilterHostObject);
223
+ }
224
+
193
225
  JSI_HOST_FUNCTION_IMPL(BaseAudioContextHostObject, createBufferSource) {
194
226
  auto pitchCorrection = args[0].asBool();
195
227
  auto bufferSource = context_->createBufferSource(pitchCorrection);
@@ -36,6 +36,7 @@ class BaseAudioContextHostObject : public JsiHostObject {
36
36
  JSI_HOST_FUNCTION_DECL(createGain);
37
37
  JSI_HOST_FUNCTION_DECL(createStereoPanner);
38
38
  JSI_HOST_FUNCTION_DECL(createBiquadFilter);
39
+ JSI_HOST_FUNCTION_DECL(createIIRFilter);
39
40
  JSI_HOST_FUNCTION_DECL(createBufferSource);
40
41
  JSI_HOST_FUNCTION_DECL(createBufferQueueSource);
41
42
  JSI_HOST_FUNCTION_DECL(createBuffer);
@@ -0,0 +1,33 @@
1
+ #include <audioapi/HostObjects/effects/IIRFilterNodeHostObject.h>
2
+ #include <audioapi/core/effects/IIRFilterNode.h>
3
+ #include <memory>
4
+
5
+ namespace audioapi {
6
+
7
+ IIRFilterNodeHostObject::IIRFilterNodeHostObject(const std::shared_ptr<IIRFilterNode> &node)
8
+ : AudioNodeHostObject(node) {
9
+
10
+ addFunctions(JSI_EXPORT_FUNCTION(IIRFilterNodeHostObject, getFrequencyResponse));
11
+ }
12
+
13
+ JSI_HOST_FUNCTION_IMPL(IIRFilterNodeHostObject, getFrequencyResponse) {
14
+ auto arrayBufferFrequency =
15
+ args[0].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
16
+ auto frequencyArray = reinterpret_cast<float *>(arrayBufferFrequency.data(runtime));
17
+ auto length = static_cast<size_t>(arrayBufferFrequency.size(runtime));
18
+
19
+ auto arrayBufferMag =
20
+ args[1].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
21
+ auto magResponseOut = reinterpret_cast<float *>(arrayBufferMag.data(runtime));
22
+
23
+ auto arrayBufferPhase =
24
+ args[2].getObject(runtime).getPropertyAsObject(runtime, "buffer").getArrayBuffer(runtime);
25
+ auto phaseResponseOut = reinterpret_cast<float *>(arrayBufferPhase.data(runtime));
26
+
27
+ auto iirFilterNode = std::static_pointer_cast<IIRFilterNode>(node_);
28
+ iirFilterNode->getFrequencyResponse(frequencyArray, magResponseOut, phaseResponseOut, length);
29
+
30
+ return jsi::Value::undefined();
31
+ }
32
+
33
+ } // namespace audioapi
@@ -0,0 +1,20 @@
1
+ #pragma once
2
+
3
+ #include <audioapi/HostObjects/AudioNodeHostObject.h>
4
+
5
+ #include <memory>
6
+ #include <string>
7
+ #include <vector>
8
+
9
+ namespace audioapi {
10
+ using namespace facebook;
11
+
12
+ class IIRFilterNode;
13
+
14
+ class IIRFilterNodeHostObject : public AudioNodeHostObject {
15
+ public:
16
+ explicit IIRFilterNodeHostObject(const std::shared_ptr<IIRFilterNode> &node);
17
+
18
+ JSI_HOST_FUNCTION_DECL(getFrequencyResponse);
19
+ };
20
+ } // namespace audioapi
@@ -13,10 +13,14 @@ StreamerNodeHostObject::StreamerNodeHostObject(const std::shared_ptr<StreamerNod
13
13
  }
14
14
 
15
15
  JSI_HOST_FUNCTION_IMPL(StreamerNodeHostObject, initialize) {
16
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
16
17
  auto streamerNode = std::static_pointer_cast<StreamerNode>(node_);
17
18
  auto path = args[0].getString(runtime).utf8(runtime);
18
19
  auto result = streamerNode->initialize(path);
19
20
  return {result};
21
+ #else
22
+ return false;
23
+ #endif
20
24
  }
21
25
 
22
26
  } // namespace audioapi
@@ -4,6 +4,7 @@
4
4
  #include <audioapi/core/effects/BiquadFilterNode.h>
5
5
  #include <audioapi/core/effects/ConvolverNode.h>
6
6
  #include <audioapi/core/effects/GainNode.h>
7
+ #include <audioapi/core/effects/IIRFilterNode.h>
7
8
  #include <audioapi/core/effects/StereoPannerNode.h>
8
9
  #include <audioapi/core/effects/WorkletNode.h>
9
10
  #include <audioapi/core/effects/WorkletProcessingNode.h>
@@ -13,7 +14,9 @@
13
14
  #include <audioapi/core/sources/ConstantSourceNode.h>
14
15
  #include <audioapi/core/sources/OscillatorNode.h>
15
16
  #include <audioapi/core/sources/RecorderAdapterNode.h>
17
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
16
18
  #include <audioapi/core/sources/StreamerNode.h>
19
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
17
20
  #include <audioapi/core/sources/WorkletSourceNode.h>
18
21
  #include <audioapi/core/utils/AudioDecoder.h>
19
22
  #include <audioapi/core/utils/AudioNodeManager.h>
@@ -121,13 +124,15 @@ std::shared_ptr<ConstantSourceNode> BaseAudioContext::createConstantSource() {
121
124
  return constantSource;
122
125
  }
123
126
 
124
- #ifndef AUDIO_API_TEST_SUITE
125
127
  std::shared_ptr<StreamerNode> BaseAudioContext::createStreamer() {
128
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
126
129
  auto streamer = std::make_shared<StreamerNode>(this);
127
130
  nodeManager_->addSourceNode(streamer);
128
131
  return streamer;
132
+ #else
133
+ return nullptr;
134
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
129
135
  }
130
- #endif
131
136
 
132
137
  std::shared_ptr<GainNode> BaseAudioContext::createGain() {
133
138
  auto gain = std::make_shared<GainNode>(this);
@@ -147,6 +152,14 @@ std::shared_ptr<BiquadFilterNode> BaseAudioContext::createBiquadFilter() {
147
152
  return biquadFilter;
148
153
  }
149
154
 
155
+ std::shared_ptr<IIRFilterNode> BaseAudioContext::createIIRFilter(
156
+ const std::vector<float> &feedforward,
157
+ const std::vector<float> &feedback) {
158
+ auto iirFilter = std::make_shared<IIRFilterNode>(this, feedforward, feedback);
159
+ nodeManager_->addProcessingNode(iirFilter);
160
+ return iirFilter;
161
+ }
162
+
150
163
  std::shared_ptr<AudioBufferSourceNode> BaseAudioContext::createBufferSource(bool pitchCorrection) {
151
164
  auto bufferSource = std::make_shared<AudioBufferSourceNode>(this, pitchCorrection);
152
165
  nodeManager_->addSourceNode(bufferSource);
@@ -23,6 +23,7 @@ class ConstantSourceNode;
23
23
  class StereoPannerNode;
24
24
  class AudioNodeManager;
25
25
  class BiquadFilterNode;
26
+ class IIRFilterNode;
26
27
  class AudioDestinationNode;
27
28
  class AudioBufferSourceNode;
28
29
  class AudioBufferQueueSourceNode;
@@ -70,6 +71,9 @@ class BaseAudioContext {
70
71
  std::shared_ptr<GainNode> createGain();
71
72
  std::shared_ptr<StereoPannerNode> createStereoPanner();
72
73
  std::shared_ptr<BiquadFilterNode> createBiquadFilter();
74
+ std::shared_ptr<IIRFilterNode> createIIRFilter(
75
+ const std::vector<float> &feedforward,
76
+ const std::vector<float> &feedback);
73
77
  std::shared_ptr<AudioBufferSourceNode> createBufferSource(bool pitchCorrection);
74
78
  std::shared_ptr<AudioBufferQueueSourceNode> createBufferQueueSource(bool pitchCorrection);
75
79
  static std::shared_ptr<AudioBuffer>
@@ -0,0 +1,166 @@
1
+ /*
2
+ * Copyright 2016 The Chromium Authors. All rights reserved.
3
+ * Copyright (C) 2020 Apple Inc. All rights reserved.
4
+ *
5
+ * Redistribution and use in source and binary forms, with or without
6
+ * modification, are permitted provided that the following conditions
7
+ * are met:
8
+ * 1. Redistributions of source code must retain the above copyright
9
+ * notice, this list of conditions and the following disclaimer.
10
+ * 2. Redistributions in binary form must reproduce the above copyright
11
+ * notice, this list of conditions and the following disclaimer in the
12
+ * documentation and/or other materials provided with the distribution.
13
+ *
14
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
15
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
16
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17
+ * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
18
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
19
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
20
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
21
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
23
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24
+ */
25
+
26
+ #include <audioapi/core/BaseAudioContext.h>
27
+ #include <audioapi/core/effects/IIRFilterNode.h>
28
+ #include <audioapi/core/utils/Constants.h>
29
+ #include <audioapi/utils/AudioArray.h>
30
+ #include <audioapi/utils/AudioBus.h>
31
+ #include <algorithm>
32
+ #include <memory>
33
+ #include <vector>
34
+
35
+ namespace audioapi {
36
+
37
+ IIRFilterNode::IIRFilterNode(
38
+ BaseAudioContext *context,
39
+ const std::vector<float> &feedforward,
40
+ const std::vector<float> &feedback)
41
+ : AudioNode(context), feedforward_(feedforward), feedback_(feedback) {
42
+ isInitialized_ = true;
43
+ channelCountMode_ = ChannelCountMode::MAX;
44
+
45
+ int maxChannels = MAX_CHANNEL_COUNT;
46
+ xBuffers_.resize(maxChannels);
47
+ yBuffers_.resize(maxChannels);
48
+ bufferIndices.resize(maxChannels, 0);
49
+
50
+ for (int c = 0; c < maxChannels; ++c) {
51
+ xBuffers_[c].resize(bufferLength, 0.0f);
52
+ yBuffers_[c].resize(bufferLength, 0.0f);
53
+ }
54
+
55
+ size_t feedforwardLength = feedforward_.size();
56
+ size_t feedbackLength = feedback_.size();
57
+
58
+ if (feedback_[0] != 1) {
59
+ float scale = feedback_[0];
60
+ for (unsigned k = 1; k < feedbackLength; ++k)
61
+ feedback_[k] /= scale;
62
+
63
+ for (unsigned k = 0; k < feedforwardLength; ++k)
64
+ feedforward_[k] /= scale;
65
+
66
+ feedback_[0] = 1.0f;
67
+ }
68
+ }
69
+
70
+ // Compute Z-transform of the filter
71
+ //
72
+ // frequency response - H(z)
73
+ // sum(b[k]*z^(-k), k, 0, M)
74
+ // H(z) = -------------------------------
75
+ // sum(a[k]*z^(-k), k, 0, N)
76
+ //
77
+ // sum(b[k]*z1^k, k, 0, M)
78
+ // = -------------------------------
79
+ // sum(a[k]*z1^k, k, 0, N)
80
+ //
81
+ // where z1 = 1/z and z = e^(j * pi * frequency)
82
+ // z1 = e^(-j * pi * frequency)
83
+ //
84
+ // phase response - angle of the frequency response
85
+ //
86
+
87
+ void IIRFilterNode::getFrequencyResponse(
88
+ const float *frequencyArray,
89
+ float *magResponseOutput,
90
+ float *phaseResponseOutput,
91
+ size_t length) {
92
+ float nyquist = context_->getNyquistFrequency();
93
+
94
+ for (size_t k = 0; k < length; ++k) {
95
+ float normalizedFreq = frequencyArray[k] / nyquist;
96
+
97
+ if (normalizedFreq < 0.0f || normalizedFreq > 1.0f) {
98
+ // Out-of-bounds frequencies should return NaN.
99
+ magResponseOutput[k] = std::nanf("");
100
+ phaseResponseOutput[k] = std::nanf("");
101
+ continue;
102
+ }
103
+
104
+ float omega = -PI * normalizedFreq;
105
+ auto z = std::complex<float>(std::cos(omega), std::sin(omega));
106
+
107
+ auto numerator = IIRFilterNode::evaluatePolynomial(feedforward_, z, feedforward_.size() - 1);
108
+ auto denominator = IIRFilterNode::evaluatePolynomial(feedback_, z, feedback_.size() - 1);
109
+ auto response = numerator / denominator;
110
+
111
+ magResponseOutput[k] = static_cast<float>(std::abs(response));
112
+ phaseResponseOutput[k] = static_cast<float>(atan2(imag(response), real(response)));
113
+ }
114
+ }
115
+
116
+ // y[n] = sum(b[k] * x[n - k], k = 0, M) - sum(a[k] * y[n - k], k = 1, N)
117
+ // where b[k] are the feedforward coefficients and a[k] are the feedback coefficients of the filter
118
+
119
+ // TODO: tail
120
+
121
+ std::shared_ptr<AudioBus> IIRFilterNode::processNode(
122
+ const std::shared_ptr<AudioBus> &processingBus,
123
+ int framesToProcess) {
124
+ int numChannels = processingBus->getNumberOfChannels();
125
+
126
+ size_t feedforwardLength = feedforward_.size();
127
+ size_t feedbackLength = feedback_.size();
128
+ int minLength = std::min(feedbackLength, feedforwardLength);
129
+
130
+ int mask = bufferLength - 1;
131
+
132
+ for (int c = 0; c < numChannels; ++c) {
133
+ auto channelData = processingBus->getChannel(c)->getData();
134
+ auto &x = xBuffers_[c];
135
+ auto &y = yBuffers_[c];
136
+ size_t bufferIndex = bufferIndices[c];
137
+
138
+ for (int n = 0; n < framesToProcess; ++n) {
139
+ float yn = feedforward_[0] * channelData[n];
140
+
141
+ for (int k = 1; k < minLength; ++k) {
142
+ int m = (bufferIndex - k) & mask;
143
+ yn = std::fma(feedforward_[k], x[m], yn);
144
+ yn = std::fma(-feedback_[k], y[m], yn);
145
+ }
146
+
147
+ for (int k = minLength; k < feedforwardLength; ++k) {
148
+ yn = std::fma(feedforward_[k], x[(bufferIndex - k) & mask], yn);
149
+ }
150
+ for (int k = minLength; k < feedbackLength; ++k) {
151
+ yn = std::fma(-feedback_[k], y[(bufferIndex - k) & (bufferLength - 1)], yn);
152
+ }
153
+
154
+ channelData[n] = yn;
155
+
156
+ x[bufferIndex] = channelData[n];
157
+ y[bufferIndex] = yn;
158
+
159
+ bufferIndex = (bufferIndex + 1) & (bufferLength - 1);
160
+ }
161
+ bufferIndices[c] = bufferIndex;
162
+ }
163
+ return processingBus;
164
+ }
165
+
166
+ } // namespace audioapi
@@ -0,0 +1,75 @@
1
+ /*
2
+ * Copyright 2016 The Chromium Authors. All rights reserved.
3
+ * Copyright (C) 2020 Apple Inc. All rights reserved.
4
+ *
5
+ * Redistribution and use in source and binary forms, with or without
6
+ * modification, are permitted provided that the following conditions
7
+ * are met:
8
+ * 1. Redistributions of source code must retain the above copyright
9
+ * notice, this list of conditions and the following disclaimer.
10
+ * 2. Redistributions in binary form must reproduce the above copyright
11
+ * notice, this list of conditions and the following disclaimer in the
12
+ * documentation and/or other materials provided with the distribution.
13
+ *
14
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
15
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
16
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17
+ * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
18
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
19
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
20
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
21
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
23
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24
+ */
25
+
26
+ #pragma once
27
+
28
+ #include <audioapi/core/AudioNode.h>
29
+ #include <complex>
30
+ #include <vector>
31
+
32
+ #include <memory>
33
+
34
+ namespace audioapi {
35
+
36
+ class IIRFilterNode : public AudioNode {
37
+
38
+ public:
39
+ explicit IIRFilterNode(
40
+ BaseAudioContext *context,
41
+ const std::vector<float> &feedforward,
42
+ const std::vector<float> &feedback);
43
+
44
+ void getFrequencyResponse(
45
+ const float *frequencyArray,
46
+ float *magResponseOutput,
47
+ float *phaseResponseOutput,
48
+ size_t length);
49
+
50
+ protected:
51
+ std::shared_ptr<AudioBus> processNode(
52
+ const std::shared_ptr<AudioBus> &processingBus,
53
+ int framesToProcess) override;
54
+
55
+ private:
56
+ static constexpr size_t bufferLength = 32;
57
+ size_t m_bufferIndex = 0;
58
+
59
+ std::vector<float> feedforward_;
60
+ std::vector<float> feedback_;
61
+
62
+ std::vector<std::vector<float>> xBuffers_; // xBuffers_[channel][index]
63
+ std::vector<std::vector<float>> yBuffers_;
64
+ std::vector<size_t> bufferIndices;
65
+
66
+ static std::complex<float>
67
+ evaluatePolynomial(const std::vector<float> coefficients, std::complex<float> z, int order) {
68
+ // Use Horner's method to evaluate the polynomial P(z) = sum(coef[k]*z^k, k, 0, order);
69
+ std::complex<float> result = 0;
70
+ for (int k = order; k >= 0; --k)
71
+ result = result * z + std::complex<float>(coefficients[k]);
72
+ return result;
73
+ }
74
+ };
75
+ } // namespace audioapi
@@ -13,7 +13,7 @@
13
13
  #include <audioapi/core/sources/AudioScheduledSourceNode.h>
14
14
  #include <audioapi/utils/AudioBus.h>
15
15
 
16
- #ifndef AUDIO_API_TEST_SUITE
16
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
17
17
  extern "C" {
18
18
  #include <libavcodec/avcodec.h>
19
19
  #include <libavformat/avformat.h>
@@ -22,7 +22,7 @@ extern "C" {
22
22
  #include <libavutil/samplefmt.h>
23
23
  #include <libswresample/swresample.h>
24
24
  }
25
- #endif
25
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
26
26
 
27
27
  #include <atomic>
28
28
  #include <cmath>
@@ -36,7 +36,7 @@ static constexpr audioapi::channels::spsc::OverflowStrategy STREAMER_NODE_SPSC_O
36
36
  audioapi::channels::spsc::OverflowStrategy::WAIT_ON_FULL;
37
37
  static constexpr audioapi::channels::spsc::WaitStrategy STREAMER_NODE_SPSC_WAIT_STRATEGY =
38
38
  audioapi::channels::spsc::WaitStrategy::ATOMIC_WAIT;
39
- #endif
39
+ #endif // AUDIO_API_TEST_SUITE
40
40
 
41
41
  static constexpr bool VERBOSE = false;
42
42
  static constexpr int CHANNEL_CAPACITY = 32;
@@ -78,7 +78,7 @@ class StreamerNode : public AudioScheduledSourceNode {
78
78
  int framesToProcess) override;
79
79
 
80
80
  private:
81
- #ifndef AUDIO_API_TEST_SUITE
81
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
82
82
  AVFormatContext *fmtCtx_;
83
83
  AVCodecContext *codecCtx_;
84
84
  const AVCodec *decoder_;
@@ -149,6 +149,6 @@ class StreamerNode : public AudioScheduledSourceNode {
149
149
  * @return true if successful, false otherwise
150
150
  */
151
151
  bool setupDecoder();
152
- #endif // AUDIO_API_TEST_SUITE
152
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
153
153
  };
154
154
  } // namespace audioapi
@@ -10,6 +10,7 @@ namespace audioapi {
10
10
  // audio
11
11
  static constexpr int RENDER_QUANTUM_SIZE = 128;
12
12
  static constexpr size_t MAX_FFT_SIZE = 32768;
13
+ static constexpr int MAX_CHANNEL_COUNT = 32;
13
14
 
14
15
  // stretcher
15
16
  static constexpr float UPPER_FREQUENCY_LIMIT_DETECTION = 333.0f;
@@ -9,7 +9,9 @@
9
9
  */
10
10
 
11
11
  #include <audioapi/core/sources/AudioBuffer.h>
12
+ #if !RN_AUDIO_API_FFMPEG_DISABLED
12
13
  #include <audioapi/libs/ffmpeg/FFmpegDecoding.h>
14
+ #endif // RN_AUDIO_API_FFMPEG_DISABLED
13
15
  #include <audioapi/utils/AudioArray.h>
14
16
  #include <audioapi/utils/AudioBus.h>
15
17
  #include <functional>
@@ -81,6 +81,7 @@ add_executable(
81
81
  add_compile_definitions(AUDIO_API_TEST_SUITE)
82
82
  add_compile_definitions(RN_AUDIO_API_ENABLE_WORKLETS=0)
83
83
  add_compile_definitions(RN_AUDIO_API_TEST=1)
84
+ add_compile_definitions(RN_AUDIO_API_FFMPEG_DISABLED=1)
84
85
 
85
86
  target_link_libraries(tests
86
87
  rnaudioapi