@livekit/react-native 2.5.1 → 2.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/README.md +4 -3
  2. package/android/build.gradle +2 -1
  3. package/android/src/main/java/com/livekit/reactnative/LiveKitReactNative.kt +61 -5
  4. package/android/src/main/java/com/livekit/reactnative/LivekitReactNativeModule.kt +81 -4
  5. package/android/src/main/java/com/livekit/reactnative/audio/events/Events.kt +6 -0
  6. package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioFormat.kt +2 -0
  7. package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioProcessingController.kt +27 -0
  8. package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioProcessorInterface.kt +52 -0
  9. package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioRecordSamplesDispatcher.kt +72 -0
  10. package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioSinkManager.kt +75 -0
  11. package/android/src/main/java/com/livekit/reactnative/audio/processing/CustomAudioProcessingFactory.kt +78 -0
  12. package/android/src/main/java/com/livekit/reactnative/audio/processing/MultibandVolumeProcessor.kt +181 -0
  13. package/android/src/main/java/com/livekit/reactnative/audio/processing/VolumeProcessor.kt +67 -0
  14. package/android/src/main/java/com/livekit/reactnative/audio/processing/fft/FFTAudioAnalyzer.kt +224 -0
  15. package/ios/AudioUtils.swift +49 -0
  16. package/ios/LKAudioProcessingAdapter.h +26 -0
  17. package/ios/LKAudioProcessingAdapter.m +117 -0
  18. package/ios/LKAudioProcessingManager.h +34 -0
  19. package/ios/LKAudioProcessingManager.m +63 -0
  20. package/ios/LiveKitReactNativeModule.swift +234 -0
  21. package/ios/LivekitReactNative-Bridging-Header.h +5 -2
  22. package/ios/LivekitReactNative.h +2 -6
  23. package/ios/LivekitReactNative.m +3 -166
  24. package/ios/LivekitReactNativeModule.m +42 -0
  25. package/ios/Logging.swift +4 -0
  26. package/ios/audio/AVAudioPCMBuffer.swift +136 -0
  27. package/ios/audio/AudioProcessing.swift +163 -0
  28. package/ios/audio/AudioRendererManager.swift +71 -0
  29. package/ios/audio/FFTProcessor.swift +147 -0
  30. package/ios/audio/MultibandVolumeAudioRenderer.swift +67 -0
  31. package/ios/audio/RingBuffer.swift +51 -0
  32. package/ios/audio/VolumeAudioRenderer.swift +50 -0
  33. package/lib/commonjs/LKNativeModule.js +18 -0
  34. package/lib/commonjs/LKNativeModule.js.map +1 -0
  35. package/lib/commonjs/audio/AudioSession.js +9 -17
  36. package/lib/commonjs/audio/AudioSession.js.map +1 -1
  37. package/lib/commonjs/components/BarVisualizer.js +192 -0
  38. package/lib/commonjs/components/BarVisualizer.js.map +1 -0
  39. package/lib/commonjs/events/EventEmitter.js +45 -0
  40. package/lib/commonjs/events/EventEmitter.js.map +1 -0
  41. package/lib/commonjs/hooks/useMultibandTrackVolume.js +64 -0
  42. package/lib/commonjs/hooks/useMultibandTrackVolume.js.map +1 -0
  43. package/lib/commonjs/hooks/useTrackVolume.js +45 -0
  44. package/lib/commonjs/hooks/useTrackVolume.js.map +1 -0
  45. package/lib/commonjs/hooks.js +24 -0
  46. package/lib/commonjs/hooks.js.map +1 -1
  47. package/lib/commonjs/index.js +14 -0
  48. package/lib/commonjs/index.js.map +1 -1
  49. package/lib/module/LKNativeModule.js +12 -0
  50. package/lib/module/LKNativeModule.js.map +1 -0
  51. package/lib/module/audio/AudioSession.js +9 -17
  52. package/lib/module/audio/AudioSession.js.map +1 -1
  53. package/lib/module/components/BarVisualizer.js +182 -0
  54. package/lib/module/components/BarVisualizer.js.map +1 -0
  55. package/lib/module/events/EventEmitter.js +36 -0
  56. package/lib/module/events/EventEmitter.js.map +1 -0
  57. package/lib/module/hooks/useMultibandTrackVolume.js +58 -0
  58. package/lib/module/hooks/useMultibandTrackVolume.js.map +1 -0
  59. package/lib/module/hooks/useTrackVolume.js +39 -0
  60. package/lib/module/hooks/useTrackVolume.js.map +1 -0
  61. package/lib/module/hooks.js +2 -0
  62. package/lib/module/hooks.js.map +1 -1
  63. package/lib/module/index.js +3 -0
  64. package/lib/module/index.js.map +1 -1
  65. package/lib/typescript/lib/commonjs/LKNativeModule.d.ts +3 -0
  66. package/lib/typescript/lib/commonjs/components/BarVisualizer.d.ts +32 -0
  67. package/lib/typescript/lib/commonjs/events/EventEmitter.d.ts +4 -0
  68. package/lib/typescript/lib/commonjs/hooks/useMultibandTrackVolume.d.ts +8 -0
  69. package/lib/typescript/lib/commonjs/hooks/useTrackVolume.d.ts +8 -0
  70. package/lib/typescript/lib/module/LKNativeModule.d.ts +2 -0
  71. package/lib/typescript/lib/module/components/BarVisualizer.d.ts +10 -0
  72. package/lib/typescript/lib/module/events/EventEmitter.d.ts +3 -0
  73. package/lib/typescript/lib/module/hooks/useMultibandTrackVolume.d.ts +7 -0
  74. package/lib/typescript/lib/module/hooks/useTrackVolume.d.ts +7 -0
  75. package/lib/typescript/lib/module/hooks.d.ts +2 -0
  76. package/lib/typescript/lib/module/index.d.ts +1 -0
  77. package/lib/typescript/src/LKNativeModule.d.ts +2 -0
  78. package/lib/typescript/src/components/BarVisualizer.d.ts +49 -0
  79. package/lib/typescript/src/events/EventEmitter.d.ts +6 -0
  80. package/lib/typescript/src/hooks/useMultibandTrackVolume.d.ts +31 -0
  81. package/lib/typescript/src/hooks/useTrackVolume.d.ts +9 -0
  82. package/lib/typescript/src/hooks.d.ts +2 -0
  83. package/lib/typescript/src/index.d.ts +1 -0
  84. package/livekit-react-native.podspec +26 -6
  85. package/package.json +5 -5
  86. package/src/LKNativeModule.ts +19 -0
  87. package/src/audio/AudioSession.ts +9 -24
  88. package/src/components/BarVisualizer.tsx +252 -0
  89. package/src/events/EventEmitter.ts +51 -0
  90. package/src/hooks/useMultibandTrackVolume.ts +97 -0
  91. package/src/hooks/useTrackVolume.ts +62 -0
  92. package/src/hooks.ts +2 -0
  93. package/src/index.tsx +3 -0
  94. package/ios/AudioUtils.h +0 -9
  95. package/ios/AudioUtils.m +0 -48
@@ -0,0 +1,63 @@
1
#import "LKAudioProcessingManager.h"
#import "LKAudioProcessingAdapter.h"

// Owns the WebRTC audio processing module plus two LKAudioProcessingAdapter
// instances: one hooked in after capture processing (local microphone audio)
// and one before render processing (remote audio). Renderers and processors
// registered here receive those audio frames.
@implementation LKAudioProcessingManager

// Process-wide singleton, created lazily and thread-safely via dispatch_once.
+ (instancetype)sharedInstance {
    static dispatch_once_t onceToken;
    static LKAudioProcessingManager* sharedInstance = nil;
    dispatch_once(&onceToken, ^{
        sharedInstance = [[self alloc] init];
    });
    return sharedInstance;
}

// Builds the processing module and wires the two adapters in as its
// capture-post and render-pre processing delegates.
- (instancetype)init {
    if (self = [super init]) {
        _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init];
        _capturePostProcessingAdapter = [[LKAudioProcessingAdapter alloc] init];
        _renderPreProcessingAdapter = [[LKAudioProcessingAdapter alloc] init];
        _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter;
        _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter;
    }
    return self;
}

// Renderers observing local (post-capture) audio.
- (void)addLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
    [_capturePostProcessingAdapter addAudioRenderer:renderer];
}

- (void)removeLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
    [_capturePostProcessingAdapter removeAudioRenderer:renderer];
}

// Renderers observing remote (pre-render) audio.
- (void)addRemoteAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
    [_renderPreProcessingAdapter addAudioRenderer:renderer];
}

- (void)removeRemoteAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
    [_renderPreProcessingAdapter removeAudioRenderer:renderer];
}

// Processors applied to local audio after built-in capture processing.
- (void)addCapturePostProcessor:(nonnull id<LKExternalAudioProcessingDelegate>)processor {
    [_capturePostProcessingAdapter addProcessing:processor];
}

- (void)removeCapturePostProcessor:(nonnull id<LKExternalAudioProcessingDelegate>)processor {
    [_capturePostProcessingAdapter removeProcessing:processor];
}

// Processors applied to remote audio before render processing.
- (void)addRenderPreProcessor:(nonnull id<LKExternalAudioProcessingDelegate>)processor {
    [_renderPreProcessingAdapter addProcessing:processor];
}

- (void)removeRenderPreProcessor:(nonnull id<LKExternalAudioProcessingDelegate>)processor {
    [_renderPreProcessingAdapter removeProcessing:processor];
}

// NOTE(review): unimplemented stub — registered processors are never cleared.
- (void)clearProcessors {
    // TODO
}

@end
@@ -0,0 +1,234 @@
1
import AVFAudio
import AVFoundation
import AVKit
import React
import UIKit
import livekit_react_native_webrtc
6
/// Names of the events this module emits to JavaScript via RCTEventEmitter.
/// Must stay in sync with the listener names used on the JS side.
struct LKEvents {
  // Fired by VolumeAudioRenderer with a single volume level.
  static let kEventVolumeProcessed = "LK_VOLUME_PROCESSED";
  // Fired by MultibandVolumeAudioRenderer with per-band magnitudes.
  static let kEventMultibandProcessed = "LK_MULTIBAND_PROCESSED";
}
10
+
11
@objc(LivekitReactNativeModule)
public class LivekitReactNativeModule: RCTEventEmitter {

    // This cannot be initialized in init as self.bridge is given afterwards.
    private var _audioRendererManager: AudioRendererManager? = nil

    /// Lazily-created manager that registers native audio renderers by react
    /// tag and attaches/detaches them to WebRTC tracks.
    public var audioRendererManager: AudioRendererManager {
        get {
            if _audioRendererManager == nil {
                _audioRendererManager = AudioRendererManager(bridge: self.bridge)
            }

            return _audioRendererManager!
        }
    }

    /// Installs LiveKit's default WebRTC audio session configuration:
    /// play-and-record category, video-chat mode, speaker output by default.
    @objc
    public override init() {
        super.init()
        let config = RTCAudioSessionConfiguration()
        config.category = AVAudioSession.Category.playAndRecord.rawValue
        config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
        config.mode = AVAudioSession.Mode.videoChat.rawValue

        RTCAudioSessionConfiguration.setWebRTC(config)
    }

    @objc
    override public static func requiresMainQueueSetup() -> Bool {
        return false
    }

    /// One-time WebRTC setup: installs a simulcast-capable video encoder
    /// factory and LiveKit's shared audio processing module.
    @objc
    public static func setup() {
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let simulcastVideoEncoderFactory = RTCVideoEncoderFactorySimulcast(primary: videoEncoderFactory, fallback: videoEncoderFactory)
        let options = WebRTCModuleOptions.sharedInstance()
        options.videoEncoderFactory = simulcastVideoEncoderFactory
        options.audioProcessingModule = LKAudioProcessingManager.sharedInstance().audioProcessingModule
    }

    /// Applies the JS-side audio configuration. Only the `ios.defaultOutput`
    /// key is honored ("speaker" or "earpiece"); no-op without an `ios` dict.
    @objc(configureAudio:)
    public func configureAudio(_ config: NSDictionary) {
        guard let iOSConfig = config["ios"] as? NSDictionary
        else {
            return
        }

        let defaultOutput = iOSConfig["defaultOutput"] as? String ?? "speaker"

        let rtcConfig = RTCAudioSessionConfiguration()
        rtcConfig.category = AVAudioSession.Category.playAndRecord.rawValue

        if (defaultOutput == "earpiece") {
            rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP];
            rtcConfig.mode = AVAudioSession.Mode.voiceChat.rawValue
        } else {
            rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
            rtcConfig.mode = AVAudioSession.Mode.videoChat.rawValue
        }
        RTCAudioSessionConfiguration.setWebRTC(rtcConfig)
    }

    /// No-op on iOS; the session is managed by WebRTC. Kept for API parity
    /// with Android.
    @objc(startAudioSession)
    public func startAudioSession() {
        // intentionally left empty
    }

    /// No-op on iOS; kept for API parity with Android.
    @objc(stopAudioSession)
    public func stopAudioSession() {
        // intentionally left empty
    }

    /// Programmatically opens the system audio route picker by tapping the
    /// hidden button inside an AVRoutePickerView.
    @objc(showAudioRoutePicker)
    public func showAudioRoutePicker() {
        if #available(iOS 11.0, *) {
            let routePickerView = AVRoutePickerView()
            let subviews = routePickerView.subviews
            for subview in subviews {
                if subview.isKind(of: UIButton.self) {
                    let button = subview as! UIButton
                    button.sendActions(for: .touchUpInside)
                    break
                }
            }
        }
    }

    /// Resolves with the fixed set of selectable outputs on iOS.
    @objc(getAudioOutputsWithResolver:withRejecter:)
    public func getAudioOutputs(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock){
        resolve(["default", "force_speaker"])
    }

    /// Overrides the audio output port: "default" clears the override,
    /// "force_speaker" routes to the built-in speaker. Rejects on AVFoundation
    /// errors; unknown ids resolve without changes.
    @objc(selectAudioOutput:withResolver:withRejecter:)
    public func selectAudioOutput(_ deviceId: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
        let session = AVAudioSession.sharedInstance()
        do {
            if (deviceId == "default") {
                try session.overrideOutputAudioPort(.none)
            } else if (deviceId == "force_speaker") {
                try session.overrideOutputAudioPort(.speaker)
            }
        } catch {
            reject("selectAudioOutput error", error.localizedDescription, error)
            return
        }

        resolve(nil)
    }

    /// Applies a fine-grained Apple audio session configuration (category,
    /// category options, mode) from JS, mutating the shared WebRTC config and
    /// the live session under the session's configuration lock.
    @objc(setAppleAudioConfiguration:)
    public func setAppleAudioConfiguration(_ configuration: NSDictionary) {
        let session = RTCAudioSession.sharedInstance()
        let config = RTCAudioSessionConfiguration.webRTC()

        let appleAudioCategory = configuration["audioCategory"] as? String
        let appleAudioCategoryOptions = configuration["audioCategoryOptions"] as? [String]
        let appleAudioMode = configuration["audioMode"] as? String

        session.lockForConfiguration()

        var categoryChanged = false

        if let appleAudioCategoryOptions = appleAudioCategoryOptions {
            categoryChanged = true

            // Unknown option strings are silently ignored.
            var newOptions: AVAudioSession.CategoryOptions = []
            for option in appleAudioCategoryOptions {
                if option == "mixWithOthers" {
                    newOptions.insert(.mixWithOthers)
                } else if option == "duckOthers" {
                    newOptions.insert(.duckOthers)
                } else if option == "allowBluetooth" {
                    newOptions.insert(.allowBluetooth)
                } else if option == "allowBluetoothA2DP" {
                    newOptions.insert(.allowBluetoothA2DP)
                } else if option == "allowAirPlay" {
                    newOptions.insert(.allowAirPlay)
                } else if option == "defaultToSpeaker" {
                    newOptions.insert(.defaultToSpeaker)
                }
            }
            config.categoryOptions = newOptions
        }

        if let appleAudioCategory = appleAudioCategory {
            categoryChanged = true
            config.category = AudioUtils.audioSessionCategoryFromString(appleAudioCategory).rawValue
        }

        if categoryChanged {
            do {
                try session.setCategory(AVAudioSession.Category(rawValue: config.category), with: config.categoryOptions)
            } catch {
                // Best-effort: log and continue so the mode can still apply.
                NSLog("Error setting category: %@", error.localizedDescription)
            }
        }

        if let appleAudioMode = appleAudioMode {
            let mode = AudioUtils.audioSessionModeFromString(appleAudioMode)
            config.mode = mode.rawValue
            do {
                try session.setMode(mode)
            } catch {
                NSLog("Error setting mode: %@", error.localizedDescription)
            }
        }

        session.unlockForConfiguration()
    }

    /// Creates a single-value volume processor for the given track and returns
    /// its react tag. Called synchronously from JS (blocking method).
    @objc(createVolumeProcessor:trackId:)
    public func createVolumeProcessor(_ pcId: NSNumber, trackId: String) -> String {
        let renderer = VolumeAudioRenderer(intervalMs: 40.0, eventEmitter: self)
        let reactTag = self.audioRendererManager.registerRenderer(renderer)
        renderer.reactTag = reactTag
        self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)

        return reactTag
    }

    /// Detaches and unregisters the volume processor identified by reactTag.
    @objc(deleteVolumeProcessor:pcId:trackId:)
    public func deleteVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
        self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
        self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)

        return nil
    }

    /// Creates a multiband (FFT-based) volume processor for the given track
    /// and returns its react tag. Called synchronously from JS.
    @objc(createMultibandVolumeProcessor:pcId:trackId:)
    public func createMultibandVolumeProcessor(_ options: NSDictionary, pcId: NSNumber, trackId: String) -> String {
        // BUG FIX: values crossing the RN bridge arrive as NSNumber (JS
        // numbers). The previous `as? NSString` casts always failed for
        // numeric values, silently falling back to the defaults. Accept
        // NSNumber first and keep NSString as a fallback for stringly-typed
        // callers, so existing behavior is a strict subset of the new one.
        let bands = (options["bands"] as? NSNumber)?.intValue
            ?? (options["bands"] as? NSString)?.integerValue ?? 5
        let minFrequency = (options["minFrequency"] as? NSNumber)?.floatValue
            ?? (options["minFrequency"] as? NSString)?.floatValue ?? 1000
        let maxFrequency = (options["maxFrequency"] as? NSNumber)?.floatValue
            ?? (options["maxFrequency"] as? NSString)?.floatValue ?? 8000
        let intervalMs = (options["updateInterval"] as? NSNumber)?.floatValue
            ?? (options["updateInterval"] as? NSString)?.floatValue ?? 40

        let renderer = MultibandVolumeAudioRenderer(
            bands: bands,
            minFrequency: minFrequency,
            maxFrequency: maxFrequency,
            intervalMs: intervalMs,
            eventEmitter: self
        )
        let reactTag = self.audioRendererManager.registerRenderer(renderer)
        renderer.reactTag = reactTag
        self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)

        return reactTag
    }

    /// Detaches and unregisters the multiband processor identified by reactTag.
    @objc(deleteMultibandVolumeProcessor:pcId:trackId:)
    public func deleteMultibandVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
        self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
        self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)

        return nil
    }

    /// RCTEventEmitter requirement: the events this module may emit to JS.
    override public func supportedEvents() -> [String]! {
        return [
            LKEvents.kEventVolumeProcessed,
            LKEvents.kEventMultibandProcessed,
        ]
    }
}
@@ -1,2 +1,5 @@
1
- #import <React/RCTBridgeModule.h>
2
- #import <React/RCTViewManager.h>
1
+ #import "RCTBridgeModule.h"
2
+ #import "RCTEventEmitter.h"
3
+ #import "RCTViewManager.h"
4
+ #import "WebRTCModule.h"
5
+ #import "WebRTCModule+RTCMediaStream.h"
@@ -2,13 +2,9 @@
2
2
  // LivekitReactNative.h
3
3
  // LivekitReactNative
4
4
  //
5
- // Created by David Liu on 9/4/22.
6
- // Copyright © 2022 LiveKit. All rights reserved.
5
+ // Copyright © 2022-2025 LiveKit. All rights reserved.
7
6
  //
8
- #import <React/RCTBridgeModule.h>
9
-
10
- @interface LivekitReactNative : NSObject <RCTBridgeModule>
11
7
 
8
+ @interface LivekitReactNative : NSObject
12
9
  +(void)setup;
13
-
14
10
  @end
@@ -1,179 +1,16 @@
1
- #import "AudioUtils.h"
2
- #import "LivekitReactNative.h"
3
1
  #import "WebRTCModule.h"
4
2
  #import "WebRTCModuleOptions.h"
5
- #import <WebRTC/RTCAudioSession.h>
6
- #import <WebRTC/RTCAudioSessionConfiguration.h>
7
- #import <AVFAudio/AVFAudio.h>
8
- #import <AVKit/AVKit.h>
3
+ #import "LivekitReactNative.h"
4
+ #import "LKAudioProcessingManager.h"
9
5
 
10
6
  @implementation LivekitReactNative
11
- RCT_EXPORT_MODULE();
12
-
13
- -(instancetype)init {
14
- if(self = [super init]) {
15
-
16
- RTCAudioSessionConfiguration* config = [[RTCAudioSessionConfiguration alloc] init];
17
- [config setCategory:AVAudioSessionCategoryPlayAndRecord];
18
- [config setCategoryOptions:
19
- AVAudioSessionCategoryOptionAllowAirPlay|
20
- AVAudioSessionCategoryOptionAllowBluetooth|
21
- AVAudioSessionCategoryOptionAllowBluetoothA2DP|
22
- AVAudioSessionCategoryOptionDefaultToSpeaker
23
- ];
24
- [config setMode:AVAudioSessionModeVideoChat];
25
- [RTCAudioSessionConfiguration setWebRTCConfiguration: config];
26
- return self;
27
- } else {
28
- return nil;
29
- }
30
- }
31
-
32
- +(BOOL)requiresMainQueueSetup {
33
- return NO;
34
- }
35
7
 
36
8
  +(void)setup {
37
9
  RTCDefaultVideoEncoderFactory *videoEncoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
38
10
  RTCVideoEncoderFactorySimulcast *simulcastVideoEncoderFactory = [[RTCVideoEncoderFactorySimulcast alloc] initWithPrimary:videoEncoderFactory fallback:videoEncoderFactory];
39
11
  WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
40
12
  options.videoEncoderFactory = simulcastVideoEncoderFactory;
13
+ options.audioProcessingModule = LKAudioProcessingManager.sharedInstance.audioProcessingModule;
41
14
  }
42
15
 
43
- /// Configure default audio config for WebRTC
44
- RCT_EXPORT_METHOD(configureAudio:(NSDictionary *) config){
45
- NSDictionary *iOSConfig = [config objectForKey:@"ios"];
46
- if(iOSConfig == nil) {
47
- return;
48
- }
49
-
50
- NSString * defaultOutput = [iOSConfig objectForKey:@"defaultOutput"];
51
- if (defaultOutput == nil) {
52
- defaultOutput = @"speaker";
53
- }
54
-
55
- RTCAudioSessionConfiguration* rtcConfig = [[RTCAudioSessionConfiguration alloc] init];
56
- [rtcConfig setCategory:AVAudioSessionCategoryPlayAndRecord];
57
-
58
- if([defaultOutput isEqualToString:@"earpiece"]){
59
- [rtcConfig setCategoryOptions:
60
- AVAudioSessionCategoryOptionAllowAirPlay|
61
- AVAudioSessionCategoryOptionAllowBluetooth|
62
- AVAudioSessionCategoryOptionAllowBluetoothA2DP];
63
- [rtcConfig setMode:AVAudioSessionModeVoiceChat];
64
- } else {
65
- [rtcConfig setCategoryOptions:
66
- AVAudioSessionCategoryOptionAllowAirPlay|
67
- AVAudioSessionCategoryOptionAllowBluetooth|
68
- AVAudioSessionCategoryOptionAllowBluetoothA2DP|
69
- AVAudioSessionCategoryOptionDefaultToSpeaker];
70
- [rtcConfig setMode:AVAudioSessionModeVideoChat];
71
- }
72
- [RTCAudioSessionConfiguration setWebRTCConfiguration: rtcConfig];
73
- }
74
-
75
- RCT_EXPORT_METHOD(startAudioSession){
76
- }
77
-
78
- RCT_EXPORT_METHOD(stopAudioSession){
79
-
80
- }
81
-
82
- RCT_EXPORT_METHOD(showAudioRoutePicker){
83
- if (@available(iOS 11.0, *)) {
84
- AVRoutePickerView *routePickerView = [[AVRoutePickerView alloc] init];
85
- NSArray<UIView *> *subviews = routePickerView.subviews;
86
- for (int i = 0; i < subviews.count; i++) {
87
- UIView *subview = [subviews objectAtIndex:i];
88
- if([subview isKindOfClass:[UIButton class]]) {
89
- UIButton *button = (UIButton *) subview;
90
- [button sendActionsForControlEvents:UIControlEventTouchUpInside];
91
- break;
92
- }
93
- }
94
- }
95
- }
96
-
97
- RCT_EXPORT_METHOD(getAudioOutputsWithResolver:(RCTPromiseResolveBlock)resolve
98
- withRejecter:(RCTPromiseRejectBlock)reject){
99
- resolve(@[@"default", @"force_speaker"]);
100
- }
101
- RCT_EXPORT_METHOD(selectAudioOutput:(NSString *)deviceId
102
- withResolver:(RCTPromiseResolveBlock)resolve
103
- withRejecter:(RCTPromiseRejectBlock)reject){
104
-
105
- AVAudioSession *session = [AVAudioSession sharedInstance];
106
- NSError *error = nil;
107
-
108
- if ([deviceId isEqualToString:@"default"]) {
109
- [session overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];
110
- } else if ([deviceId isEqualToString:@"force_speaker"]) {
111
- [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error];
112
- }
113
-
114
- if (error != nil) {
115
- reject(@"selectAudioOutput error", error.localizedDescription, error);
116
- } else {
117
- resolve(nil);
118
- }
119
- }
120
-
121
-
122
- /// Configure audio config for WebRTC
123
- RCT_EXPORT_METHOD(setAppleAudioConfiguration:(NSDictionary *) configuration){
124
- RTCAudioSession* session = [RTCAudioSession sharedInstance];
125
- RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
126
-
127
- NSString* appleAudioCategory = configuration[@"audioCategory"];
128
- NSArray* appleAudioCategoryOptions = configuration[@"audioCategoryOptions"];
129
- NSString* appleAudioMode = configuration[@"audioMode"];
130
-
131
- [session lockForConfiguration];
132
-
133
- NSError* error = nil;
134
- BOOL categoryChanged = NO;
135
- if(appleAudioCategoryOptions != nil) {
136
- categoryChanged = YES;
137
- config.categoryOptions = 0;
138
- for(NSString* option in appleAudioCategoryOptions) {
139
- if([@"mixWithOthers" isEqualToString:option]) {
140
- config.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
141
- } else if([@"duckOthers" isEqualToString:option]) {
142
- config.categoryOptions |= AVAudioSessionCategoryOptionDuckOthers;
143
- } else if([@"allowBluetooth" isEqualToString:option]) {
144
- config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetooth;
145
- } else if([@"allowBluetoothA2DP" isEqualToString:option]) {
146
- config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetoothA2DP;
147
- } else if([@"allowAirPlay" isEqualToString:option]) {
148
- config.categoryOptions |= AVAudioSessionCategoryOptionAllowAirPlay;
149
- } else if([@"defaultToSpeaker" isEqualToString:option]) {
150
- config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
151
- }
152
- }
153
- }
154
-
155
- if(appleAudioCategory != nil) {
156
- categoryChanged = YES;
157
- config.category = [AudioUtils audioSessionCategoryFromString:appleAudioCategory];
158
- }
159
-
160
- if(categoryChanged) {
161
- [session setCategory:config.category withOptions:config.categoryOptions error:&error];
162
- if(error != nil) {
163
- NSLog(@"Error setting category: %@", [error localizedDescription]);
164
- error = nil;
165
- }
166
- }
167
-
168
- if(appleAudioMode != nil) {
169
- config.mode = [AudioUtils audioSessionModeFromString:appleAudioMode];
170
- [session setMode:config.mode error:&error];
171
- if(error != nil) {
172
- NSLog(@"Error setting category: %@", [error localizedDescription]);
173
- error = nil;
174
- }
175
- }
176
-
177
- [session unlockForConfiguration];
178
- }
179
16
  @end
@@ -0,0 +1,42 @@
1
#import <React/RCTBridgeModule.h>
#import <React/RCTEventEmitter.h>
#import "WebRTCModule.h"

// Objective-C bridge exposing the Swift LivekitReactNativeModule (an
// RCTEventEmitter subclass) to React Native. Signatures here must match the
// @objc selectors declared on the Swift class exactly.
@interface RCT_EXTERN_MODULE(LivekitReactNativeModule, RCTEventEmitter)

RCT_EXTERN_METHOD(configureAudio:(NSDictionary *) config)
RCT_EXTERN_METHOD(startAudioSession)
RCT_EXTERN_METHOD(stopAudioSession)

RCT_EXTERN_METHOD(showAudioRoutePicker)
RCT_EXTERN_METHOD(getAudioOutputsWithResolver:(RCTPromiseResolveBlock)resolve
                  withRejecter:(RCTPromiseRejectBlock)reject)
RCT_EXTERN_METHOD(selectAudioOutput:(NSString *)deviceId
                  withResolver:(RCTPromiseResolveBlock)resolve
                  withRejecter:(RCTPromiseRejectBlock)reject)


/// Configure audio config for WebRTC
RCT_EXTERN_METHOD(setAppleAudioConfiguration:(NSDictionary *) configuration)


// The processor factory methods run synchronously on the JS thread (note the
// double underscore in the macro name — that is the correct RN spelling) so
// the react tag can be returned directly to the caller.
RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(createVolumeProcessor:(nonnull NSNumber *)pcId
                                        trackId:(nonnull NSString *)trackId)

RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(deleteVolumeProcessor:(nonnull NSString *)reactTag
                                        pcId:(nonnull NSNumber *)pcId
                                        trackId:(nonnull NSString *)trackId)

RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(createMultibandVolumeProcessor:(NSDictionary *)options
                                        pcId:(nonnull NSNumber *)pcId
                                        trackId:(nonnull NSString *)trackId)

RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(deleteMultibandVolumeProcessor:(nonnull NSString *)reactTag
                                        pcId:(nonnull NSNumber *)pcId
                                        trackId:(nonnull NSString *)trackId)

// No UI work at init time, so the module may be set up off the main queue.
+(BOOL)requiresMainQueueSetup {
    return NO;
}

@end
@@ -0,0 +1,4 @@
1
/// Lightweight debug logger that prefixes each message with its call site.
/// - Parameters:
///   - object: Value to log; rendered with string interpolation.
///   - functionName: Captured automatically from the caller.
///   - fileName: Captured automatically; only the last path component is used.
///   - lineNumber: Captured automatically from the caller.
public func lklog(_ object: Any, functionName: String = #function, fileName: String = #file, lineNumber: Int = #line) {
    let sourceFile = (fileName as NSString).lastPathComponent
    let message = "\(sourceFile).\(functionName):\(lineNumber) : \(object)\n"
    print(message)
}
@@ -0,0 +1,136 @@
1
/*
 * Copyright 2025 LiveKit
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import Accelerate
import AVFoundation

public extension AVAudioPCMBuffer {
    /// Returns a buffer resampled to `targetSampleRate` (same common format,
    /// channel count, and interleaving), or `self` when the rate already
    /// matches. Returns `nil` if the converter or buffers cannot be created
    /// or the conversion fails.
    func resample(toSampleRate targetSampleRate: Double) -> AVAudioPCMBuffer? {
        let sourceFormat = format

        if sourceFormat.sampleRate == targetSampleRate {
            // Already targetSampleRate.
            return self
        }

        // Define the source format (from the input buffer) and the target format.
        guard let targetFormat = AVAudioFormat(commonFormat: sourceFormat.commonFormat,
                                               sampleRate: targetSampleRate,
                                               channels: sourceFormat.channelCount,
                                               interleaved: sourceFormat.isInterleaved)
        else {
            print("Failed to create target format.")
            return nil
        }

        guard let converter = AVAudioConverter(from: sourceFormat, to: targetFormat) else {
            print("Failed to create audio converter.")
            return nil
        }

        // Expected output frame count; truncates any fractional frame.
        let capacity = targetFormat.sampleRate * Double(frameLength) / sourceFormat.sampleRate

        guard let convertedBuffer = AVAudioPCMBuffer(pcmFormat: targetFormat, frameCapacity: AVAudioFrameCount(capacity)) else {
            print("Failed to create converted buffer.")
            return nil
        }

        // Single-shot input block: supply this buffer once, then signal that
        // no further data is coming so the converter terminates.
        var isDone = false
        let inputBlock: AVAudioConverterInputBlock = { _, outStatus in
            if isDone {
                outStatus.pointee = .noDataNow
                return nil
            }
            outStatus.pointee = .haveData
            isDone = true
            return self
        }

        var error: NSError?
        let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputBlock)

        if status == .error {
            print("Conversion failed: \(error?.localizedDescription ?? "Unknown error")")
            return nil
        }

        // NOTE(review): this forces frameLength to full capacity, but
        // AVAudioConverter is documented to set the output buffer's
        // frameLength to the number of frames actually produced; overriding
        // it may expose unwritten tail samples when the converter yields
        // fewer frames than capacity — confirm before relying on the tail.
        convertedBuffer.frameLength = convertedBuffer.frameCapacity

        return convertedBuffer
    }

    /// Convert PCM buffer to specified common format.
    /// Currently supports conversion from Int16 to Float32.
    /// Returns `self` when already in the requested format; `nil` for any
    /// unsupported conversion or allocation failure.
    func convert(toCommonFormat commonFormat: AVAudioCommonFormat) -> AVAudioPCMBuffer? {
        // Check if conversion is needed
        guard format.commonFormat != commonFormat else {
            return self
        }

        // Check if the conversion is supported
        guard format.commonFormat == .pcmFormatInt16, commonFormat == .pcmFormatFloat32 else {
            print("Unsupported conversion: only Int16 to Float32 is supported")
            return nil
        }

        // Create output format (always deinterleaved Float32).
        guard let outputFormat = AVAudioFormat(commonFormat: commonFormat,
                                               sampleRate: format.sampleRate,
                                               channels: format.channelCount,
                                               interleaved: false)
        else {
            print("Failed to create output audio format")
            return nil
        }

        // Create output buffer
        guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,
                                                  frameCapacity: frameCapacity)
        else {
            print("Failed to create output PCM buffer")
            return nil
        }

        outputBuffer.frameLength = frameLength

        let channelCount = Int(format.channelCount)
        let frameCount = Int(frameLength)

        // Ensure the source buffer has Int16 data
        guard let int16Data = int16ChannelData else {
            print("Source buffer doesn't contain Int16 data")
            return nil
        }

        // Ensure the output buffer has Float32 data
        guard let floatData = outputBuffer.floatChannelData else {
            print("Failed to get float channel data from output buffer")
            return nil
        }

        // Convert Int16 to Float32 and normalize to [-1.0, 1.0].
        // `scalar` must be `var` so its address can be passed to vDSP_vsmul.
        let scale = Float(Int16.max)
        var scalar = 1.0 / scale

        for channel in 0 ..< channelCount {
            // Widen Int16 -> Float32, then scale in place.
            vDSP_vflt16(int16Data[channel], 1, floatData[channel], 1, vDSP_Length(frameCount))
            vDSP_vsmul(floatData[channel], 1, &scalar, floatData[channel], 1, vDSP_Length(frameCount))
        }

        return outputBuffer
    }
}