@livekit/react-native 2.5.1 → 2.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -3
- package/android/build.gradle +2 -1
- package/android/src/main/java/com/livekit/reactnative/LiveKitReactNative.kt +61 -5
- package/android/src/main/java/com/livekit/reactnative/LivekitReactNativeModule.kt +81 -4
- package/android/src/main/java/com/livekit/reactnative/audio/events/Events.kt +6 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioFormat.kt +2 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioProcessingController.kt +27 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioProcessorInterface.kt +52 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioRecordSamplesDispatcher.kt +72 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/AudioSinkManager.kt +75 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/CustomAudioProcessingFactory.kt +78 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/MultibandVolumeProcessor.kt +181 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/VolumeProcessor.kt +67 -0
- package/android/src/main/java/com/livekit/reactnative/audio/processing/fft/FFTAudioAnalyzer.kt +224 -0
- package/ios/AudioUtils.swift +49 -0
- package/ios/LKAudioProcessingAdapter.h +26 -0
- package/ios/LKAudioProcessingAdapter.m +117 -0
- package/ios/LKAudioProcessingManager.h +34 -0
- package/ios/LKAudioProcessingManager.m +63 -0
- package/ios/LiveKitReactNativeModule.swift +234 -0
- package/ios/LivekitReactNative-Bridging-Header.h +5 -2
- package/ios/LivekitReactNative.h +2 -6
- package/ios/LivekitReactNative.m +3 -166
- package/ios/LivekitReactNativeModule.m +42 -0
- package/ios/Logging.swift +4 -0
- package/ios/audio/AVAudioPCMBuffer.swift +136 -0
- package/ios/audio/AudioProcessing.swift +163 -0
- package/ios/audio/AudioRendererManager.swift +71 -0
- package/ios/audio/FFTProcessor.swift +147 -0
- package/ios/audio/MultibandVolumeAudioRenderer.swift +67 -0
- package/ios/audio/RingBuffer.swift +51 -0
- package/ios/audio/VolumeAudioRenderer.swift +50 -0
- package/lib/commonjs/LKNativeModule.js +18 -0
- package/lib/commonjs/LKNativeModule.js.map +1 -0
- package/lib/commonjs/audio/AudioSession.js +9 -17
- package/lib/commonjs/audio/AudioSession.js.map +1 -1
- package/lib/commonjs/components/BarVisualizer.js +192 -0
- package/lib/commonjs/components/BarVisualizer.js.map +1 -0
- package/lib/commonjs/events/EventEmitter.js +45 -0
- package/lib/commonjs/events/EventEmitter.js.map +1 -0
- package/lib/commonjs/hooks/useMultibandTrackVolume.js +64 -0
- package/lib/commonjs/hooks/useMultibandTrackVolume.js.map +1 -0
- package/lib/commonjs/hooks/useTrackVolume.js +45 -0
- package/lib/commonjs/hooks/useTrackVolume.js.map +1 -0
- package/lib/commonjs/hooks.js +24 -0
- package/lib/commonjs/hooks.js.map +1 -1
- package/lib/commonjs/index.js +14 -0
- package/lib/commonjs/index.js.map +1 -1
- package/lib/module/LKNativeModule.js +12 -0
- package/lib/module/LKNativeModule.js.map +1 -0
- package/lib/module/audio/AudioSession.js +9 -17
- package/lib/module/audio/AudioSession.js.map +1 -1
- package/lib/module/components/BarVisualizer.js +182 -0
- package/lib/module/components/BarVisualizer.js.map +1 -0
- package/lib/module/events/EventEmitter.js +36 -0
- package/lib/module/events/EventEmitter.js.map +1 -0
- package/lib/module/hooks/useMultibandTrackVolume.js +58 -0
- package/lib/module/hooks/useMultibandTrackVolume.js.map +1 -0
- package/lib/module/hooks/useTrackVolume.js +39 -0
- package/lib/module/hooks/useTrackVolume.js.map +1 -0
- package/lib/module/hooks.js +2 -0
- package/lib/module/hooks.js.map +1 -1
- package/lib/module/index.js +3 -0
- package/lib/module/index.js.map +1 -1
- package/lib/typescript/lib/commonjs/LKNativeModule.d.ts +3 -0
- package/lib/typescript/lib/commonjs/components/BarVisualizer.d.ts +32 -0
- package/lib/typescript/lib/commonjs/events/EventEmitter.d.ts +4 -0
- package/lib/typescript/lib/commonjs/hooks/useMultibandTrackVolume.d.ts +8 -0
- package/lib/typescript/lib/commonjs/hooks/useTrackVolume.d.ts +8 -0
- package/lib/typescript/lib/module/LKNativeModule.d.ts +2 -0
- package/lib/typescript/lib/module/components/BarVisualizer.d.ts +10 -0
- package/lib/typescript/lib/module/events/EventEmitter.d.ts +3 -0
- package/lib/typescript/lib/module/hooks/useMultibandTrackVolume.d.ts +7 -0
- package/lib/typescript/lib/module/hooks/useTrackVolume.d.ts +7 -0
- package/lib/typescript/lib/module/hooks.d.ts +2 -0
- package/lib/typescript/lib/module/index.d.ts +1 -0
- package/lib/typescript/src/LKNativeModule.d.ts +2 -0
- package/lib/typescript/src/components/BarVisualizer.d.ts +49 -0
- package/lib/typescript/src/events/EventEmitter.d.ts +6 -0
- package/lib/typescript/src/hooks/useMultibandTrackVolume.d.ts +31 -0
- package/lib/typescript/src/hooks/useTrackVolume.d.ts +9 -0
- package/lib/typescript/src/hooks.d.ts +2 -0
- package/lib/typescript/src/index.d.ts +1 -0
- package/livekit-react-native.podspec +26 -6
- package/package.json +5 -5
- package/src/LKNativeModule.ts +19 -0
- package/src/audio/AudioSession.ts +9 -24
- package/src/components/BarVisualizer.tsx +252 -0
- package/src/events/EventEmitter.ts +51 -0
- package/src/hooks/useMultibandTrackVolume.ts +97 -0
- package/src/hooks/useTrackVolume.ts +62 -0
- package/src/hooks.ts +2 -0
- package/src/index.tsx +3 -0
- package/ios/AudioUtils.h +0 -9
- package/ios/AudioUtils.m +0 -48
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Copyright 2025 LiveKit
|
|
3
|
+
*
|
|
4
|
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
* you may not use this file except in compliance with the License.
|
|
6
|
+
* You may obtain a copy of the License at
|
|
7
|
+
*
|
|
8
|
+
* http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
*
|
|
10
|
+
* Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
* See the License for the specific language governing permissions and
|
|
14
|
+
* limitations under the License.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import Accelerate
|
|
18
|
+
import AVFoundation
|
|
19
|
+
import Foundation
|
|
20
|
+
import livekit_react_native_webrtc
|
|
21
|
+
|
|
22
|
+
/// A pair of linear-scale audio level measurements for a single channel.
/// Produced by `AVAudioPCMBuffer.audioLevels()`.
public struct AudioLevel {
    /// Linear Scale RMS Value
    public let average: Float
    /// Linear-scale peak sample value for the analyzed frame range.
    public let peak: Float
}
|
|
27
|
+
|
|
28
|
+
public extension RTCAudioBuffer {
    /// Convert to AVAudioPCMBuffer Int16 format.
    ///
    /// - Returns: A new non-interleaved Int16 `AVAudioPCMBuffer` holding a copy of
    ///   this buffer's samples, or `nil` if the format/buffer could not be created
    ///   or the result has no Int16 channel data.
    /// NOTE(review): the sample rate is derived as `frames * 100`, i.e. it assumes
    /// every buffer carries exactly 10 ms of audio — TODO confirm the capture path
    /// always delivers 10 ms frames.
    @objc
    func toAVAudioPCMBuffer() -> AVAudioPCMBuffer? {
        guard let audioFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                              sampleRate: Double(frames * 100),
                                              channels: AVAudioChannelCount(channels),
                                              interleaved: false),
            let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat,
                                             frameCapacity: AVAudioFrameCount(frames))
        else { return nil }

        pcmBuffer.frameLength = AVAudioFrameCount(frames)

        guard let targetBufferPointer = pcmBuffer.int16ChannelData else { return nil }

        for i in 0 ..< channels {
            let sourceBuffer = rawBuffer(forChannel: i)
            let targetBuffer = targetBufferPointer[i]
            // sourceBuffer is in the format of [Int16] but is stored in 32-bit alignment, we need to pack the Int16 data correctly.

            for frame in 0 ..< frames {
                // Cast and pack the source 32-bit Int16 data into the target 16-bit buffer
                // Clamp into Int16 range first so an out-of-range float can't trap
                // in the Int16 initializer.
                let clampedValue = max(Float(Int16.min), min(Float(Int16.max), sourceBuffer[frame]))
                targetBuffer[frame] = Int16(clampedValue)
            }
        }

        return pcmBuffer
    }
}
|
|
59
|
+
|
|
60
|
+
public extension AVAudioPCMBuffer {
    /// Computes Peak and Linear Scale RMS Value (Average) for all channels.
    /// Returns an empty array when the buffer holds no float channel data.
    func audioLevels() -> [AudioLevel] {
        guard let channelData = floatChannelData else {
            // Not containing float data
            return []
        }

        return (0 ..< Int(format.channelCount)).map { channel in
            let samples = channelData[channel]
            var peakValue: Float = 0.0
            var rmsValue: Float = 0.0
            // Vectorized peak (max sample) and RMS over the valid frame range.
            vDSP_maxv(samples, stride, &peakValue, vDSP_Length(frameLength))
            vDSP_rmsqv(samples, stride, &rmsValue, vDSP_Length(frameLength))
            // No conversion to dB — values stay in linear scale.
            return AudioLevel(average: rmsValue, peak: peakValue)
        }
    }
}
|
|
83
|
+
|
|
84
|
+
public extension Sequence where Iterator.Element == AudioLevel {
    /// Combines all elements into a single audio level by computing the average value of all elements.
    /// Returns `nil` when the sequence is empty.
    func combine() -> AudioLevel? {
        var averageSum: Float = 0.0
        var peakSum: Float = 0.0
        var elementCount = 0

        for level in self {
            averageSum += level.average
            peakSum += level.peak
            elementCount += 1
        }

        guard elementCount > 0 else { return nil }

        return AudioLevel(average: averageSum / Float(elementCount),
                          peak: peakSum / Float(elementCount))
    }
}
|
|
100
|
+
|
|
101
|
+
/// Converts incoming PCM audio into normalized per-band magnitudes suitable for
/// visualization (e.g. a bar visualizer).
public class AudioVisualizeProcessor {
    /// Number of samples accumulated before each FFT pass.
    static let bufferSize = 1024

    // MARK: - Public

    public let minFrequency: Float
    public let maxFrequency: Float
    public let minDB: Float
    public let maxDB: Float
    public let bandsCount: Int

    // MARK: - Private

    private let ringBuffer = RingBuffer<Float>(size: AudioVisualizeProcessor.bufferSize)
    private let processor: FFTProcessor

    /// - Parameters:
    ///   - minFrequency/maxFrequency: frequency range (Hz) covered by the output bands.
    ///   - minDB/maxDB: decibel window used to normalize magnitudes into 0...1.
    ///   - bandsCount: number of output bands.
    public init(minFrequency: Float = 10,
                maxFrequency: Float = 8000,
                minDB: Float = -32.0,
                maxDB: Float = 32.0,
                bandsCount: Int = 100)
    {
        self.minFrequency = minFrequency
        self.maxFrequency = maxFrequency
        self.minDB = minDB
        self.maxDB = maxDB
        self.bandsCount = bandsCount

        processor = FFTProcessor(bufferSize: Self.bufferSize)
        // NOTE(review): removed a dead private stored property (`bands`) that was
        // assigned here and never read — `process` shadowed it with a local.
    }

    /// Feeds a PCM buffer into the ring buffer and, once a full FFT window is
    /// available, returns `bandsCount` magnitudes normalized to 0...1.
    /// - Returns: `nil` until enough samples have accumulated, or when the
    ///   buffer cannot be converted to Float32.
    public func process(pcmBuffer: AVAudioPCMBuffer) -> [Float]? {
        guard let pcmBuffer = pcmBuffer.convert(toCommonFormat: .pcmFormatFloat32) else { return nil }
        guard let floatChannelData = pcmBuffer.floatChannelData else { return nil }

        // Get the float array (first channel only).
        let floats = Array(UnsafeBufferPointer(start: floatChannelData[0], count: Int(pcmBuffer.frameLength)))
        ringBuffer.write(floats)

        // Get full-size buffer if available, otherwise return
        guard let buffer = ringBuffer.read() else { return nil }

        // Process FFT and compute frequency bands
        let fftRes = processor.process(buffer: buffer)
        let bands = fftRes.computeBands(
            minFrequency: minFrequency,
            maxFrequency: maxFrequency,
            bandsCount: bandsCount,
            sampleRate: Float(pcmBuffer.format.sampleRate)
        )

        let headroom = maxDB - minDB

        // Normalize magnitudes (already in decibels) into 0...1 within [minDB, maxDB].
        return bands.magnitudes.map { magnitude in
            let adjustedMagnitude = max(0, magnitude + abs(minDB))
            return min(1.0, adjustedMagnitude / headroom)
        }
    }
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import livekit_react_native_webrtc
|
|
2
|
+
|
|
3
|
+
@objc
public class AudioRendererManager: NSObject {
    private let bridge: RCTBridge
    /// Registered renderers keyed by their generated react tag.
    public private(set) var renderers: [String: RTCAudioRenderer] = [:]

    init(bridge: RCTBridge) {
        self.bridge = bridge
    }

    /// Stores the renderer under a freshly generated UUID tag and returns that tag.
    @objc
    public func registerRenderer(_ audioRenderer: RTCAudioRenderer) -> String {
        let reactTag = NSUUID().uuidString
        renderers[reactTag] = audioRenderer
        return reactTag
    }

    @objc
    public func unregisterRenderer(forReactTag: String) {
        renderers.removeValue(forKey: forReactTag)
    }

    /// Removes every entry whose value is this exact renderer instance.
    @objc
    public func unregisterRenderer(_ audioRenderer: RTCAudioRenderer) {
        renderers = renderers.filter { $0.value !== audioRenderer }
    }

    @objc
    public func attach(renderer: RTCAudioRenderer, pcId: NSNumber, trackId: String) {
        let webrtcModule = bridge.module(for: WebRTCModule.self) as! WebRTCModule
        guard let track = webrtcModule.track(forId: trackId, pcId: pcId) as? RTCAudioTrack else {
            lklog("couldn't find audio track: pcId: \(pcId), trackId: \(trackId)")
            return
        }

        // pcId == -1 routes through the shared processing manager's local
        // renderer list; otherwise attach directly to the remote track.
        if pcId == -1 {
            LKAudioProcessingManager.sharedInstance().addLocalAudioRenderer(renderer)
        } else {
            track.add(renderer)
        }
    }

    @objc
    public func detach(rendererByTag reactTag: String, pcId: NSNumber, trackId: String) {
        guard let renderer = renderers[reactTag] else {
            lklog("couldn't find renderer: tag: \(reactTag)")
            return
        }

        detach(renderer: renderer, pcId: pcId, trackId: trackId)
    }

    @objc
    public func detach(renderer: RTCAudioRenderer, pcId: NSNumber, trackId: String) {
        let webrtcModule = bridge.module(for: WebRTCModule.self) as! WebRTCModule
        guard let track = webrtcModule.track(forId: trackId, pcId: pcId) as? RTCAudioTrack else {
            lklog("couldn't find audio track: pcId: \(pcId), trackId: \(trackId)")
            return
        }

        if pcId == -1 {
            LKAudioProcessingManager.sharedInstance().removeLocalAudioRenderer(renderer)
        } else {
            track.remove(renderer)
        }
    }
}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Copyright 2025 LiveKit
|
|
3
|
+
*
|
|
4
|
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
* you may not use this file except in compliance with the License.
|
|
6
|
+
* You may obtain a copy of the License at
|
|
7
|
+
*
|
|
8
|
+
* http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
*
|
|
10
|
+
* Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
* See the License for the specific language governing permissions and
|
|
14
|
+
* limitations under the License.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import Accelerate
|
|
18
|
+
import AVFoundation
|
|
19
|
+
|
|
20
|
+
extension Float {
    /// Half of this value — the Nyquist frequency when `self` is a sample rate in Hz.
    var nyquistFrequency: Float { self / 2.0 }
}
|
|
23
|
+
|
|
24
|
+
/// Result of grouping an FFT magnitude spectrum into frequency bands.
public struct FFTComputeBandsResult {
    // Number of bands (equals the `bandsCount` passed to `computeBands`).
    let count: Int
    // Per-band magnitude, averaged over the band's FFT bins (dB).
    let magnitudes: [Float]
    // Per-band center frequency in Hz.
    let frequencies: [Float]
}
|
|
29
|
+
|
|
30
|
+
/// Wraps the magnitude spectrum produced by `FFTProcessor` (values in dB).
public class FFTResult {
    public let magnitudes: [Float]

    init(magnitudes: [Float]) {
        self.magnitudes = magnitudes
    }

    /// Groups the magnitude spectrum into `bandsCount` bands spanning
    /// `minFrequency`...`maxFrequency` (capped at the Nyquist frequency).
    /// - Returns: per-band averaged magnitudes and band center frequencies (Hz).
    func computeBands(minFrequency: Float, maxFrequency: Float, bandsCount: Int, sampleRate: Float) -> FFTComputeBandsResult {
        let actualMaxFrequency = min(sampleRate.nyquistFrequency, maxFrequency)
        var bandMagnitudes = [Float](repeating: 0.0, count: bandsCount)
        var bandFrequencies = [Float](repeating: 0.0, count: bandsCount)

        let magLowerRange = _magnitudeIndex(for: minFrequency, sampleRate: sampleRate)
        let magUpperRange = _magnitudeIndex(for: actualMaxFrequency, sampleRate: sampleRate)
        let ratio = Float(magUpperRange - magLowerRange) / Float(bandsCount)

        // Hoisted loop-invariant: width in Hz represented by a single FFT bin.
        let bandwidth = sampleRate.nyquistFrequency / Float(magnitudes.count)

        return magnitudes.withUnsafeBufferPointer { magnitudesPtr in
            for i in 0 ..< bandsCount {
                let magsStartIdx = vDSP_Length(floorf(Float(i) * ratio)) + magLowerRange
                let magsEndIdx = vDSP_Length(floorf(Float(i + 1) * ratio)) + magLowerRange

                let count = magsEndIdx - magsStartIdx
                if count > 0 {
                    // Average the band's bins with a vectorized sum.
                    var sum: Float = 0
                    vDSP_sve(magnitudesPtr.baseAddress! + Int(magsStartIdx), 1, &sum, count)
                    bandMagnitudes[i] = sum / Float(count)
                } else {
                    // Empty band: fall back to the nearest single bin. Clamp the
                    // index — when the start lands at magnitudes.count (possible
                    // for the last band when maxFrequency == Nyquist) the
                    // unclamped subscript would read out of bounds.
                    bandMagnitudes[i] = magnitudes[min(Int(magsStartIdx), magnitudes.count - 1)]
                }

                // Band center frequency: midpoint of the band's bin range.
                bandFrequencies[i] = (bandwidth * Float(magsStartIdx) + bandwidth * Float(magsEndIdx)) / 2
            }

            return FFTComputeBandsResult(count: bandsCount, magnitudes: bandMagnitudes, frequencies: bandFrequencies)
        }
    }

    /// Maps a frequency (Hz) to its bin index within `magnitudes`.
    @inline(__always) private func _magnitudeIndex(for frequency: Float, sampleRate: Float) -> vDSP_Length {
        vDSP_Length(Float(magnitudes.count) * frequency / sampleRate.nyquistFrequency)
    }
}
|
|
73
|
+
|
|
74
|
+
/// Runs a radix-2 real FFT over fixed-size Float buffers using Accelerate's
/// vDSP, returning the magnitude spectrum converted to decibels.
class FFTProcessor {
    /// Window function applied to the input before the transform.
    public enum WindowType {
        case none
        case hanning
        case hamming
    }

    public let bufferSize: vDSP_Length
    public let windowType: WindowType

    // Half the buffer size — the number of complex bins / output magnitudes.
    private let bufferHalfSize: vDSP_Length
    private let bufferLog2Size: vDSP_Length
    private var window: [Float] = []
    private var fftSetup: FFTSetup
    private var realBuffer: [Float]
    private var imaginaryBuffer: [Float]
    // 0 dB reference passed to vDSP_vdbcon.
    private var zeroDBReference: Float = 1.0

    /// - Parameter bufferSize: samples per `process` call; assumed to be a
    ///   power of two (required by the radix-2 FFT setup) — TODO confirm
    ///   callers guarantee this.
    init(bufferSize: Int, windowType: WindowType = .hanning) {
        self.bufferSize = vDSP_Length(bufferSize)
        self.windowType = windowType

        bufferHalfSize = vDSP_Length(bufferSize / 2)
        bufferLog2Size = vDSP_Length(log2f(Float(bufferSize)))

        realBuffer = [Float](repeating: 0.0, count: Int(bufferHalfSize))
        imaginaryBuffer = [Float](repeating: 0.0, count: Int(bufferHalfSize))
        window = [Float](repeating: 1.0, count: Int(bufferSize))

        fftSetup = vDSP_create_fftsetup(UInt(bufferLog2Size), FFTRadix(FFT_RADIX2))!

        // Fill the window; `.none` keeps the all-ones (rectangular) window.
        switch windowType {
        case .none:
            break
        case .hanning:
            vDSP_hann_window(&window, vDSP_Length(bufferSize), Int32(vDSP_HANN_NORM))
        case .hamming:
            vDSP_hamm_window(&window, vDSP_Length(bufferSize), 0)
        }
    }

    deinit {
        // FFTSetup is a manually managed vDSP resource; release it here.
        vDSP_destroy_fftsetup(fftSetup)
    }

    /// Performs the forward FFT on `buffer` and returns per-bin magnitudes in
    /// dB relative to `zeroDBReference`.
    /// - Precondition: `buffer.count` equals the configured buffer size.
    func process(buffer: [Float]) -> FFTResult {
        precondition(buffer.count == Int(bufferSize), "Input buffer size mismatch.")

        var windowedBuffer = [Float](repeating: 0.0, count: Int(bufferSize))

        // Apply the window function element-wise.
        vDSP_vmul(buffer, 1, window, 1, &windowedBuffer, 1, bufferSize)

        return realBuffer.withUnsafeMutableBufferPointer { realPtr in
            imaginaryBuffer.withUnsafeMutableBufferPointer { imagPtr in
                var complexBuffer = DSPSplitComplex(realp: realPtr.baseAddress!, imagp: imagPtr.baseAddress!)

                // Reinterpret the real input as interleaved complex pairs and
                // split it into the real/imaginary planes vDSP expects.
                windowedBuffer.withUnsafeBufferPointer { bufferPtr in
                    let complexPtr = UnsafeRawPointer(bufferPtr.baseAddress!).bindMemory(to: DSPComplex.self, capacity: Int(bufferHalfSize))
                    vDSP_ctoz(complexPtr, 2, &complexBuffer, 1, bufferHalfSize)
                }

                // In-place forward real FFT.
                vDSP_fft_zrip(fftSetup, &complexBuffer, 1, bufferLog2Size, FFTDirection(FFT_FORWARD))

                var magnitudes = [Float](repeating: 0.0, count: Int(bufferHalfSize))
                vDSP_zvabs(&complexBuffer, 1, &magnitudes, 1, bufferHalfSize)

                // Convert magnitudes to decibels
                vDSP_vdbcon(magnitudes, 1, &zeroDBReference, &magnitudes, 1, vDSP_Length(magnitudes.count), 1)

                return FFTResult(magnitudes: magnitudes)
            }
        }
    }
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import livekit_react_native_webrtc
|
|
2
|
+
import React
|
|
3
|
+
|
|
4
|
+
@objc
public class MultibandVolumeAudioRenderer: BaseMultibandVolumeAudioRenderer {
    private let eventEmitter: RCTEventEmitter

    /// Identifies this renderer in events sent to JS; while nil, events are dropped.
    @objc
    public var reactTag: String? = nil

    @objc
    public init(
        bands: Int,
        minFrequency: Float,
        maxFrequency: Float,
        intervalMs: Float,
        eventEmitter: RCTEventEmitter
    ) {
        self.eventEmitter = eventEmitter
        super.init(bands: bands,
                   minFrequency: minFrequency,
                   maxFrequency: maxFrequency,
                   intervalMs: intervalMs)
    }

    /// Forwards each non-empty magnitude frame to JS, tagged with `reactTag`.
    override func onMagnitudesCalculated(_ magnitudes: [Float]) {
        guard let tag = reactTag, !magnitudes.isEmpty else { return }
        let body: [String: Any] = [
            "magnitudes": magnitudes,
            "id": tag
        ]
        eventEmitter.sendEvent(withName: LKEvents.kEventMultibandProcessed, body: body)
    }
}
|
|
36
|
+
|
|
37
|
+
/// Base renderer that throttles incoming PCM buffers and converts them into
/// per-band magnitudes via `AudioVisualizeProcessor`.
public class BaseMultibandVolumeAudioRenderer: NSObject, RTCAudioRenderer {
    private let frameInterval: Int
    private var skippedFrames = 0
    private let audioProcessor: AudioVisualizeProcessor

    init(
        bands: Int,
        minFrequency: Float,
        maxFrequency: Float,
        intervalMs: Float
    ) {
        // Derives a buffer-count interval from intervalMs; assumes ~10 ms
        // buffers — TODO confirm against the capture pipeline.
        frameInterval = Int((intervalMs / 10.0).rounded())
        audioProcessor = AudioVisualizeProcessor(minFrequency: minFrequency, maxFrequency: maxFrequency, bandsCount: bands)
    }

    public func render(pcmBuffer: AVAudioPCMBuffer) {
        // Throttle: skip buffers until the configured interval has elapsed.
        guard skippedFrames >= frameInterval - 1 else {
            skippedFrames += 1
            return
        }
        skippedFrames = 0

        if let magnitudes = audioProcessor.process(pcmBuffer: pcmBuffer) {
            onMagnitudesCalculated(magnitudes)
        }
    }

    /// Override point; called with each processed magnitude frame.
    func onMagnitudesCalculated(_ magnitudes: [Float]) { }
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Copyright 2025 LiveKit
|
|
3
|
+
*
|
|
4
|
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
* you may not use this file except in compliance with the License.
|
|
6
|
+
* You may obtain a copy of the License at
|
|
7
|
+
*
|
|
8
|
+
* http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
*
|
|
10
|
+
* Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
* See the License for the specific language governing permissions and
|
|
14
|
+
* limitations under the License.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import Foundation
|
|
18
|
+
|
|
19
|
+
// Simple ring-buffer used for internal audio processing. Not thread-safe.
|
|
20
|
+
// Simple ring-buffer used for internal audio processing. Not thread-safe.
class RingBuffer<T: Numeric> {
    // True once the buffer has wrapped at least once (i.e. holds `size` values).
    private var _isFull = false
    private var _buffer: [T]
    // Next write position; also the oldest element once full.
    private var _head: Int = 0

    /// - Parameter size: capacity; must be positive.
    init(size: Int) {
        // Fail fast: a zero/negative size would otherwise surface later as a
        // modulo-by-zero crash inside `write`.
        precondition(size > 0, "RingBuffer size must be positive")
        _buffer = [T](repeating: 0, count: size)
    }

    /// Appends one value, overwriting the oldest once the buffer has wrapped.
    func write(_ value: T) {
        _buffer[_head] = value
        _head = (_head + 1) % _buffer.count
        if _head == 0 { _isFull = true }
    }

    /// Appends each element of `sequence` in order.
    func write(_ sequence: [T]) {
        for value in sequence {
            write(value)
        }
    }

    /// Returns the contents in oldest-to-newest order, or `nil` until the
    /// buffer has been completely filled at least once.
    func read() -> [T]? {
        guard _isFull else { return nil }

        if _head == 0 {
            return _buffer // Already ordered when the head wrapped to the start
        }
        // Rotate so the oldest element (at _head) comes first.
        return Array(_buffer[_head...] + _buffer[..<_head])
    }
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import livekit_react_native_webrtc
|
|
2
|
+
import React
|
|
3
|
+
|
|
4
|
+
@objc
public class VolumeAudioRenderer: BaseVolumeAudioRenderer {
    private let eventEmitter: RCTEventEmitter

    /// Identifies this renderer in events sent to JS; while nil, events are dropped.
    @objc
    public var reactTag: String? = nil

    @objc
    public init(intervalMs: Double, eventEmitter: RCTEventEmitter) {
        self.eventEmitter = eventEmitter
        super.init(intervalMs: intervalMs)
    }

    /// Emits the combined (cross-channel average) RMS volume to JS.
    override public func onVolumeCalculated(_ audioLevels: [AudioLevel]) {
        guard let tag = reactTag,
              let rmsAvg = audioLevels.combine()?.average
        else { return }
        eventEmitter.sendEvent(withName: LKEvents.kEventVolumeProcessed,
                               body: ["volume": rmsAvg, "id": tag])
    }
}
|
|
27
|
+
|
|
28
|
+
/// Base renderer that throttles PCM buffers and computes per-channel
/// linear-scale audio levels.
public class BaseVolumeAudioRenderer: NSObject, RTCAudioRenderer {
    private let frameInterval: Int
    private var skippedFrames = 0

    /// - Parameter intervalMs: desired processing interval; converted to a
    ///   buffer count assuming ~10 ms buffers — TODO confirm.
    public init(intervalMs: Double = 30) {
        frameInterval = Int((intervalMs / 10.0).rounded())
    }

    public func render(pcmBuffer: AVAudioPCMBuffer) {
        // Throttle: only process one of every `frameInterval` buffers.
        guard skippedFrames >= frameInterval - 1 else {
            skippedFrames += 1
            return
        }
        skippedFrames = 0

        guard let floatBuffer = pcmBuffer.convert(toCommonFormat: .pcmFormatFloat32) else { return }
        onVolumeCalculated(floatBuffer.audioLevels())
    }

    /// Override point; called with the per-channel audio levels.
    public func onVolumeCalculated(_ audioLevels: [AudioLevel]) {
    }
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.default = void 0;
|
|
7
|
+
var _reactNative = require("react-native");
|
|
8
|
+
const LINKING_ERROR = `The package '@livekit/react-native' doesn't seem to be linked. Make sure: \n\n` + _reactNative.Platform.select({
|
|
9
|
+
ios: "- You have run 'pod install'\n",
|
|
10
|
+
default: ''
|
|
11
|
+
}) + '- You rebuilt the app after installing the package\n' + '- You are not using Expo managed workflow\n';
|
|
12
|
+
const LiveKitModule = _reactNative.NativeModules.LivekitReactNativeModule ? _reactNative.NativeModules.LivekitReactNativeModule : new Proxy({}, {
|
|
13
|
+
get() {
|
|
14
|
+
throw new Error(LINKING_ERROR);
|
|
15
|
+
}
|
|
16
|
+
});
|
|
17
|
+
var _default = exports.default = LiveKitModule;
|
|
18
|
+
//# sourceMappingURL=LKNativeModule.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["_reactNative","require","LINKING_ERROR","Platform","select","ios","default","LiveKitModule","NativeModules","LivekitReactNativeModule","Proxy","get","Error","_default","exports"],"sources":["LKNativeModule.ts"],"sourcesContent":["import { NativeModules, Platform } from 'react-native';\nconst LINKING_ERROR =\n `The package '@livekit/react-native' doesn't seem to be linked. Make sure: \\n\\n` +\n Platform.select({ ios: \"- You have run 'pod install'\\n\", default: '' }) +\n '- You rebuilt the app after installing the package\\n' +\n '- You are not using Expo managed workflow\\n';\n\nconst LiveKitModule = NativeModules.LivekitReactNativeModule\n ? NativeModules.LivekitReactNativeModule\n : new Proxy(\n {},\n {\n get() {\n throw new Error(LINKING_ERROR);\n },\n }\n );\n\nexport default LiveKitModule;\n"],"mappings":";;;;;;AAAA,IAAAA,YAAA,GAAAC,OAAA;AACA,MAAMC,aAAa,GACjB,gFAAgF,GAChFC,qBAAQ,CAACC,MAAM,CAAC;EAAEC,GAAG,EAAE,gCAAgC;EAAEC,OAAO,EAAE;AAAG,CAAC,CAAC,GACvE,sDAAsD,GACtD,6CAA6C;AAE/C,MAAMC,aAAa,GAAGC,0BAAa,CAACC,wBAAwB,GACxDD,0BAAa,CAACC,wBAAwB,GACtC,IAAIC,KAAK,CACP,CAAC,CAAC,EACF;EACEC,GAAGA,CAAA,EAAG;IACJ,MAAM,IAAIC,KAAK,CAACV,aAAa,CAAC;EAChC;AACF,CACF,CAAC;AAAC,IAAAW,QAAA,GAAAC,OAAA,CAAAR,OAAA,GAESC,aAAa","ignoreList":[]}
|
|
@@ -6,19 +6,11 @@ Object.defineProperty(exports, "__esModule", {
|
|
|
6
6
|
exports.default = exports.AndroidAudioTypePresets = void 0;
|
|
7
7
|
exports.getDefaultAppleAudioConfigurationForMode = getDefaultAppleAudioConfigurationForMode;
|
|
8
8
|
var _reactNative = require("react-native");
|
|
9
|
+
var _LKNativeModule = _interopRequireDefault(require("../LKNativeModule"));
|
|
10
|
+
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
|
9
11
|
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
|
|
10
12
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == typeof i ? i : i + ""; }
|
|
11
13
|
function _toPrimitive(t, r) { if ("object" != typeof t || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != typeof i) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
|
12
|
-
const LINKING_ERROR = `The package '@livekit/react-native' doesn't seem to be linked. Make sure: \n\n` + _reactNative.Platform.select({
|
|
13
|
-
ios: "- You have run 'pod install'\n",
|
|
14
|
-
default: ''
|
|
15
|
-
}) + '- You rebuilt the app after installing the package\n' + '- You are not using Expo managed workflow\n';
|
|
16
|
-
const LivekitReactNative = _reactNative.NativeModules.LivekitReactNative ? _reactNative.NativeModules.LivekitReactNative : new Proxy({}, {
|
|
17
|
-
get() {
|
|
18
|
-
throw new Error(LINKING_ERROR);
|
|
19
|
-
}
|
|
20
|
-
});
|
|
21
|
-
|
|
22
14
|
/**
|
|
23
15
|
* Configuration for the underlying AudioSession.
|
|
24
16
|
*
|
|
@@ -95,19 +87,19 @@ exports.default = AudioSession;
|
|
|
95
87
|
* See also useIOSAudioManagement for automatic configuration of iOS audio options.
|
|
96
88
|
*/
|
|
97
89
|
_defineProperty(AudioSession, "configureAudio", async config => {
|
|
98
|
-
await
|
|
90
|
+
await _LKNativeModule.default.configureAudio(config);
|
|
99
91
|
});
|
|
100
92
|
/**
|
|
101
93
|
* Starts an AudioSession.
|
|
102
94
|
*/
|
|
103
95
|
_defineProperty(AudioSession, "startAudioSession", async () => {
|
|
104
|
-
await
|
|
96
|
+
await _LKNativeModule.default.startAudioSession();
|
|
105
97
|
});
|
|
106
98
|
/**
|
|
107
99
|
* Stops the existing AudioSession.
|
|
108
100
|
*/
|
|
109
101
|
_defineProperty(AudioSession, "stopAudioSession", async () => {
|
|
110
|
-
await
|
|
102
|
+
await _LKNativeModule.default.stopAudioSession();
|
|
111
103
|
});
|
|
112
104
|
/**
|
|
113
105
|
* Gets the available audio outputs for use with {@link selectAudioOutput}.
|
|
@@ -137,7 +129,7 @@ _defineProperty(AudioSession, "getAudioOutputs", async () => {
|
|
|
137
129
|
if (_reactNative.Platform.OS === 'ios') {
|
|
138
130
|
return ['default', 'force_speaker'];
|
|
139
131
|
} else if (_reactNative.Platform.OS === 'android') {
|
|
140
|
-
return await
|
|
132
|
+
return await _LKNativeModule.default.getAudioOutputs();
|
|
141
133
|
} else {
|
|
142
134
|
return [];
|
|
143
135
|
}
|
|
@@ -150,7 +142,7 @@ _defineProperty(AudioSession, "getAudioOutputs", async () => {
|
|
|
150
142
|
* @param deviceId A deviceId retrieved from {@link getAudioOutputs}
|
|
151
143
|
*/
|
|
152
144
|
_defineProperty(AudioSession, "selectAudioOutput", async deviceId => {
|
|
153
|
-
await
|
|
145
|
+
await _LKNativeModule.default.selectAudioOutput(deviceId);
|
|
154
146
|
});
|
|
155
147
|
/**
|
|
156
148
|
* iOS only, requires iOS 11+.
|
|
@@ -159,7 +151,7 @@ _defineProperty(AudioSession, "selectAudioOutput", async deviceId => {
|
|
|
159
151
|
*/
|
|
160
152
|
_defineProperty(AudioSession, "showAudioRoutePicker", async () => {
|
|
161
153
|
if (_reactNative.Platform.OS === 'ios') {
|
|
162
|
-
await
|
|
154
|
+
await _LKNativeModule.default.showAudioRoutePicker();
|
|
163
155
|
}
|
|
164
156
|
});
|
|
165
157
|
/**
|
|
@@ -170,7 +162,7 @@ _defineProperty(AudioSession, "showAudioRoutePicker", async () => {
|
|
|
170
162
|
*/
|
|
171
163
|
_defineProperty(AudioSession, "setAppleAudioConfiguration", async config => {
|
|
172
164
|
if (_reactNative.Platform.OS === 'ios') {
|
|
173
|
-
await
|
|
165
|
+
await _LKNativeModule.default.setAppleAudioConfiguration(config);
|
|
174
166
|
}
|
|
175
167
|
});
|
|
176
168
|
//# sourceMappingURL=AudioSession.js.map
|