@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,356 @@
1
+ import AVFoundation
2
+ import ExpoModulesCore
3
+ import Accelerate
4
+
5
/// Errors surfaced by the sound-playback path.
public enum SoundPlayerError: Error {
    /// The supplied string was not valid base64.
    case invalidBase64String
    /// Playback could not be started.
    case couldNotPlayAudio
    /// Decoding the audio payload failed; `details` carries the reason.
    case decodeError(details: String)
    /// The audio payload's format is not supported.
    case unsupportedFormat
}
11
+
12
/// Errors raised while decoding raw audio payloads
/// (thrown by the base64 decode in `processPCM16LEAudioChunk`).
enum AudioProcessingError: Error {
    /// Base64 decoding of the input string failed.
    case invalidBase64
}
15
+
16
/// Stateless helpers for decoding, converting, resampling and analysing PCM audio.
///
/// All members are `static`; the class carries no state.
class AudioUtils {

    /// Strips a canonical 44-byte RIFF/WAV header when present.
    ///
    /// NOTE(review): assumes the fixed 44-byte layout (standard PCM header with
    /// no extra chunks). For chunk-aware parsing use `removeWavHeader(from:)`.
    /// - Parameter audioData: Raw audio bytes that may start with "RIFF".
    /// - Returns: The payload without the header, or the input unchanged when no
    ///   header is detected. Never actually returns nil (optional kept for
    ///   interface compatibility).
    static func removeRIFFHeaderIfNeeded(from audioData: Data) -> Data? {
        let headerSize = 44 // canonical PCM WAV header length
        guard audioData.count > headerSize,
              audioData.starts(with: "RIFF".data(using: .ascii)!) else {
            return audioData
        }
        return audioData.subdata(in: headerSize..<audioData.count)
    }

    /// Converts little-endian Int16 PCM bytes into a Float32 `AVAudioPCMBuffer`.
    ///
    /// NOTE(review): only the first channel (`floatChannelData.pointee`) is
    /// written, so `audioFormat` is assumed to be mono Float32 — confirm at
    /// call sites.
    /// - Parameters:
    ///   - pcmData: Raw 16-bit PCM samples (2 bytes per sample).
    ///   - audioFormat: Target format used to allocate the buffer.
    /// - Returns: A buffer with samples normalised to [-1.0, 1.0), or nil when
    ///   allocation fails.
    static func convertPCMDataToBuffer(_ pcmData: Data, audioFormat: AVAudioFormat) -> AVAudioPCMBuffer? {
        let sampleCount = pcmData.count / 2 // 2 bytes per Int16 sample
        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: AVAudioFrameCount(sampleCount)) else {
            print("Failed to create audio buffer.")
            return nil
        }

        // Stage the raw bytes as Int16 (native byte order; Apple platforms are
        // little-endian, matching the expected input).
        var int16Samples = [Int16](repeating: 0, count: sampleCount)
        _ = int16Samples.withUnsafeMutableBytes { buffer in
            pcmData.copyBytes(to: buffer)
        }

        // Normalise Int16 -> Float32 in [-1.0, 1.0).
        let floatSamples = int16Samples.map { Float($0) / 32768.0 }

        pcmBuffer.frameLength = pcmBuffer.frameCapacity
        if let channelData = pcmBuffer.floatChannelData {
            for i in 0..<floatSamples.count {
                channelData.pointee[i] = floatSamples[i]
            }
        }

        return pcmBuffer
    }

    /// Resamples the audio buffer using vDSP interpolation.
    /// - Parameters:
    ///   - buffer: The original Float32 audio buffer to be resampled.
    ///   - originalSampleRate: The sample rate of the original audio buffer.
    ///   - targetSampleRate: The desired sample rate to resample to.
    /// - Returns: A new audio buffer resampled to the target sample rate, or
    ///   nil if resampling fails.
    static func resampleAudioBuffer(_ buffer: AVAudioPCMBuffer, from originalSampleRate: Double, to targetSampleRate: Double) -> AVAudioPCMBuffer? {
        guard let channelData = buffer.floatChannelData else { return nil }

        let sourceFrameCount = Int(buffer.frameLength)
        let sourceChannels = Int(buffer.format.channelCount)
        // Degenerate inputs cannot be resampled.
        guard sourceFrameCount > 0, originalSampleRate > 0, targetSampleRate > 0 else { return nil }

        // Number of frames in the target buffer.
        let targetFrameCount = Int(Double(sourceFrameCount) * targetSampleRate / originalSampleRate)
        guard targetFrameCount > 0,
              let targetBuffer = AVAudioPCMBuffer(pcmFormat: buffer.format, frameCapacity: AVAudioFrameCount(targetFrameCount)) else {
            return nil
        }
        targetBuffer.frameLength = AVAudioFrameCount(targetFrameCount)

        let resamplingFactor = Float(targetSampleRate / originalSampleRate)

        // Control-point positions for vDSP_vgenp: source sample j sits at output
        // position j * resamplingFactor. Building exactly `sourceFrameCount`
        // entries keeps the index array in lockstep with the value array and the
        // M argument below — the previous stride-based construction produced a
        // different element count and read past the end of the indices array
        // whenever upsampling (factor > 1).
        var indices = [Float](repeating: 0, count: sourceFrameCount)
        for j in 0..<sourceFrameCount { indices[j] = Float(j) * resamplingFactor }

        for channel in 0..<sourceChannels {
            let input = UnsafeBufferPointer(start: channelData[channel], count: sourceFrameCount)
            let output = UnsafeMutableBufferPointer(start: targetBuffer.floatChannelData![channel], count: targetFrameCount)

            var y = [Float](repeating: 0, count: targetFrameCount) // scratch for resampled data

            indices.withUnsafeBufferPointer { indicesPtr in
                // vDSP_vgenp interpolates the M control points (values=input,
                // positions=indices) onto N output positions 0..<N.
                vDSP_vgenp(input.baseAddress!, 1, indicesPtr.baseAddress!, 1, &y, 1,
                           vDSP_Length(targetFrameCount), vDSP_Length(sourceFrameCount))
            }

            for i in 0..<targetFrameCount {
                output[i] = y[i]
            }
        }
        return targetBuffer
    }

    /// Converts `buffer` to non-interleaved Int16 PCM at the desired sample
    /// rate and channel count.
    ///
    /// NOTE(review): the intermediate input format is declared as mono
    /// (`channels: 1`) regardless of the buffer's own channel count, mirroring
    /// the original implementation — confirm this matches upstream callers.
    /// - Parameters:
    ///   - buffer: Source buffer to convert.
    ///   - sampleRate: Desired output sample rate.
    ///   - channels: Desired output channel count.
    ///   - bitDepth: Source bit depth (16 or 32; defaults to 16).
    /// - Returns: The converted buffer, or nil when conversion fails.
    static func tryConvertToFormat(
        inputBuffer buffer: AVAudioPCMBuffer,
        desiredSampleRate sampleRate: Double,
        desiredChannel channels: AVAudioChannelCount,
        bitDepth: Int? = nil
    ) -> AVAudioPCMBuffer? {
        var error: NSError? = nil
        let depth = bitDepth ?? 16
        let commonFormat: AVAudioCommonFormat = getCommonFormat(depth: depth)
        guard let nativeInputFormat = AVAudioFormat(commonFormat: commonFormat, sampleRate: buffer.format.sampleRate, channels: 1, interleaved: true) else {
            Logger.debug("AudioSessionManager: Failed to convert to desired format. AudioFormat is corrupted.")
            return nil
        }
        // Avoid force-unwraps: either construction can fail for exotic formats.
        guard let desiredFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: channels, interleaved: false),
              let inputAudioConverter = AVAudioConverter(from: nativeInputFormat, to: desiredFormat) else {
            Logger.debug("AudioSessionManager: Failed to create converter for desired format.")
            return nil
        }

        // Size the output for the whole input instead of a fixed 1024 frames,
        // which silently truncated anything longer (fix). Keep 1024 as a floor
        // so short buffers behave exactly as before.
        let ratio = sampleRate / buffer.format.sampleRate
        let capacity = max(1024, AVAudioFrameCount((Double(buffer.frameLength) * ratio).rounded(.up)))
        guard let convertedBuffer = AVAudioPCMBuffer(pcmFormat: desiredFormat, frameCapacity: capacity) else {
            return nil
        }

        // Feed the source buffer exactly once. The previous input block returned
        // the same buffer with `.haveData` on every callback (duplicating data)
        // and mutated the caller's `frameLength` (fix).
        var suppliedInput = false
        let status = inputAudioConverter.convert(to: convertedBuffer, error: &error, withInputFrom: { _, outStatus in
            if suppliedInput {
                outStatus.pointee = .noDataNow
                return nil
            }
            suppliedInput = true
            outStatus.pointee = .haveData
            return buffer
        })
        if status == .haveData || status == .endOfStream {
            return convertedBuffer
        }
        if let error = error {
            Logger.debug("AudioSessionManager: Conversion failed: \(error)")
        }
        return nil
    }

    /// Maps a bit depth to the matching `AVAudioCommonFormat`.
    /// Unsupported depths fall back to 16-bit PCM (logged).
    /// - Parameter depth: Bit depth of the source PCM data (16 or 32).
    /// - Returns: `.pcmFormatInt16` or `.pcmFormatInt32`.
    static func getCommonFormat(depth: Int) -> AVAudioCommonFormat {
        switch depth {
        case 16:
            return .pcmFormatInt16
        case 32:
            return .pcmFormatInt32
        default:
            Logger.debug("Unsupported bit depth. Defaulting to 16-bit PCM")
            return .pcmFormatInt16
        }
    }

    /// Computes the channel-averaged RMS power of `buffer` in decibels.
    /// - Parameter buffer: A Float32 or Int16 PCM buffer.
    /// - Returns: Power in dB, or -160.0 for silence, empty buffers, or
    ///   unsupported sample formats.
    static func calculatePowerLevel(from buffer: AVAudioPCMBuffer) -> Float {
        let format = buffer.format.commonFormat
        let length = Int(buffer.frameLength)
        let channelCount = Int(buffer.format.channelCount)

        // An empty buffer would otherwise divide by zero below.
        guard length > 0, channelCount > 0 else { return -160.0 }

        var totalRMS: Float = 0.0

        if format == .pcmFormatFloat32, let channelData = buffer.floatChannelData {
            // Float32 PCM: samples are already normalised.
            for channel in 0..<channelCount {
                let data = channelData[channel]
                var sum: Float = 0.0
                for sample in 0..<length {
                    sum += data[sample] * data[sample]
                }
                totalRMS += sqrt(sum / Float(length))
            }
        } else if format == .pcmFormatInt16, let channelData = buffer.int16ChannelData {
            // Int16 PCM: normalise each sample to [-1.0, 1.0] before squaring.
            for channel in 0..<channelCount {
                let data = channelData[channel]
                var sum: Float = 0.0
                for sample in 0..<length {
                    let normalizedSample = Float(data[sample]) / Float(Int16.max)
                    sum += normalizedSample * normalizedSample
                }
                totalRMS += sqrt(sum / Float(length))
            }
        } else {
            return -160.0 // unsupported sample format
        }

        let avgRMS = totalRMS / Float(channelCount)
        return avgRMS > 0 ? 20 * log10(avgRMS) : -160.0 // dB floor for silence
    }

    /// Removes a WAV/RIFF header by walking the chunk list to the "data" chunk.
    /// - Parameter data: The input audio data that might contain a WAV/RIFF header.
    /// - Returns: The PCM payload following the "data" chunk header, the
    ///   original data if it is not a WAV file, or nil if the container is a
    ///   WAV file with no "data" chunk.
    static func removeWavHeader(from data: Data) -> Data? {
        // Not a WAV container (or too short to hold a full header): pass through.
        guard data.count >= 44, isWavFormat(data) else {
            return data
        }

        // Walk chunks, starting after "RIFF<size>WAVE".
        var offset = 12
        while offset < data.count - 8 { // need at least 8 bytes for a chunk header
            let chunkID = String(data: data.subdata(in: offset..<offset + 4), encoding: .ascii) ?? ""
            // Assemble the little-endian chunk size byte-by-byte; the previous
            // `load(as: UInt32.self)` traps on unaligned offsets (fix).
            let sizeBytes = data.subdata(in: (offset + 4)..<(offset + 8))
            let chunkSize = Int(sizeBytes[0])
                | (Int(sizeBytes[1]) << 8)
                | (Int(sizeBytes[2]) << 16)
                | (Int(sizeBytes[3]) << 24)

            if chunkID == "data" {
                // Found the data chunk; return everything after its 8-byte header.
                let dataStart = offset + 8
                return data.subdata(in: dataStart..<data.count)
            }

            // Advance past this chunk (8-byte header + payload), padded to an
            // even byte boundary per the RIFF spec.
            offset += 8 + chunkSize
            if chunkSize % 2 != 0 { offset += 1 }
        }

        Logger.debug("[AudioUtils] Failed to find data chunk in WAV file")
        return nil
    }

    /// Checks whether `data` begins with a "RIFF....WAVE" container header.
    /// - Parameter data: The data to check.
    /// - Returns: true if data starts with RIFF....WAVE.
    static private func isWavFormat(_ data: Data) -> Bool {
        // dropFirst/prefix are index-safe even if `data` is a slice with a
        // non-zero startIndex, unlike absolute subscripting.
        guard data.count >= 12,
              let riffString = String(data: data.prefix(4), encoding: .ascii),
              riffString == "RIFF",
              let waveString = String(data: data.dropFirst(8).prefix(4), encoding: .ascii),
              waveString == "WAVE" else {
            return false
        }
        return true
    }

    /// Processes a raw Float32LE (pcm_f32le) base64 encoded audio chunk and
    /// converts it to an AVAudioPCMBuffer.
    ///
    /// NOTE(review): only channel 0 of the buffer is written, so `audioFormat`
    /// is assumed to be mono — confirm at call sites.
    /// - Parameters:
    ///   - base64String: Base64 encoded raw Float32LE PCM audio data or WAV
    ///     file (automatically detected).
    ///   - audioFormat: Target audio format for the buffer (must be Float32).
    /// - Returns: AVAudioPCMBuffer containing the processed audio data, or nil
    ///   if processing fails.
    static func processFloat32LEAudioChunk(_ base64String: String, audioFormat: AVAudioFormat) -> AVAudioPCMBuffer? {
        guard audioFormat.commonFormat == .pcmFormatFloat32 else {
            Logger.debug("[AudioUtils] Invalid format: expected Float32 format")
            return nil
        }

        guard let data = Data(base64Encoded: base64String) else {
            Logger.debug("[AudioUtils] Failed to decode base64 string")
            return nil
        }

        // Strip a WAV header when present; otherwise treat as raw PCM.
        let audioData: Data
        if isWavFormat(data) {
            Logger.debug("[AudioUtils] WAV format detected, removing header")
            guard let pcmData = removeWavHeader(from: data) else {
                Logger.debug("[AudioUtils] Failed to process WAV header")
                return nil
            }
            audioData = pcmData
        } else {
            Logger.debug("[AudioUtils] Raw PCM format detected")
            audioData = data
        }

        let frameCount = AVAudioFrameCount(audioData.count / 4) // 4 bytes per Float32 sample
        let intFrameCount = Int(frameCount)
        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: frameCount) else {
            Logger.debug("[AudioUtils] Failed to create audio buffer")
            return nil
        }

        // Copy float samples directly from the decoded bytes (native byte
        // order; Apple platforms are little-endian, matching pcm_f32le).
        pcmBuffer.frameLength = frameCount
        if let channelData = pcmBuffer.floatChannelData {
            audioData.withUnsafeBytes { (bytes: UnsafeRawBufferPointer) -> Void in
                guard let addr = bytes.baseAddress else { return }
                let ptr = addr.assumingMemoryBound(to: Float.self)
                for i in 0..<intFrameCount {
                    channelData.pointee[i] = ptr[i]
                }
            }
        }

        return pcmBuffer
    }

    /// Processes a raw PCM_S16LE (16-bit Little Endian) base64 encoded audio
    /// chunk and converts it to an AVAudioPCMBuffer.
    ///
    /// NOTE(review): only channel 0 of the buffer is written, so `audioFormat`
    /// is assumed to be mono — confirm at call sites.
    /// - Parameters:
    ///   - base64String: Base64 encoded raw PCM_S16LE audio data (a WAV header
    ///     is detected and stripped automatically).
    ///   - audioFormat: Target audio format for the buffer (must be Float32).
    /// - Returns: AVAudioPCMBuffer containing the processed audio data, or nil
    ///   if processing fails.
    static func processPCM16LEAudioChunk(_ base64String: String, audioFormat: AVAudioFormat) -> AVAudioPCMBuffer? {
        guard audioFormat.commonFormat == .pcmFormatFloat32 else {
            Logger.debug("[AudioUtils] Invalid format: expected Float32 format")
            return nil
        }

        // Reject oversized payloads up front to bound memory use.
        guard base64String.count < 500_000 else {
            Logger.debug("[AudioUtils] Base64 string too large: \(base64String.count) characters")
            return nil
        }

        // Decode inside an autoreleasepool so intermediate Foundation objects
        // are released immediately.
        let data: Data
        do {
            data = try autoreleasepool {
                guard let decodedData = Data(base64Encoded: base64String) else {
                    throw AudioProcessingError.invalidBase64
                }
                return decodedData
            }
        } catch {
            Logger.debug("[AudioUtils] Failed to decode base64 string")
            return nil
        }

        // Validate decoded payload size (non-empty, bounded).
        guard data.count > 0 && data.count < 2_000_000 else {
            Logger.debug("[AudioUtils] Invalid decoded data size: \(data.count) bytes")
            return nil
        }

        // Strip a WAV header when present; otherwise treat as raw PCM.
        let audioData: Data
        if isWavFormat(data) {
            Logger.debug("[AudioUtils] WAV format detected, removing header")
            guard let pcmData = removeWavHeader(from: data) else {
                Logger.debug("[AudioUtils] Failed to process WAV header")
                return nil
            }
            audioData = pcmData
        } else {
            Logger.debug("[AudioUtils] Raw PCM format detected")
            audioData = data
        }

        let frameCount = AVAudioFrameCount(audioData.count / 2) // 2 bytes per Int16 sample
        let intFrameCount = Int(frameCount)
        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: frameCount) else {
            Logger.debug("[AudioUtils] Failed to create audio buffer")
            return nil
        }

        pcmBuffer.frameLength = frameCount
        if let channelData = pcmBuffer.floatChannelData {
            audioData.withUnsafeBytes { ptr in
                guard let addr = ptr.baseAddress else { return }
                let int16ptr = addr.assumingMemoryBound(to: Int16.self)
                for i in 0..<intFrameCount {
                    // Read as little-endian Int16 and normalise to [-1.0, 1.0).
                    let int16Sample = Int16(littleEndian: int16ptr[i])
                    channelData.pointee[i] = Float(int16Sample) / 32768.0
                }
            }
        }

        return pcmBuffer
    }
}
@@ -0,0 +1,27 @@
1
require 'json'

# Pull package metadata from the package root so the podspec stays in sync
# with package.json (version, description, license, author, homepage).
package = JSON.parse(File.read(File.join(__dir__, '..', 'package.json')))

Pod::Spec.new do |s|
  s.name           = 'ExpoPlayAudioStream'
  s.version        = package['version']
  s.summary        = package['description']
  s.description    = package['description']
  s.license        = package['license']
  s.author         = package['author']
  s.homepage       = package['homepage']
  # Minimum deployment targets for the native module.
  s.platforms      = { :ios => '15.1', :tvos => '15.1' }
  s.swift_version  = '5.9'
  # NOTE(review): no :tag/:commit is pinned here, so the source points at the
  # repository head — confirm this is intentional for published versions.
  s.source         = { git: 'https://github.com/edkimmel/expo-audio-stream' }
  s.static_framework = true

  s.dependency 'ExpoModulesCore'

  # Swift/Objective-C compatibility
  s.pod_target_xcconfig = {
    'DEFINES_MODULE' => 'YES',
    'SWIFT_COMPILATION_MODE' => 'wholemodule'
  }

  # All native sources in this directory are compiled into the pod.
  s.source_files = "**/*.{h,m,swift}"
end