@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,436 @@
1
+ import Foundation
2
+ import AVFoundation
3
+ import ExpoModulesCore
4
+
5
+ let audioDataEvent: String = "AudioData"
6
+ let soundIsPlayedEvent: String = "SoundChunkPlayed"
7
+ let soundIsStartedEvent: String = "SoundStarted"
8
+ let deviceReconnectedEvent: String = "DeviceReconnected"
9
+
10
+
11
/// Expo native module exposing microphone capture, queued sound playback and
/// the low-latency audio pipeline to JavaScript. Acts as the delegate for the
/// microphone, the sound player and the pipeline, forwarding their callbacks
/// to JS as events.
public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPlayerDelegate, PipelineEventSender {
    private var _microphone: Microphone?
    private var _soundPlayer: SoundPlayer?
    private var _pipelineIntegration: PipelineIntegration?

    /// Single shared AVAudioEngine used by both SoundPlayer and AudioPipeline.
    private let sharedAudioEngine = SharedAudioEngine()

    /// Lazily creates the microphone wrapper and registers this module as the
    /// delegate that receives captured audio data.
    private var microphone: Microphone {
        if _microphone == nil {
            _microphone = Microphone()
            _microphone?.delegate = self
        }
        return _microphone!
    }

    /// Lazily creates the sound player, wiring it to this module for playback
    /// callbacks and attaching it to the shared engine.
    private var soundPlayer: SoundPlayer {
        if _soundPlayer == nil {
            _soundPlayer = SoundPlayer()
            _soundPlayer?.delegate = self
            _soundPlayer?.setSharedEngine(sharedAudioEngine)
        }
        return _soundPlayer!
    }

    /// Lazily creates the pipeline integration bound to the shared engine,
    /// with this module as its event sink.
    private var pipelineIntegration: PipelineIntegration {
        if _pipelineIntegration == nil {
            _pipelineIntegration = PipelineIntegration(eventSender: self, sharedEngine: sharedAudioEngine)
        }
        return _pipelineIntegration!
    }

    // Tracks whether the shared AVAudioSession has been configured/activated
    // by this module, so we only do it once and deactivate it on destroy.
    private var isAudioSessionInitialized: Bool = false

    // ── PipelineEventSender conformance ───────────────────────────────
    /// Forwards a pipeline event straight to JavaScript.
    func sendPipelineEvent(_ eventName: String, _ params: [String: Any]) {
        sendEvent(eventName, params)
    }

    public func definition() -> ModuleDefinition {
        Name("ExpoPlayAudioStream")

        // Defines event names that the module can send to JavaScript.
        Events([
            audioDataEvent,
            soundIsPlayedEvent,
            soundIsStartedEvent,
            deviceReconnectedEvent,
            PipelineIntegration.EVENT_STATE_CHANGED,
            PipelineIntegration.EVENT_PLAYBACK_STARTED,
            PipelineIntegration.EVENT_ERROR,
            PipelineIntegration.EVENT_ZOMBIE_DETECTED,
            PipelineIntegration.EVENT_UNDERRUN,
            PipelineIntegration.EVENT_DRAINED,
            PipelineIntegration.EVENT_AUDIO_FOCUS_LOST,
            PipelineIntegration.EVENT_AUDIO_FOCUS_RESUMED,
        ])

        /// Tears down all native resources: pipeline, shared engine, audio
        /// session, microphone and sound player.
        Function("destroy") {
            self._pipelineIntegration?.destroy()
            self._pipelineIntegration = nil
            self.sharedAudioEngine.teardown()
            if self.isAudioSessionInitialized {
                let audioSession = AVAudioSession.sharedInstance()
                try? audioSession.setActive(false, options: .notifyOthersOnDeactivation)
                self.isAudioSessionInitialized = false
            }
            self._microphone = nil
            self._soundPlayer = nil
        }

        /// Prompts the user to select the microphone mode.
        Function("promptMicrophoneModes") {
            promptForMicrophoneModes()
        }

        /// Requests microphone permission from the user.
        AsyncFunction("requestPermissionsAsync") { (promise: Promise) in
            checkMicrophonePermission { granted in
                promise.resolve([
                    "granted": granted,
                    "canAskAgain": true,
                    "status": granted ? "granted" : "denied"
                ])
            }
        }

        /// Gets the current microphone permission status without prompting.
        AsyncFunction("getPermissionsAsync") { (promise: Promise) in
            let status = AVAudioSession.sharedInstance().recordPermission
            let granted = status == .granted
            // Only an undetermined status can still be prompted.
            let canAskAgain = status == .undetermined
            promise.resolve([
                "granted": granted,
                "canAskAgain": canAskAgain,
                "status": granted ? "granted" : (canAskAgain ? "undetermined" : "denied")
            ])
        }

        /// Decodes a base64 PCM chunk and enqueues it for playback.
        /// - `encoding` selects the sample format: "pcm_f32le" for 32-bit
        ///   float, "pcm_s16le" (or nil) for 16-bit int; anything else falls
        ///   back to 16-bit int with a debug log.
        AsyncFunction("playSound") { (base64Chunk: String, turnId: String, encoding: String?, promise: Promise) in
            Logger.debug("Play sound")
            do {
                if !isAudioSessionInitialized {
                    try ensureAudioSessionInitialized()
                }

                // Ensure shared engine is configured (playSound may be called without setSoundConfig)
                if !self.sharedAudioEngine.isConfigured {
                    try self.sharedAudioEngine.configure(playbackMode: .regular)
                    self.sharedAudioEngine.addDelegate(self.soundPlayer)
                }

                // Determine the audio format based on the encoding parameter
                let commonFormat: AVAudioCommonFormat
                switch encoding {
                case "pcm_f32le":
                    commonFormat = .pcmFormatFloat32
                case "pcm_s16le", nil:
                    commonFormat = .pcmFormatInt16
                default:
                    Logger.debug("[ExpoPlayAudioStreamModule] Unsupported encoding: \(encoding ?? "nil"), defaulting to PCM_S16LE")
                    commonFormat = .pcmFormatInt16
                }

                try soundPlayer.play(audioChunk: base64Chunk, turnId: turnId, resolver: {
                    _ in promise.resolve(nil)
                }, rejecter: {code, message, error in
                    promise.reject(code ?? "ERR_UNKNOWN", message ?? "Unknown error")
                }, commonFormat: commonFormat)
            } catch {
                // Bug fix: this catch previously only printed the error and
                // never settled the promise, leaving the JS caller hanging
                // forever. Reject so callers can observe the failure.
                Logger.debug("Error enqueuing audio: \(error.localizedDescription)")
                promise.reject("ERR_PLAY_SOUND", "Failed to enqueue audio: \(error.localizedDescription)")
            }
        }

        /// Stops playback, settling the promise when done.
        AsyncFunction("stopSound") { (promise: Promise) in
            soundPlayer.stop(promise)
        }

        /// Removes queued-but-unplayed chunks belonging to `turnId`.
        AsyncFunction("clearSoundQueueByTurnId") { (turnId: String, promise: Promise) in
            soundPlayer.clearSoundQueue(turnIdToClear: turnId, resolver: promise)
        }

        /// Starts microphone capture with the provided options and resolves
        /// with the recording parameters actually in effect.
        AsyncFunction("startMicrophone") { (options: [String: Any], promise: Promise) in
            // Create recording settings
            // Extract settings from provided options, using default values if necessary
            // NOTE(review): original author comment said "it fails if not 48000,
            // why?" — the effective hardware sample-rate handling should be verified.
            let sampleRate = options["sampleRate"] as? Double ?? 16000.0
            let numberOfChannels = options["channelConfig"] as? Int ?? 1 // Mono channel configuration
            let bitDepth = options["audioFormat"] as? Int ?? 16 // 16bits
            let interval = options["interval"] as? Int ?? 1000

            let settings = RecordingSettings(
                sampleRate: sampleRate,
                desiredSampleRate: sampleRate,
                numberOfChannels: numberOfChannels,
                bitDepth: bitDepth,
                maxRecentDataDuration: nil,
                pointsPerSecond: nil
            )

            if !isAudioSessionInitialized {
                do {
                    try ensureAudioSessionInitialized(settings: settings)
                } catch {
                    promise.reject("ERROR", "Failed to init audio session \(error.localizedDescription)")
                    return
                }
            }

            if let result = self.microphone.startRecording(settings: settings, intervalMilliseconds: interval) {
                if let resError = result.error {
                    promise.reject("ERROR", resError)
                } else {
                    let resultDict: [String: Any] = [
                        "fileUri": result.fileUri ?? "",
                        "channels": result.channels ?? 1,
                        "bitDepth": result.bitDepth ?? 16,
                        "sampleRate": result.sampleRate ?? 48000,
                        "mimeType": result.mimeType ?? "",
                    ]
                    promise.resolve(resultDict)
                }
            } else {
                promise.reject("ERROR", "Failed to start recording.")
            }
        }

        /// Stops the microphone recording and releases associated resources
        /// - Parameter promise: A promise to resolve when microphone recording is stopped
        /// - Note: This method stops the active recording session, processes any remaining audio data,
        ///         and releases hardware resources. It should be called when the app no longer needs
        ///         microphone access to conserve battery and system resources.
        AsyncFunction("stopMicrophone") { (promise: Promise) in
            microphone.stopRecording(resolver: promise)
        }

        /// Mutes/unmutes the microphone stream without stopping capture.
        Function("toggleSilence") { (isSilent: Bool) in
            microphone.toggleSilence(isSilent: isSilent)
        }

        /// Sets the sound player configuration
        /// - Parameters:
        ///   - config: A dictionary containing configuration options:
        ///     - `sampleRate`: The sample rate for audio playback (default is 16000.0).
        ///     - `playbackMode`: The playback mode ("regular", "voiceProcessing", or "conversation").
        ///     - `useDefault`: When true, resets to default configuration regardless of other parameters.
        ///   - promise: A promise to resolve when configuration is updated or reject with an error.
        AsyncFunction("setSoundConfig") { (config: [String: Any], promise: Promise) in
            // Check if we should use default configuration
            let useDefault = config["useDefault"] as? Bool ?? false

            do {
                if !isAudioSessionInitialized {
                    try ensureAudioSessionInitialized()
                }

                if useDefault {
                    // Reset to default configuration — configure engine for regular mode
                    Logger.debug("[ExpoPlayAudioStreamModule] Resetting sound configuration to default values")
                    try self.sharedAudioEngine.configure(playbackMode: .regular)
                    self.sharedAudioEngine.addDelegate(self.soundPlayer)
                    try soundPlayer.resetConfigToDefault()
                } else {
                    // Extract configuration values from the provided dictionary
                    let sampleRate = config["sampleRate"] as? Double ?? 16000.0
                    let playbackModeString = config["playbackMode"] as? String ?? "regular"

                    // Convert string playback mode to enum
                    let playbackMode: PlaybackMode
                    switch playbackModeString {
                    case "voiceProcessing":
                        playbackMode = .voiceProcessing
                    case "conversation":
                        playbackMode = .conversation
                    default:
                        playbackMode = .regular
                    }

                    // Configure shared engine first (handles voice processing)
                    try self.sharedAudioEngine.configure(playbackMode: playbackMode)
                    self.sharedAudioEngine.addDelegate(self.soundPlayer)

                    // Create a new SoundConfig object
                    let soundConfig = SoundConfig(sampleRate: sampleRate, playbackMode: playbackMode)

                    // Update the sound player configuration (attaches node to shared engine)
                    Logger.debug("[ExpoPlayAudioStreamModule] Setting sound configuration - sampleRate: \(sampleRate), playbackMode: \(playbackModeString)")
                    try soundPlayer.updateConfig(soundConfig)
                }

                promise.resolve(nil)
            } catch {
                promise.reject("ERROR_CONFIG_UPDATE", "Failed to set sound configuration: \(error.localizedDescription)")
            }
        }

        // ── Pipeline functions ────────────────────────────────────────────

        /// Connects the audio pipeline, configuring the shared engine for the
        /// requested playback mode and making the pipeline the active
        /// route/interruption delegate.
        AsyncFunction("connectPipeline") { (options: [String: Any], promise: Promise) in
            do {
                if !self.isAudioSessionInitialized {
                    try self.ensureAudioSessionInitialized()
                }

                // Parse playback mode from options to configure shared engine
                let playbackModeString = options["playbackMode"] as? String ?? "regular"
                let playbackMode: PlaybackMode
                switch playbackModeString {
                case "voiceProcessing":
                    playbackMode = .voiceProcessing
                case "conversation":
                    playbackMode = .conversation
                default:
                    playbackMode = .regular
                }

                // Configure shared engine (handles voice processing)
                try self.sharedAudioEngine.configure(playbackMode: playbackMode)

                let result = try self.pipelineIntegration.connect(options: options)

                // Set the AudioPipeline as the active delegate for route/interruption callbacks
                self.pipelineIntegration.setAsActiveDelegate(on: self.sharedAudioEngine)

                promise.resolve(result)
            } catch {
                promise.reject("PIPELINE_CONNECT_ERROR", error.localizedDescription)
            }
        }

        /// Pushes an audio chunk into the pipeline (async variant).
        AsyncFunction("pushPipelineAudio") { (options: [String: Any], promise: Promise) in
            do {
                try self.pipelineIntegration.pushAudio(options: options)
                promise.resolve(nil)
            } catch {
                promise.reject("PIPELINE_PUSH_ERROR", error.localizedDescription)
            }
        }

        /// Synchronous push; returns whether the chunk was accepted.
        Function("pushPipelineAudioSync") { (options: [String: Any]) -> Bool in
            return self.pipelineIntegration.pushAudioSync(options: options)
        }

        /// Detaches the pipeline from the shared engine and disconnects it.
        AsyncFunction("disconnectPipeline") { (promise: Promise) in
            self.pipelineIntegration.removeAsDelegate(from: self.sharedAudioEngine)
            self.pipelineIntegration.disconnect()
            promise.resolve(nil)
        }

        /// Invalidates a pipeline turn so its remaining audio is discarded.
        AsyncFunction("invalidatePipelineTurn") { (options: [String: Any], promise: Promise) in
            do {
                try self.pipelineIntegration.invalidateTurn(options: options)
                promise.resolve(nil)
            } catch {
                promise.reject("PIPELINE_INVALIDATE_ERROR", error.localizedDescription)
            }
        }

        /// Snapshot of pipeline telemetry counters.
        Function("getPipelineTelemetry") { () -> [String: Any] in
            return self.pipelineIntegration.getTelemetry()
        }

        /// Current pipeline state as a string.
        Function("getPipelineState") { () -> String in
            return self.pipelineIntegration.getState()
        }
    }

    /// Configures and activates the shared AVAudioSession (idempotent).
    /// When `settings` are provided, also applies the preferred sample rate
    /// and a short IO buffer duration suitable for frequent emission.
    private func ensureAudioSessionInitialized(settings recordingSettings: RecordingSettings? = nil) throws {
        if self.isAudioSessionInitialized { return }

        let audioSession = AVAudioSession.sharedInstance()
        try audioSession.setCategory(
            .playAndRecord, mode: .videoChat,
            options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
        if let settings = recordingSettings {
            try audioSession.setPreferredSampleRate(settings.sampleRate)
            // Set IO buffer duration short enough to support the desired emission interval.
            // Use the hardware sample rate (not the desired rate) since this is a hardware-level setting.
            let hwSampleRate = audioSession.sampleRate > 0 ? audioSession.sampleRate : 48000.0
            let preferredDuration = 512.0 / hwSampleRate // ~10.7ms at 48kHz
            try audioSession.setPreferredIOBufferDuration(preferredDuration)
        }
        try audioSession.setActive(true)
        isAudioSessionInitialized = true
    }

    // used for voice isolation, experimental
    private func promptForMicrophoneModes() {
        guard #available(iOS 15.0, *) else {
            return
        }

        // Nothing to do if voice isolation is already the preferred mode.
        if AVCaptureDevice.preferredMicrophoneMode == .voiceIsolation {
            return
        }

        AVCaptureDevice.showSystemUserInterface(.microphoneModes)
    }

    /// Checks microphone permission and calls the completion handler with the result.
    /// Prompts the user only when the status is still undetermined; the
    /// completion is dispatched back to the main queue in that case.
    private func checkMicrophonePermission(completion: @escaping (Bool) -> Void) {
        switch AVAudioSession.sharedInstance().recordPermission {
        case .granted:
            completion(true)
        case .denied:
            completion(false)
        case .undetermined:
            AVAudioSession.sharedInstance().requestRecordPermission { granted in
                DispatchQueue.main.async {
                    completion(granted)
                }
            }
        @unknown default:
            completion(false)
        }
    }

    /// MicrophoneDataDelegate: forwards captured audio to JS as a base64
    /// payload on the AudioData event.
    func onMicrophoneData(_ microphoneData: Data, _ soundLevel: Float?) {
        let encodedData = microphoneData.base64EncodedString()
        // Construct the event payload similar to Android
        let eventBody: [String: Any] = [
            "fileUri": "",
            "lastEmittedSize": 0,
            "position": 0,
            "encoded": encodedData,
            "deltaSize": 0,
            "totalSize": 0,
            "mimeType": "",
            "soundLevel": soundLevel ?? -160
        ]
        // Emit the event to JavaScript
        sendEvent(audioDataEvent, eventBody)
    }

    /// MicrophoneDataDelegate: reports a capture error to JS.
    /// NOTE(review): errors are emitted on the same AudioData event channel as
    /// audio payloads — confirm the JS side distinguishes them by shape.
    func onMicrophoneError(_ error: String, _ errorMessage: String) {
        let eventBody: [String: Any] = [
            "error": error,
            "errorMessage": errorMessage,
            "streamUuid": ""
        ]
        sendEvent(audioDataEvent, eventBody)
    }

    /// Forwards an audio-route change to JS, collapsing all reasons other
    /// than device (dis)connection to "unknown".
    func onDeviceReconnected(_ reason: AVAudioSession.RouteChangeReason) {
        let reasonString: String
        switch reason {
        case .newDeviceAvailable:
            reasonString = "newDeviceAvailable"
        case .oldDeviceUnavailable:
            reasonString = "oldDeviceUnavailable"
        case .unknown, .categoryChange, .override, .wakeFromSleep, .noSuitableRouteForCategory, .routeConfigurationChange:
            reasonString = "unknown"
        @unknown default:
            reasonString = "unknown"
        }

        sendEvent(deviceReconnectedEvent, ["reason": reasonString])
    }

    /// SoundPlayerDelegate: a chunk finished playing; `isFinal` marks the
    /// last chunk of the turn.
    func onSoundChunkPlayed(_ isFinal: Bool) {
        sendEvent(soundIsPlayedEvent, ["isFinal": isFinal])
    }

    /// SoundPlayerDelegate: playback of a turn has started.
    func onSoundStartedPlaying() {
        sendEvent(soundIsStartedEvent)
    }
}
@@ -0,0 +1,7 @@
1
+ import ExpoModulesCore
2
+
3
// This view will be used as a native component. Make sure to inherit from `ExpoView`
// to apply the proper styling (e.g. border radius and shadows).
class ExpoPlayAudioStreamView: ExpoView {
    // Intentionally empty: this package exposes no custom native UI.
    // NOTE(review): presumably kept so the module's view registration has a
    // class to reference — confirm against expo-module.config.json.
}
@@ -0,0 +1,208 @@
1
+ import Foundation
2
+
3
/// Thread-safe FIFO of 16-bit little-endian PCM samples, guarded by an NSLock.
///
/// One producer thread appends decoded chunks with `write(...)`; one consumer
/// thread pulls fixed-size blocks with `read(...)`. Incoming `[Int16]` arrays
/// are retained as-is (Swift copy-on-write means no byte copy occurs as long
/// as neither side mutates them afterwards), and there is no capacity cap.
///
/// Behavior:
/// - A priming gate keeps `read()` emitting pure silence until at least
///   `targetBufferMs` worth of samples has been queued, or until
///   `markEndOfStream()` forces the gate open so the tail can drain.
/// - If a read request exceeds what is buffered, the shortfall is zero-filled
///   and counted as an underrun (unless the stream has already ended).
class JitterBuffer {
    private let sampleRate: Int
    private let channels: Int
    private let targetBufferMs: Int

    // FIFO of enqueued chunks. `headOffset` is how far into the front chunk
    // the consumer has advanced; `liveSamples` is the total remaining.
    private var queue: [[Int16]] = []
    private var headOffset: Int = 0
    private var liveSamples: Int = 0

    // Priming gate: closed until `samplesNeededToPrime` have accumulated.
    private let samplesNeededToPrime: Int
    private var gateOpen: Bool = false

    // Set once the producer declares the current turn finished.
    private var streamEnded: Bool = false

    // Guards every piece of mutable state above and the telemetry below.
    private let stateLock = NSLock()

    // Telemetry counters.
    private(set) var totalWritten: Int64 = 0
    private(set) var totalRead: Int64 = 0
    private(set) var underrunCount: Int = 0
    private(set) var peakLevel: Int = 0

    init(sampleRate: Int, channels: Int, targetBufferMs: Int) {
        self.sampleRate = sampleRate
        self.channels = channels
        self.targetBufferMs = targetBufferMs
        self.samplesNeededToPrime = (sampleRate * channels * targetBufferMs) / 1000
    }

    // ── Producer API ────────────────────────────────────────────────────

    /// Enqueue `length` samples of `samples` starting at `offset`
    /// (defaults cover the whole array). Returns the number enqueued.
    @discardableResult
    func write(samples: [Int16], offset: Int = 0, length: Int? = nil) -> Int {
        let sampleCount = length ?? samples.count
        stateLock.lock()
        defer { stateLock.unlock() }

        // Store the whole array when possible so CoW avoids any copy;
        // otherwise slice out exactly the requested window.
        let stored: [Int16] = (offset == 0 && sampleCount == samples.count)
            ? samples
            : Array(samples[offset..<(offset + sampleCount)])

        queue.append(stored)
        liveSamples += sampleCount

        // Telemetry: high-water mark and cumulative write count.
        peakLevel = max(peakLevel, liveSamples)
        totalWritten += Int64(sampleCount)

        // Open the priming gate once enough audio has accumulated.
        if !gateOpen, liveSamples >= samplesNeededToPrime {
            gateOpen = true
        }

        return sampleCount
    }

    // ── Consumer API ────────────────────────────────────────────────────

    /// Fill `dest` (from `offset`, for `length` samples — defaults cover the
    /// whole array) out of the queue.
    ///
    /// - Gate closed: the destination is zero-filled and nothing is consumed.
    /// - Gate open: buffered samples are copied; any shortfall is zero-filled
    ///   and recorded as an underrun while the stream is still live.
    @discardableResult
    func read(dest: inout [Int16], offset: Int = 0, length: Int? = nil) -> Int {
        let requested = length ?? dest.count
        stateLock.lock()
        defer { stateLock.unlock() }

        guard gateOpen else {
            // Still priming: hand back silence and leave the queue untouched.
            for i in offset..<(offset + requested) {
                dest[i] = 0
            }
            return requested
        }

        var writeIndex = offset
        var needed = requested

        while needed > 0, let head = queue.first {
            let available = head.count - headOffset
            let copyCount = min(available, needed)

            for i in 0..<copyCount {
                dest[writeIndex + i] = head[headOffset + i]
            }
            headOffset += copyCount
            writeIndex += copyCount
            needed -= copyCount
            liveSamples -= copyCount

            // Drop the front chunk once fully consumed.
            if headOffset >= head.count {
                queue.removeFirst()
                headOffset = 0
            }
        }

        if needed > 0 {
            // Underflow: zero-fill the shortfall. It only counts as an
            // underrun while the producer is still expected to deliver more.
            for i in writeIndex..<(writeIndex + needed) {
                dest[i] = 0
            }
            if !streamEnded {
                underrunCount += 1
            }
        }

        totalRead += Int64(requested)
        return requested
    }

    // ── Control API ─────────────────────────────────────────────────────

    /// Declare that no further audio will arrive for this turn, and force the
    /// priming gate open so whatever remains can be consumed.
    func markEndOfStream() {
        stateLock.lock()
        defer { stateLock.unlock() }
        streamEnded = true
        gateOpen = true
    }

    /// `true` once end-of-stream was signalled AND every sample has been read.
    func isDrained() -> Bool {
        stateLock.lock()
        defer { stateLock.unlock() }
        return streamEnded && liveSamples == 0
    }

    /// Snapshot of how many samples are currently buffered.
    func availableSamples() -> Int {
        stateLock.lock()
        defer { stateLock.unlock() }
        return liveSamples
    }

    /// Current buffer depth expressed in milliseconds of audio.
    func bufferedMs() -> Int {
        stateLock.lock()
        defer { stateLock.unlock() }
        let samplesPerSecond = sampleRate * channels
        guard samplesPerSecond > 0 else { return 0 }
        return (liveSamples * 1000) / samplesPerSecond
    }

    /// Whether reads currently return real audio rather than priming silence.
    func isPrimed() -> Bool {
        stateLock.lock()
        defer { stateLock.unlock() }
        return gateOpen
    }

    /// Discard all buffered audio and close the priming gate again; used on
    /// turn changes to drop stale samples. Telemetry counters are preserved.
    func reset() {
        stateLock.lock()
        defer { stateLock.unlock() }
        queue.removeAll()
        headOffset = 0
        liveSamples = 0
        gateOpen = false
        streamEnded = false
    }

    /// Zero every telemetry counter.
    func resetTelemetry() {
        stateLock.lock()
        defer { stateLock.unlock() }
        totalWritten = 0
        totalRead = 0
        underrunCount = 0
        peakLevel = 0
    }
}
@@ -0,0 +1,7 @@
1
class Logger {
    /// Debug-build-only logger. The `@autoclosure` defers building the
    /// message string, so release builds (where the body compiles away)
    /// never pay the formatting cost.
    static func debug(_ text: @autoclosure () -> String) {
        #if DEBUG
        print(text())
        #endif
    }
}