@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,562 @@
1
+ import AVFoundation
2
+
3
/// Pipeline states reported to JS via PipelineListener.onStateChanged.
enum PipelineState: String {
    // Raw values are derived from the case names, which are exactly the
    // strings the JS side receives ("idle", "connecting", ...).
    case idle
    case connecting
    case streaming
    case draining
    case error
}
11
+
12
/// Listener interface — implemented by PipelineIntegration to bridge events to JS.
protocol PipelineListener: AnyObject {
    /// Pipeline moved to a new state (idle/connecting/streaming/draining/error).
    func onStateChanged(_ state: PipelineState)
    /// Fired once per turn, when the jitter buffer first becomes primed.
    func onPlaybackStarted(turnId: String)
    /// Pipeline error with a machine-readable code (e.g. "CONNECT_FAILED",
    /// "ENGINE_DIED", "NOT_CONNECTED", "DECODE_ERROR") and human-readable message.
    func onZombieDetected(stalledMs: Int64)
    /// Error reporting and zombie detection (scheduling loop stalled for
    /// `stalledMs` while state claimed to be playing).
    func onError(code: String, message: String)
    /// The jitter buffer's cumulative underrun count increased; `count` is the new total.
    func onUnderrun(count: Int)
    /// The buffer fully drained after end-of-stream for the given turn.
    func onDrained(turnId: String)
    /// Audio session interruption began (system took audio focus).
    func onAudioFocusLost()
    /// Audio session interruption ended (audio focus restored).
    func onAudioFocusResumed()
}
23
+
24
/// Core orchestrator for the native audio pipeline (iOS).
///
/// Attaches an AVAudioPlayerNode to the SharedAudioEngine, feeds it from a
/// JitterBuffer via a scheduling loop that chains buffer completions for
/// continuous output.
///
/// Key design points:
/// - The player node stays alive for the entire session, playing silence when
///   idle (via JitterBuffer returning zeros when not primed).
/// - Config is immutable per session — disconnect and reconnect to change
///   sample rate.
/// - Route changes and interruptions are handled by SharedAudioEngine;
///   this class implements SharedAudioEngineDelegate for re-seeding.
/// - Zombie detection via timer checking that the scheduling loop is active.
/// - Turn management synchronized via turnLock to prevent interleaved
///   buffer.reset + buffer.write.
class AudioPipeline: SharedAudioEngineDelegate {
    private static let TAG = "AudioPipeline"

    /// Number of buffers to pre-schedule for continuous output.
    private static let PRE_SCHEDULE_COUNT = 3

    /// How often (seconds) the state-monitoring timer fires.
    private static let STATE_POLL_INTERVAL: TimeInterval = 0.05

    /// How often (seconds) zombie detection checks.
    private static let ZOMBIE_POLL_INTERVAL: TimeInterval = 2.0

    /// If scheduling loop hasn't run for this long, declare zombie.
    private static let ZOMBIE_STALL_THRESHOLD_MS: Int64 = 5000

    // ── Config (immutable per session) ──────────────────────────────────
    private let sampleRate: Int
    private let channelCount: Int
    private let targetBufferMs: Int
    // Weak: the integration layer and engine outlive/own the pipeline,
    // not the other way around — avoids retain cycles.
    private weak var listener: PipelineListener?
    private weak var sharedEngine: SharedAudioEngine?

    // ── Core components ─────────────────────────────────────────────────
    private var playerNode: AVAudioPlayerNode?
    private var outputFormat: AVAudioFormat?
    private var jitterBuffer: JitterBuffer?

    /// Number of interleaved Int16 samples per scheduled buffer.
    let frameSizeSamples: Int

    // ── Threading / state ───────────────────────────────────────────────
    private var running = false
    // Guards currentTurnId / playbackStartedForTurn / lastReportedUnderrunCount
    // (and jitter-buffer reset+write pairing) across bridge and timer threads.
    private let turnLock = NSLock()
    private var currentTurnId: String?
    private var playbackStartedForTurn = false
    private var lastReportedUnderrunCount = 0
    // When true, scheduleNextBuffer() renders silence instead of decoded audio.
    private var isInterrupted = false

    /// Incremented each time the scheduling loop is torn down (route change, disconnect).
    /// Completion handlers capture the generation at scheduling time and bail if it's stale.
    /// This prevents duplicate chains and stale callbacks from re-entering after a rebuild.
    private var scheduleGeneration: Int = 0

    // ── Timers ──────────────────────────────────────────────────────────
    private var stateTimer: DispatchSourceTimer?
    private var zombieTimer: DispatchSourceTimer?
    // Stamped by scheduleNextBuffer(); read by the zombie-detection timer.
    private var lastScheduleTime = Date()

    // ── Pipeline state ──────────────────────────────────────────────────
    private var state: PipelineState = .idle

    // ── Telemetry ───────────────────────────────────────────────────────
    private(set) var totalPushCalls: Int64 = 0
    private(set) var totalPushBytes: Int64 = 0
    private(set) var totalScheduledBuffers: Int64 = 0

    // ── Pre-allocated render buffer ─────────────────────────────────────
    // Sized to frameSizeSamples in connect(); reused by every
    // scheduleNextBuffer() call to avoid per-buffer allocation.
    private var renderSamples: [Int16] = []

    /// - Parameters:
    ///   - sampleRate: PCM sample rate in Hz for this session (immutable per session).
    ///   - channelCount: Interleaved channel count of pushed audio.
    ///   - targetBufferMs: Jitter-buffer priming target, in milliseconds.
    ///   - sharedEngine: Engine the player node attaches to (held weakly).
    ///   - listener: Event sink bridging to JS (held weakly).
    init(sampleRate: Int, channelCount: Int, targetBufferMs: Int, sharedEngine: SharedAudioEngine, listener: PipelineListener) {
        self.sampleRate = sampleRate
        self.channelCount = channelCount
        self.targetBufferMs = targetBufferMs
        self.sharedEngine = sharedEngine
        self.listener = listener
        // 20ms frame size (matches typical iOS audio buffer duration)
        self.frameSizeSamples = max(1, sampleRate * channelCount / 50)
    }
108
+
109
+ // ════════════════════════════════════════════════════════════════════
110
+ // Connect / Disconnect
111
+ // ════════════════════════════════════════════════════════════════════
112
+
113
    /// Builds the per-session pipeline: jitter buffer, pre-allocated render
    /// buffer, audio-session activation, player node + format, the pre-seeded
    /// scheduling loop, and the state/zombie timers. No-op if already running.
    ///
    /// On failure, reports .error and "CONNECT_FAILED" to the listener, then
    /// calls disconnect() to tear down any partial setup.
    /// NOTE(review): disconnect() ends with setState(.idle), so the externally
    /// visible state after a failed connect is .idle rather than .error —
    /// confirm this ordering is intended.
    func connect() {
        guard !running else {
            Logger.debug("[\(AudioPipeline.TAG)] connect() called while already running — ignoring")
            return
        }
        setState(.connecting)

        do {
            guard let sharedEngine = sharedEngine else {
                throw NSError(domain: "AudioPipeline", code: -1,
                              userInfo: [NSLocalizedDescriptionKey: "SharedAudioEngine not set"])
            }

            // ── 1. JitterBuffer ─────────────────────────────────────────
            jitterBuffer = JitterBuffer(
                sampleRate: sampleRate,
                channels: channelCount,
                targetBufferMs: targetBufferMs
            )

            // ── 2. Pre-allocate render buffer ───────────────────────────
            renderSamples = [Int16](repeating: 0, count: frameSizeSamples)

            // ── 3. Audio session ────────────────────────────────────────
            // Session category/mode is owned by ExpoPlayAudioStreamModule
            // (ensureAudioSessionInitialized). Just ensure it's active.
            try AVAudioSession.sharedInstance().setActive(true)

            // ── 4. Create format and attach player node to shared engine ─
            guard let format = AVAudioFormat(
                commonFormat: .pcmFormatFloat32,
                sampleRate: Double(sampleRate),
                channels: AVAudioChannelCount(channelCount),
                interleaved: false
            ) else {
                throw NSError(domain: "AudioPipeline", code: -1,
                              userInfo: [NSLocalizedDescriptionKey: "Failed to create audio format"])
            }

            let node = AVAudioPlayerNode()
            sharedEngine.attachNode(node, format: format)
            node.play()

            self.playerNode = node
            self.outputFormat = format
            self.running = true

            // ── 5. Start scheduling loop ────────────────────────────────
            // Seeds PRE_SCHEDULE_COUNT independent completion chains; each
            // chain self-perpetuates via the buffer-completion handler.
            Logger.debug("[\(AudioPipeline.TAG)] Seeding scheduling loop — gen=\(scheduleGeneration) count=\(AudioPipeline.PRE_SCHEDULE_COUNT)")
            for _ in 0..<AudioPipeline.PRE_SCHEDULE_COUNT {
                scheduleNextBuffer()
            }

            // ── 6. State polling + zombie detection ─────────────────────
            startStatePolling()
            startZombieDetection()

            // ── 7. Reset telemetry ──────────────────────────────────────
            resetTelemetry()

            setState(.idle)
            Logger.debug("[\(AudioPipeline.TAG)] Connected — sampleRate=\(sampleRate) " +
                         "ch=\(channelCount) frameSamples=\(frameSizeSamples) " +
                         "targetBuffer=\(targetBufferMs)ms")
        } catch {
            Logger.debug("[\(AudioPipeline.TAG)] connect() failed: \(error)")
            setState(.error)
            listener?.onError(code: "CONNECT_FAILED", message: error.localizedDescription)
            disconnect()
        }
    }
184
+
185
+ func disconnect() {
186
+ running = false
187
+ // Invalidate all in-flight completion handlers before detaching.
188
+ scheduleGeneration += 1
189
+
190
+ // Stop timers
191
+ stateTimer?.cancel()
192
+ stateTimer = nil
193
+ zombieTimer?.cancel()
194
+ zombieTimer = nil
195
+
196
+ // Detach node from shared engine (handles pause/stop/disconnect/detach)
197
+ if let node = playerNode {
198
+ sharedEngine?.detachNode(node)
199
+ }
200
+
201
+ playerNode = nil
202
+ outputFormat = nil
203
+ jitterBuffer = nil
204
+ currentTurnId = nil
205
+
206
+ setState(.idle)
207
+ Logger.debug("[\(AudioPipeline.TAG)] Disconnected")
208
+ }
209
+
210
+ // ════════════════════════════════════════════════════════════════════
211
+ // SharedAudioEngineDelegate
212
+ // ════════════════════════════════════════════════════════════════════
213
+
214
    /// SharedAudioEngineDelegate — the engine restarted after an audio route
    /// change. The node was already re-attached and started by
    /// SharedAudioEngine; this callback only invalidates stale completions
    /// and re-seeds the scheduling loop.
    func engineDidRestartAfterRouteChange() {
        guard running else {
            Logger.debug("[\(AudioPipeline.TAG)] engineDidRestartAfterRouteChange — not running, skipping")
            return
        }
        // Snapshot diagnostics for the log line below.
        let engineRunning = sharedEngine?.engine?.isRunning == true
        let nodeExists = playerNode != nil
        // Bump generation so any in-flight completions from before the rebuild are invalidated.
        // Without this, stopped-node completions that fire after isRebuilding clears would
        // re-enter the loop alongside our re-seed, doubling the scheduling chain.
        scheduleGeneration += 1
        Logger.debug("[\(AudioPipeline.TAG)] Engine restarted after route change — " +
                     "re-seeding scheduling loop (gen=\(scheduleGeneration), engineRunning=\(engineRunning), node=\(nodeExists), " +
                     "state=\(state.rawValue), bufferMs=\(jitterBuffer?.bufferedMs() ?? -1))")
        // Node was already re-attached and started by SharedAudioEngine.
        // Re-seed the scheduling loop with a fresh generation.
        lastScheduleTime = Date() // Reset zombie timer baseline
        for _ in 0..<AudioPipeline.PRE_SCHEDULE_COUNT {
            scheduleNextBuffer()
        }
    }
235
+
236
+ func engineDidRebuild() {
237
+ guard running else {
238
+ Logger.debug("[\(AudioPipeline.TAG)] engineDidRebuild — not running, skipping")
239
+ return
240
+ }
241
+
242
+ Logger.debug("[\(AudioPipeline.TAG)] Engine rebuilt — creating fresh node and re-seeding")
243
+
244
+ // Old node is invalid (detached during teardown). Create a fresh one.
245
+ scheduleGeneration += 1
246
+
247
+ guard let sharedEngine = sharedEngine,
248
+ let format = AVAudioFormat(
249
+ commonFormat: .pcmFormatFloat32,
250
+ sampleRate: Double(sampleRate),
251
+ channels: AVAudioChannelCount(channelCount),
252
+ interleaved: false
253
+ ) else {
254
+ Logger.debug("[\(AudioPipeline.TAG)] engineDidRebuild — cannot create format or engine missing, treating as dead")
255
+ running = false
256
+ setState(.error)
257
+ listener?.onError(code: "ENGINE_DIED", message: "Failed to recreate audio node after engine rebuild")
258
+ return
259
+ }
260
+
261
+ let node = AVAudioPlayerNode()
262
+ sharedEngine.attachNode(node, format: format)
263
+ node.play()
264
+
265
+ self.playerNode = node
266
+ self.outputFormat = format
267
+
268
+ let engineRunning = sharedEngine.engine?.isRunning == true
269
+ let nodeExists = playerNode != nil
270
+ Logger.debug("[\(AudioPipeline.TAG)] Fresh node attached after rebuild — " +
271
+ "gen=\(scheduleGeneration), engineRunning=\(engineRunning), node=\(nodeExists), " +
272
+ "state=\(state.rawValue), bufferMs=\(jitterBuffer?.bufferedMs() ?? -1))")
273
+
274
+ // Re-seed scheduling loop
275
+ lastScheduleTime = Date()
276
+ for _ in 0..<AudioPipeline.PRE_SCHEDULE_COUNT {
277
+ scheduleNextBuffer()
278
+ }
279
+ }
280
+
281
+ func engineDidDie(reason: String) {
282
+ Logger.debug("[\(AudioPipeline.TAG)] Engine died: \(reason)")
283
+ // Stop the pipeline so all state is cleaned up.
284
+ // Don't call disconnect() since the engine is already torn down —
285
+ // just reset our own state.
286
+ running = false
287
+ scheduleGeneration += 1
288
+ stateTimer?.cancel()
289
+ stateTimer = nil
290
+ zombieTimer?.cancel()
291
+ zombieTimer = nil
292
+ playerNode = nil
293
+ outputFormat = nil
294
+ jitterBuffer = nil
295
+ currentTurnId = nil
296
+ setState(.error)
297
+ listener?.onError(code: "ENGINE_DIED", message: reason)
298
+ }
299
+
300
    /// SharedAudioEngineDelegate — an audio session interruption began.
    /// Sets the flag that makes scheduleNextBuffer() render silence, then
    /// notifies JS that audio focus was lost.
    func audioSessionInterruptionBegan() {
        Logger.debug("[\(AudioPipeline.TAG)] Audio session interruption began")
        isInterrupted = true
        listener?.onAudioFocusLost()
    }
305
+
306
+ func audioSessionInterruptionEnded() {
307
+ Logger.debug("[\(AudioPipeline.TAG)] Audio session interruption ended")
308
+ isInterrupted = false
309
+ // Engine already restarted by SharedAudioEngine. Re-seed scheduling.
310
+ if running {
311
+ scheduleGeneration += 1
312
+ for _ in 0..<AudioPipeline.PRE_SCHEDULE_COUNT {
313
+ scheduleNextBuffer()
314
+ }
315
+ }
316
+ listener?.onAudioFocusResumed()
317
+ }
318
+
319
+ // ════════════════════════════════════════════════════════════════════
320
+ // Push audio (bridge thread → jitter buffer)
321
+ // ════════════════════════════════════════════════════════════════════
322
+
323
    /// Entry point for audio pushed from the JS bridge.
    ///
    /// Decodes `base64Audio` as little-endian 16-bit PCM and appends it to
    /// the jitter buffer. A first chunk — or any chunk whose turnId differs
    /// from the current turn — resets the buffer and starts a new turn.
    /// The last chunk marks end-of-stream and moves the pipeline to .draining.
    ///
    /// - Parameters:
    ///   - base64Audio: Base64-encoded PCM16LE payload.
    ///   - turnId: Identifier of the turn this chunk belongs to.
    ///   - isFirstChunk: Forces a turn reset even if turnId is unchanged.
    ///   - isLastChunk: Marks end-of-stream for this turn.
    func pushAudio(base64Audio: String, turnId: String, isFirstChunk: Bool, isLastChunk: Bool) {
        guard let buf = jitterBuffer else {
            listener?.onError(code: "NOT_CONNECTED", message: "Pipeline not connected")
            return
        }

        // Hold turnLock for the entire call so a concurrent invalidateTurn
        // can't interleave its buffer.reset between our reset and write.
        turnLock.lock()
        defer { turnLock.unlock() }

        // ── Turn boundary handling ──────────────────────────────────────
        if isFirstChunk || currentTurnId != turnId {
            buf.reset()
            currentTurnId = turnId
            playbackStartedForTurn = false
            lastReportedUnderrunCount = 0
            setState(.streaming)
        }

        // ── Decode base64 → PCM shorts ──────────────────────────────────
        guard let bytes = Data(base64Encoded: base64Audio) else {
            listener?.onError(code: "DECODE_ERROR", message: "Base64 decode failed")
            return
        }

        // Reinterpret the bytes as little-endian Int16 samples.
        // NOTE(review): an odd trailing byte is silently dropped (count / 2)
        // — confirm the sender always produces an even byte count.
        let sampleCount = bytes.count / 2
        var samples = [Int16](repeating: 0, count: sampleCount)
        bytes.withUnsafeBytes { rawBuffer in
            guard let ptr = rawBuffer.baseAddress?.assumingMemoryBound(to: Int16.self) else { return }
            for i in 0..<sampleCount {
                samples[i] = Int16(littleEndian: ptr[i])
            }
        }

        // ── Write into jitter buffer ────────────────────────────────────
        buf.write(samples: samples)

        // ── Telemetry ───────────────────────────────────────────────────
        totalPushCalls += 1
        totalPushBytes += Int64(bytes.count)

        // ── End-of-stream ───────────────────────────────────────────────
        if isLastChunk {
            buf.markEndOfStream()
            setState(.draining)
        }
    }
369
+
370
+ /// Invalidate the current turn. Resets the jitter buffer so stale audio
371
+ /// is discarded immediately.
372
+ func invalidateTurn(newTurnId: String) {
373
+ turnLock.lock()
374
+ defer { turnLock.unlock() }
375
+ jitterBuffer?.reset()
376
+ currentTurnId = newTurnId
377
+ playbackStartedForTurn = false
378
+ lastReportedUnderrunCount = 0
379
+ setState(.idle)
380
+ }
381
+
382
+ // ════════════════════════════════════════════════════════════════════
383
+ // State & Telemetry
384
+ // ════════════════════════════════════════════════════════════════════
385
+
386
+ func getState() -> PipelineState { return state }
387
+
388
+ func getTelemetry() -> [String: Any] {
389
+ let buf = jitterBuffer
390
+ return [
391
+ "state": state.rawValue,
392
+ "bufferMs": buf?.bufferedMs() ?? 0,
393
+ "bufferSamples": buf?.availableSamples() ?? 0,
394
+ "primed": buf?.isPrimed() ?? false,
395
+ "totalWritten": buf?.totalWritten ?? 0,
396
+ "totalRead": buf?.totalRead ?? 0,
397
+ "underrunCount": buf?.underrunCount ?? 0,
398
+ "peakLevel": buf?.peakLevel ?? 0,
399
+ "totalPushCalls": totalPushCalls,
400
+ "totalPushBytes": totalPushBytes,
401
+ "totalScheduledBuffers": totalScheduledBuffers,
402
+ "turnId": currentTurnId ?? ""
403
+ ]
404
+ }
405
+
406
+ // ════════════════════════════════════════════════════════════════════
407
+ // Scheduling loop
408
+ // ════════════════════════════════════════════════════════════════════
409
+
410
    /// One iteration of the scheduling loop.
    ///
    /// Reads frameSizeSamples interleaved Int16 samples from the jitter
    /// buffer, converts them to the engine's non-interleaved Float32 format,
    /// and schedules the result on the player node. The node's completion
    /// handler calls back into this method, forming a self-sustaining chain;
    /// PRE_SCHEDULE_COUNT chains are seeded at connect/rebuild time.
    ///
    /// Silently returns (ending this chain) when the pipeline is stopped,
    /// the engine is rebuilding or not running, or buffer allocation fails;
    /// re-seeding is the responsibility of the delegate callbacks.
    private func scheduleNextBuffer() {
        guard running,
              let se = sharedEngine, !se.isRebuilding,
              let buf = jitterBuffer,
              let node = playerNode,
              let format = outputFormat,
              se.engine?.isRunning == true else { return }

        // Capture the current generation so the completion handler can detect staleness.
        let capturedGeneration = scheduleGeneration

        // Read interleaved Int16 samples from jitter buffer.
        // Note: samples are consumed even while interrupted — the silence
        // branch below only replaces what gets rendered.
        buf.read(dest: &renderSamples, length: frameSizeSamples)

        // Convert to non-interleaved Float32 for AVAudioEngine
        let framesPerBuffer = frameSizeSamples / channelCount
        guard let pcmBuffer = AVAudioPCMBuffer(
            pcmFormat: format,
            frameCapacity: AVAudioFrameCount(framesPerBuffer)
        ) else { return }
        pcmBuffer.frameLength = AVAudioFrameCount(framesPerBuffer)

        if let channelData = pcmBuffer.floatChannelData {
            if isInterrupted {
                // Write silence during interruption
                for ch in 0..<channelCount {
                    for i in 0..<framesPerBuffer {
                        channelData[ch][i] = 0
                    }
                }
            } else {
                // De-interleave Int16 → non-interleaved Float32
                for frame in 0..<framesPerBuffer {
                    for ch in 0..<channelCount {
                        let sampleIndex = frame * channelCount + ch
                        // Normalize to [-1.0, 1.0): Int16.min maps to -1.0.
                        channelData[ch][frame] = Float(renderSamples[sampleIndex]) / 32768.0
                    }
                }
            }
        }

        totalScheduledBuffers += 1
        lastScheduleTime = Date()  // baseline read by zombie detection

        node.scheduleBuffer(pcmBuffer) { [weak self] in
            guard let self = self, self.running else { return }
            // Bail if this completion belongs to a previous scheduling generation
            // (route change rebuilt the engine while this buffer was in flight).
            guard self.scheduleGeneration == capturedGeneration else { return }
            self.scheduleNextBuffer()
        }
    }
462
+
463
+ // ════════════════════════════════════════════════════════════════════
464
+ // State polling (runs on main thread via GCD timer)
465
+ // ════════════════════════════════════════════════════════════════════
466
+
467
+ private func startStatePolling() {
468
+ let timer = DispatchSource.makeTimerSource(queue: .main)
469
+ timer.schedule(
470
+ deadline: .now() + AudioPipeline.STATE_POLL_INTERVAL,
471
+ repeating: AudioPipeline.STATE_POLL_INTERVAL)
472
+ timer.setEventHandler { [weak self] in
473
+ self?.checkBufferState()
474
+ }
475
+ timer.resume()
476
+ stateTimer = timer
477
+ }
478
+
479
    /// Periodic state check, driven by the state timer on the main queue:
    /// - fires onPlaybackStarted once per turn, when the buffer first primes;
    /// - reports underrun-count increases, debounced against the last report;
    /// - while in .draining, detects buffer drain and returns to .idle.
    ///
    /// NOTE(review): the read-then-write on playbackStartedForTurn and
    /// lastReportedUnderrunCount spans two separate lock acquisitions. Timer
    /// ticks run serially on the main queue, but pushAudio on the bridge
    /// thread could interleave between them — confirm that is acceptable.
    private func checkBufferState() {
        guard let buf = jitterBuffer else { return }

        // Snapshot turn-scoped fields under the lock.
        turnLock.lock()
        let turnId = currentTurnId
        let alreadyStarted = playbackStartedForTurn
        let lastUnderruns = lastReportedUnderrunCount
        let currentState = state
        turnLock.unlock()

        // ── Playback-started event (once per turn) ──────────────────────
        if !alreadyStarted && buf.isPrimed() && turnId != nil {
            turnLock.lock()
            playbackStartedForTurn = true
            turnLock.unlock()
            listener?.onPlaybackStarted(turnId: turnId!)
        }

        // ── Underrun debounce ───────────────────────────────────────────
        let currentUnderruns = buf.underrunCount
        if currentUnderruns > lastUnderruns {
            turnLock.lock()
            lastReportedUnderrunCount = currentUnderruns
            turnLock.unlock()
            listener?.onUnderrun(count: currentUnderruns)
        }

        // ── Drain detection ─────────────────────────────────────────────
        if buf.isDrained() && currentState == .draining {
            if let tid = turnId {
                listener?.onDrained(turnId: tid)
            }
            setState(.idle)
        }
    }
514
+
515
+ // ════════════════════════════════════════════════════════════════════
516
+ // Zombie detection
517
+ // ════════════════════════════════════════════════════════════════════
518
+
519
+ private func startZombieDetection() {
520
+ lastScheduleTime = Date()
521
+ let timer = DispatchSource.makeTimerSource(queue: .main)
522
+ timer.schedule(
523
+ deadline: .now() + AudioPipeline.ZOMBIE_POLL_INTERVAL,
524
+ repeating: AudioPipeline.ZOMBIE_POLL_INTERVAL)
525
+ timer.setEventHandler { [weak self] in
526
+ guard let self = self else { return }
527
+ let stalledMs = Int64(Date().timeIntervalSince(self.lastScheduleTime) * 1000)
528
+ if stalledMs >= AudioPipeline.ZOMBIE_STALL_THRESHOLD_MS &&
529
+ (self.state == .streaming || self.state == .draining) {
530
+ Logger.debug("[\(AudioPipeline.TAG)] Zombie detected! stalledMs=\(stalledMs)")
531
+ self.listener?.onZombieDetected(stalledMs: stalledMs)
532
+ self.lastScheduleTime = Date()
533
+ }
534
+ }
535
+ timer.resume()
536
+ zombieTimer = timer
537
+ }
538
+
539
+ // ════════════════════════════════════════════════════════════════════
540
+ // Internal helpers
541
+ // ════════════════════════════════════════════════════════════════════
542
+
543
+ private func setState(_ newState: PipelineState) {
544
+ guard state != newState else { return }
545
+ state = newState
546
+ if Thread.isMainThread {
547
+ listener?.onStateChanged(newState)
548
+ } else {
549
+ DispatchQueue.main.async { [weak self] in
550
+ guard let self = self else { return }
551
+ self.listener?.onStateChanged(newState)
552
+ }
553
+ }
554
+ }
555
+
556
+ private func resetTelemetry() {
557
+ totalPushCalls = 0
558
+ totalPushBytes = 0
559
+ totalScheduledBuffers = 0
560
+ jitterBuffer?.resetTelemetry()
561
+ }
562
+ }