@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,651 @@
1
+ package expo.modules.audiostream
2
+
3
+ import android.media.AudioAttributes
4
+ import android.media.AudioFormat
5
+ import android.media.AudioTrack
6
+ import android.os.Bundle
7
+ import android.util.Base64
8
+ import android.util.Log
9
+ import expo.modules.kotlin.Promise
10
+ import kotlinx.coroutines.CoroutineScope
11
+ import kotlinx.coroutines.Dispatchers
12
+ import kotlinx.coroutines.Job
13
+ import kotlinx.coroutines.SupervisorJob
14
+ import kotlinx.coroutines.cancel
15
+ import kotlinx.coroutines.cancelAndJoin
16
+ import kotlinx.coroutines.channels.Channel
17
+ import kotlinx.coroutines.delay
18
+ import kotlinx.coroutines.flow.consumeAsFlow
19
+ import kotlinx.coroutines.launch
20
+ import kotlinx.coroutines.suspendCancellableCoroutine
21
+ import kotlinx.coroutines.withContext
22
+ import java.nio.ByteBuffer
23
+ import java.nio.ByteOrder
24
+ import kotlin.coroutines.cancellation.CancellationException
25
+ import kotlin.math.max
26
+ import kotlin.math.min
27
+
28
/**
 * Supported wire formats for incoming PCM audio payloads.
 */
enum class PCMEncoding {
    /** 32-bit IEEE-754 float samples, little-endian. */
    PCM_F32LE,

    /** 16-bit signed integer samples, little-endian. */
    PCM_S16LE
}
35
+
36
/**
 * A base64-encoded PCM chunk queued for decoding.
 *
 * @property chunk base64-encoded PCM payload (optionally carrying a RIFF/WAV header)
 * @property turnId identifier of the playback "turn" this chunk belongs to
 * @property promise settled once the chunk is written to the track, or rejected on failure
 * @property encoding wire format of the payload; defaults to 16-bit little-endian
 */
data class ChunkData(
    val chunk: String,
    val turnId: String,
    val promise: Promise,
    val encoding: PCMEncoding = PCMEncoding.PCM_S16LE
) // contains the base64 chunk and encoding info
42
+
43
/**
 * A decoded audio chunk queued for playback.
 *
 * NOTE(review): data classes with array properties get reference-based
 * equals/hashCode for that field — confirm no code relies on structural
 * equality of [audioData].
 *
 * @property audioData normalized float samples in [-1.0, 1.0]
 * @property promise settled exactly once; guarded by [isPromiseSettled]
 * @property turnId identifier of the playback "turn" this chunk belongs to
 * @property isPromiseSettled mutable guard preventing a double resolve/reject
 */
data class AudioChunk(
    val audioData: FloatArray,
    val promise: Promise,
    val turnId: String,
    var isPromiseSettled: Boolean = false
) // contains the decoded base64 chunk
49
+
50
/**
 * Streams decoded PCM chunks to an Android [AudioTrack] via a two-stage
 * coroutine pipeline (decode stage -> playback stage) and reports playback
 * lifecycle events through [eventSender].
 */
class AudioPlaybackManager(private val eventSender: EventSender? = null) {
    // Stage queues: base64 chunks awaiting decode, and decoded chunks awaiting
    // playback. lateinit because they are (re)created on demand.
    private lateinit var processingChannel: Channel<ChunkData>
    private lateinit var playbackChannel: Channel<AudioChunk>

    // SupervisorJob so one failed child coroutine does not cancel its siblings.
    private val coroutineScope = CoroutineScope(Dispatchers.Default + SupervisorJob())

    private var processingJob: Job? = null      // drains processingChannel
    private var currentPlaybackJob: Job? = null // drains playbackChannel

    private lateinit var audioTrack: AudioTrack
    private var isPlaying = false
    private var currentTurnId: String? = null
    // Guards against emitting SoundStarted more than once per turn.
    private var hasSentSoundStartedEvent = false
    // Count of chunks enqueued but not yet (estimated) finished playing.
    private var segmentsLeftToPlay = 0

    // Current sound configuration
    private var config: SoundConfig = SoundConfig.DEFAULT

    // Whether the AudioTrack was created with PCM_FLOAT (true) or PCM_16BIT (false).
    // Some device HALs don't support FLOAT output; we detect this at init and fall back.
    private var trackUsesFloat: Boolean = true

    // Specific turnID to ignore sound events (similar to iOS)
    // Removed: private val suspendSoundEventTurnId: String = "suspend-sound-events"

    init {
        initializeAudioTrack()
        initializeChannels()
    }
79
+
80
    /**
     * Builds the streaming [AudioTrack] from the current [config].
     * Prefers float PCM output; when the device reports no usable buffer size
     * for float, falls back to 16-bit (and, failing that, to a computed 20 ms
     * buffer). [trackUsesFloat] records the outcome so the write path knows
     * which AudioTrack.write() overload to use.
     */
    private fun initializeAudioTrack() {
        // Try PCM_FLOAT first; fall back to PCM_16BIT if the HAL doesn't support it.
        var encoding = AudioFormat.ENCODING_PCM_FLOAT
        var minBufferSize = AudioTrack.getMinBufferSize(
            config.sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            encoding
        )

        if (minBufferSize <= 0) {
            Log.w("AudioPlaybackManager",
                "getMinBufferSize returned $minBufferSize for PCM_FLOAT " +
                "(sampleRate=${config.sampleRate}). Falling back to PCM_16BIT.")
            encoding = AudioFormat.ENCODING_PCM_16BIT
            minBufferSize = AudioTrack.getMinBufferSize(
                config.sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                encoding
            )
            if (minBufferSize <= 0) {
                // Last resort: calculate a 20 ms frame for 16-bit mono (2 bytes/sample)
                Log.e("AudioPlaybackManager",
                    "getMinBufferSize also failed for PCM_16BIT ($minBufferSize). " +
                    "Using 20ms fallback buffer.")
                minBufferSize = (config.sampleRate * 2) / 50
            }
        }

        trackUsesFloat = (encoding == AudioFormat.ENCODING_PCM_FLOAT)

        val audioFormat = AudioFormat.Builder()
            .setSampleRate(config.sampleRate)
            .setEncoding(encoding)
            .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
            .build()

        // Configure audio attributes based on playback mode
        val audioAttributesBuilder = AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_MEDIA)

        // Set content type based on playback mode
        val contentType = when (config.playbackMode) {
            PlaybackMode.CONVERSATION, PlaybackMode.VOICE_PROCESSING ->
                AudioAttributes.CONTENT_TYPE_SPEECH
            else ->
                AudioAttributes.CONTENT_TYPE_MUSIC
        }

        audioAttributesBuilder.setContentType(contentType)

        // Double the minimum buffer to give the stream some headroom.
        audioTrack =
            AudioTrack.Builder()
                .setAudioAttributes(audioAttributesBuilder.build())
                .setAudioFormat(audioFormat)
                .setBufferSizeInBytes(minBufferSize * 2)
                .setTransferMode(AudioTrack.MODE_STREAM)
                .build()

        Log.d("AudioPlaybackManager",
            "AudioTrack created: encoding=${if (trackUsesFloat) "FLOAT" else "16BIT"}, " +
            "sampleRate=${config.sampleRate}, bufferBytes=${minBufferSize * 2}")
    }
142
+
143
+ private fun initializeChannels() {
144
+ // Close the channels if they are still open
145
+ if (!::processingChannel.isInitialized || processingChannel.isClosedForSend) {
146
+ processingChannel = Channel(Channel.UNLIMITED)
147
+ }
148
+ if (!::playbackChannel.isInitialized || playbackChannel.isClosedForSend) {
149
+ playbackChannel = Channel(Channel.UNLIMITED)
150
+ }
151
+ }
152
+
153
    /**
     * Public entry point: enqueues a base64-encoded PCM chunk for decoding and
     * playback under the given turn. The promise is settled when the chunk has
     * been written to the AudioTrack, or rejected on a processing failure.
     */
    fun playAudio(chunk: String, turnId: String, promise: Promise, encoding: PCMEncoding = PCMEncoding.PCM_S16LE) {
        coroutineScope.launch {
            // Channels may have been closed by a previous stopPlayback().
            if (processingChannel.isClosedForSend || playbackChannel.isClosedForSend) {
                Log.d("ExpoPlayStreamModule", "Re-initializing channels")
                initializeChannels()
            }
            // Update the current turnId (this will reset flags if needed through setCurrentTurnId)
            setCurrentTurnId(turnId)

            processingChannel.send(ChunkData(chunk, turnId, promise, encoding))
            ensureProcessingLoopStarted()
        }
    }
166
+
167
+ fun setCurrentTurnId(turnId: String) {
168
+ // Reset tracking flags when turnId changes
169
+ if (currentTurnId != turnId) {
170
+ hasSentSoundStartedEvent = false
171
+ // Only reset segments counter if we're not in the middle of playback
172
+ if (!isPlaying || playbackChannel.isEmpty) {
173
+ segmentsLeftToPlay = 0
174
+ }
175
+ }
176
+ currentTurnId = turnId
177
+ }
178
+
179
    /**
     * Cleanup hook for module disposal: stops playback, closes the intake
     * channel, cancels the decode job, then cancels the whole scope.
     *
     * NOTE(review): stopPlayback() performs its teardown in a coroutine
     * launched on the same scope that is cancelled on the last line here, so
     * that asynchronous cleanup may be cut short — confirm this is intended.
     */
    fun runOnDispose() {
        stopPlayback()
        processingChannel.close()
        stopProcessingLoop()
        coroutineScope.cancel()
    }
185
+
186
+ fun stopProcessingLoop() {
187
+ processingJob?.cancel()
188
+ processingJob = null
189
+ }
190
+
191
+
192
+ private fun ensureProcessingLoopStarted() {
193
+ if (processingJob == null || processingJob?.isActive != true) {
194
+ startProcessingLoop()
195
+ }
196
+ }
197
+
198
    /**
     * Launches the decode loop: drains [processingChannel], decoding each
     * chunk that belongs to the current turn and forwarding it to the
     * playback stage. The loop exits on its own when both queues are empty
     * and nothing is playing.
     */
    private fun startProcessingLoop() {
        processingJob =
            coroutineScope.launch {
                for (chunkData in processingChannel) {
                    // Chunks from a stale turn are dropped here.
                    // NOTE(review): a dropped chunk's promise is not settled on
                    // this path — confirm callers tolerate that.
                    if (chunkData.turnId == currentTurnId) {
                        processAndEnqueueChunk(chunkData)
                    }

                    if (processingChannel.isEmpty && !isPlaying && playbackChannel.isEmpty) {
                        break // Stop the loop if there's no more work to do
                    }
                }
                processingJob = null
            }
    }
213
+
214
    /**
     * Decodes one base64 chunk to normalized float samples, emits the
     * SoundStarted event when this is the first chunk of a turn, and enqueues
     * the result for playback. Rejects the chunk's promise on any failure.
     */
    private suspend fun processAndEnqueueChunk(chunkData: ChunkData) {
        try {
            val decodedBytes = Base64.decode(chunkData.chunk, Base64.DEFAULT)
            val audioDataWithoutRIFF = removeRIFFHeaderIfNeeded(decodedBytes)

            // Use the encoding specified in the chunk data
            val audioData = convertPCMDataToFloatArray(audioDataWithoutRIFF, chunkData.encoding)

            // Check if this is the first chunk and we need to send the SoundStarted event
            // Using hybrid approach checking both flag, segments count, and channel state
            val isFirstChunk = segmentsLeftToPlay == 0 &&
                playbackChannel.isEmpty &&
                (!hasSentSoundStartedEvent || !isPlaying)

            // Turns using the sentinel ID play silently with respect to events.
            if (isFirstChunk && chunkData.turnId != SUSPEND_SOUND_EVENT_TURN_ID) {
                sendSoundStartedEvent()
                hasSentSoundStartedEvent = true
            }

            playbackChannel.send(
                AudioChunk(
                    audioData,
                    chunkData.promise,
                    chunkData.turnId
                )
            )

            // Increment the segments counter
            segmentsLeftToPlay++

            if (!isPlaying) {
                startPlayback()
            }
        } catch (e: Exception) {
            chunkData.promise.reject("ERR_PROCESSING_AUDIO", e.message, e)
        }
    }
251
+
252
+ fun startPlayback(promise: Promise? = null) {
253
+ try {
254
+ if (!isPlaying) {
255
+ if (::audioTrack.isInitialized && audioTrack.state != AudioTrack.STATE_UNINITIALIZED) {
256
+ audioTrack.play()
257
+ isPlaying = true
258
+ startPlaybackLoop()
259
+ ensureProcessingLoopStarted()
260
+ } else {
261
+ throw IllegalStateException("AudioTrack not initialized or in invalid state")
262
+ }
263
+ }
264
+ promise?.resolve(null)
265
+ } catch (e: Exception) {
266
+ promise?.reject("ERR_START_PLAYBACK", e.message, e)
267
+ }
268
+ }
269
+
270
+ fun stopPlayback(promise: Promise? = null) {
271
+ if (!isPlaying || playbackChannel.isEmpty ) {
272
+ promise?.resolve(null)
273
+ return
274
+ }
275
+ isPlaying = false
276
+ coroutineScope.launch {
277
+ try {
278
+
279
+ if (::audioTrack.isInitialized && audioTrack.state != AudioTrack.STATE_UNINITIALIZED) {
280
+ try {
281
+ audioTrack.stop()
282
+ try {
283
+ audioTrack.flush()
284
+ } catch (e: Exception) {
285
+ Log.e("ExpoPlayStreamModule", "Error flushing AudioTrack: ${e.message}", e)
286
+ // Continue with other cleanup operations
287
+ }
288
+ } catch (e: Exception) {
289
+ Log.e("ExpoPlayStreamModule", "Error stopping AudioTrack: ${e.message}", e)
290
+ // Continue with other cleanup operations
291
+ }
292
+ }
293
+ // Safely cancel jobs
294
+ if (currentPlaybackJob != null) {
295
+ currentPlaybackJob?.cancelAndJoin()
296
+ currentPlaybackJob = null
297
+ }
298
+
299
+ if (processingJob != null) {
300
+ processingJob?.cancelAndJoin()
301
+ processingJob = null
302
+ }
303
+
304
+ // Resolve remaining promises in playbackChannel
305
+ for (chunk in playbackChannel) {
306
+ if (!chunk.isPromiseSettled) {
307
+ chunk.isPromiseSettled = true
308
+ chunk.promise.resolve(null)
309
+ }
310
+ }
311
+
312
+ if (!processingChannel.isClosedForSend) {
313
+ processingChannel.close()
314
+ }
315
+ if (!playbackChannel.isClosedForSend) {
316
+ playbackChannel.close()
317
+ }
318
+
319
+ // Reset the sound started event flag
320
+ hasSentSoundStartedEvent = false
321
+
322
+ // Reset the segments counter
323
+ segmentsLeftToPlay = 0
324
+
325
+ promise?.resolve(null)
326
+ } catch (e: CancellationException) {
327
+ Log.d("ExpoPlayStreamModule", "Stop playback was cancelled: ${e.message}")
328
+ promise?.resolve(null)
329
+ } catch (e: Exception) {
330
+ Log.d("ExpoPlayStreamModule", "Error in stopPlayback: ${e.message}")
331
+ promise?.reject("ERR_STOP_PLAYBACK", e.message, e)
332
+ }
333
+ }
334
+ }
335
+
336
    /**
     * Launches the playback coroutine that consumes decoded chunks and writes
     * them to the AudioTrack. When playback has been stopped, queued promises
     * are resolved to avoid leaking them.
     */
    private fun startPlaybackLoop() {
        currentPlaybackJob =
            coroutineScope.launch {
                playbackChannel.consumeAsFlow().collect { chunk ->
                    if (isPlaying) {

                        if (currentTurnId == chunk.turnId) {
                            playChunk(chunk)
                        }
                        // NOTE(review): chunks from a stale turn fall through
                        // without settling their promise — confirm intended.

                    } else {
                        // If not playing, we should resolve the promise to avoid leaks
                        chunk.promise.resolve(null)
                    }
                }
            }
    }
353
+
354
    /**
     * Writes one decoded chunk to the AudioTrack (blocking write on the IO
     * dispatcher). The chunk's promise is resolved as soon as the write is
     * accepted. The suspending call is resumed after ~50% of the chunk's
     * estimated duration so the loop can queue the next chunk early, while a
     * detached delay job waits out the remainder and then triggers
     * handleChunkCompletion(). On cancellation the delay job is cancelled and
     * the promise rejected if still unsettled.
     */
    private suspend fun playChunk(chunk: AudioChunk) {
        withContext(Dispatchers.IO) {
            try {
                val chunkSize = chunk.audioData.size

                suspendCancellableCoroutine { continuation ->
                    // Write the audio data — convert to shorts if the track is 16-bit
                    val written = if (trackUsesFloat) {
                        audioTrack.write(
                            chunk.audioData,
                            0,
                            chunkSize,
                            AudioTrack.WRITE_BLOCKING
                        )
                    } else {
                        // Clamp to [-1, 1] then scale to the 16-bit range.
                        val shortData = ShortArray(chunkSize) { i ->
                            (chunk.audioData[i].coerceIn(-1f, 1f) * 32767f).toInt().toShort()
                        }
                        audioTrack.write(
                            shortData,
                            0,
                            chunkSize,
                            AudioTrack.WRITE_BLOCKING
                        )
                    }

                    // Resolve the promise immediately after writing
                    // This lets the client know the data was accepted
                    if (!chunk.isPromiseSettled) {
                        chunk.isPromiseSettled = true
                        chunk.promise.resolve(null)
                    }

                    if (written != chunkSize) {
                        // If we couldn't write all the data, resume with failure
                        val error = Exception("Failed to write entire audio chunk")
                        continuation.resumeWith(Result.failure(error))
                        return@suspendCancellableCoroutine
                    }

                    // Calculate expected playback duration in milliseconds
                    // (mono track: `written` samples at config.sampleRate)
                    val playbackDurationMs = (written.toFloat() / config.sampleRate * 1000).toLong()

                    // Store a reference to the delay job
                    val delayJob = coroutineScope.launch {
                        // Wait for a portion of the audio to play
                        // Wait for 50% of duration, but cap at 90% of duration to ensure loop continues reasonably quickly
                        val waitTime = (playbackDurationMs * 0.5).toLong().coerceAtMost((playbackDurationMs * 0.9).toLong()) // Keep early resume
                        delay(waitTime) // Wait for partial duration
                        continuation.resumeWith(Result.success(Unit))

                        // Continue waiting in the background for the rest of the estimated duration
                        delay(playbackDurationMs - waitTime)
                        // Signal that this chunk has finished playing asynchronously
                        handleChunkCompletion(chunk)
                    }

                    continuation.invokeOnCancellation {
                        // Cancel the delay job to prevent it from resuming the continuation
                        delayJob.cancel()

                        // Settle the promise if it hasn't been settled yet
                        if (!chunk.isPromiseSettled) {
                            chunk.isPromiseSettled = true
                            chunk.promise.reject("ERR_PLAYBACK_CANCELLED", "Playback was cancelled", null)
                        }

                        // Any other cleanup specific to this chunk
                        // For example, if we were tracking this chunk in a map or list, we would remove it
                    }
                }
            } catch (e: Exception) {
                Log.e("ExpoPlayStreamModule", "Error in playChunk: ${e.message}", e)
                if (!chunk.isPromiseSettled) {
                    chunk.isPromiseSettled = true
                    chunk.promise.reject("ERR_PLAYBACK", e.message, e)
                }
            }
        }
    }
434
+
435
    /**
     * Handles the completion of a single audio chunk's estimated playback duration.
     * This is called asynchronously from the delay job within playChunk.
     * Decrements the segment counter and sends the final SoundChunkPlayed
     * event when this was the last chunk of the current turn.
     */
    private fun handleChunkCompletion(chunk: AudioChunk) {
        coroutineScope.launch { // Launch on default dispatcher for safety
            // Never let the counter go negative, even if completions race.
            segmentsLeftToPlay = (segmentsLeftToPlay - 1).coerceAtLeast(0)

            // Check if this was the last chunk for the current turn ID and the queue is empty
            val isFinalChunk = segmentsLeftToPlay == 0 && playbackChannel.isEmpty && chunk.turnId == currentTurnId

            // Turns using the sentinel ID never emit events.
            if (isFinalChunk && chunk.turnId != SUSPEND_SOUND_EVENT_TURN_ID) {
                sendSoundChunkPlayedEvent(isFinal = true)
                // Reset the flag after the final chunk event for this turn is sent
                hasSentSoundStartedEvent = false
            }
        }
    }
455
+
456
+ /**
457
+ * Sends the SoundStarted event to JavaScript
458
+ */
459
+ private fun sendSoundStartedEvent() {
460
+ eventSender?.sendExpoEvent(Constants.SOUND_STARTED_EVENT_NAME, Bundle())
461
+ }
462
+
463
+ /**
464
+ * Sends the SoundChunkPlayed event to JavaScript
465
+ * @param isFinal Boolean indicating if this is the final chunk in the playback sequence
466
+ */
467
+ private fun sendSoundChunkPlayedEvent(isFinal: Boolean) {
468
+ val params = Bundle()
469
+ params.putBoolean("isFinal", isFinal)
470
+ eventSender?.sendExpoEvent(Constants.SOUND_CHUNK_PLAYED_EVENT_NAME, params)
471
+ }
472
+
473
+ /**
474
+ * Converts PCM data to a float array based on the specified encoding format
475
+ * @param pcmData The raw PCM data bytes
476
+ * @param encoding The PCM encoding format (PCM_F32LE or PCM_S16LE)
477
+ * @return FloatArray containing normalized audio samples (-1.0 to 1.0)
478
+ */
479
+ private fun convertPCMDataToFloatArray(pcmData: ByteArray, encoding: PCMEncoding): FloatArray {
480
+ return when (encoding) {
481
+ PCMEncoding.PCM_F32LE -> {
482
+ // Handle Float32 PCM data (4 bytes per sample)
483
+ val floatBuffer = ByteBuffer.wrap(pcmData).order(ByteOrder.LITTLE_ENDIAN).asFloatBuffer()
484
+ val floatArray = FloatArray(floatBuffer.remaining())
485
+ floatBuffer.get(floatArray)
486
+ floatArray
487
+ }
488
+ PCMEncoding.PCM_S16LE -> {
489
+ // Handle Int16 PCM data (2 bytes per sample)
490
+ val shortBuffer = ByteBuffer.wrap(pcmData).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer()
491
+ val shortArray = ShortArray(shortBuffer.remaining())
492
+ shortBuffer.get(shortArray)
493
+ // Convert Int16 samples to normalized Float32 (-1.0 to 1.0)
494
+ FloatArray(shortArray.size) { index -> shortArray[index] / 32768.0f }
495
+ }
496
+ }
497
+ }
498
+
499
+ private fun removeRIFFHeaderIfNeeded(audioData: ByteArray): ByteArray {
500
+ val headerSize = 44
501
+ val riffHeader = "RIFF".toByteArray(Charsets.US_ASCII)
502
+
503
+ return if (audioData.size > headerSize && audioData.startsWith(riffHeader)) {
504
+ audioData.copyOfRange(headerSize, audioData.size)
505
+ } else {
506
+ audioData
507
+ }
508
+ }
509
+
510
+ private fun ByteArray.startsWith(prefix: ByteArray): Boolean {
511
+ if (this.size < prefix.size) return false
512
+ return prefix.contentEquals(this.sliceArray(prefix.indices))
513
+ }
514
+
515
    /**
     * Updates the sound configuration.
     *
     * Sequence: pause and release the current AudioTrack, swap in the new
     * config, build a fresh AudioTrack, and resume playback if it was active.
     * Jobs and channels are deliberately left alive across the swap.
     *
     * @param newConfig The new configuration to apply
     * @param promise Promise to resolve when configuration is updated
     */
    fun updateConfig(newConfig: SoundConfig, promise: Promise) {
        Log.d("ExpoPlayStreamModule", "Updating sound configuration - sampleRate: ${newConfig.sampleRate}, playbackMode: ${newConfig.playbackMode}")

        // Skip if configuration hasn't changed
        if (newConfig.sampleRate == config.sampleRate && newConfig.playbackMode == config.playbackMode) {
            Log.d("ExpoPlayStreamModule", "Configuration unchanged, skipping update")
            promise.resolve(null)
            return
        }

        // Save current playback state
        val wasPlaying = isPlaying

        // Step 1: Pause audio and cancel jobs (but don't close channels)
        pauseAudioAndJobs()

        // Step 2: Update configuration
        config = newConfig

        // Step 3: Create new AudioTrack with updated config
        initializeAudioTrack()

        // Step 4: Restart playback if it was active before
        if (wasPlaying) {
            restartPlayback()
        }

        promise.resolve(null)
    }
549
+
550
    /**
     * Pauses, flushes and then releases the current AudioTrack ahead of a
     * config swap. Jobs and channels are intentionally left running; the
     * playback loop idles because [isPlaying] is set to false.
     */
    private fun pauseAudioAndJobs() {
        if (isPlaying) {
            Log.d("ExpoPlayStreamModule", "Pausing audio before config update")

            try {
                // Pause and flush audio track
                if (::audioTrack.isInitialized && audioTrack.state != AudioTrack.STATE_UNINITIALIZED) {
                    try {
                        audioTrack.pause()
                        try {
                            audioTrack.flush()
                        } catch (e: Exception) {
                            Log.e("ExpoPlayStreamModule", "Error flushing AudioTrack in pauseAudioAndJobs: ${e.message}", e)
                            // Continue with other operations
                        }
                        Log.d("ExpoPlayStreamModule", "Audio paused, playback job left running")
                    } catch (e: Exception) {
                        Log.e("ExpoPlayStreamModule", "Error pausing AudioTrack: ${e.message}", e)
                    }
                } else {
                    Log.d("ExpoPlayStreamModule", "AudioTrack not initialized or in invalid state, skipping pause/flush")
                }

                // Update state
                isPlaying = false

                // Note: We don't cancel any jobs anymore
                // The playback loop will continue running but won't process chunks due to isPlaying being false
                // This avoids any issues with channels being closed when cancelling jobs
            } catch (e: Exception) {
                Log.e("ExpoPlayStreamModule", "Error pausing AudioTrack: ${e.message}", e)
            }
        }

        // Release AudioTrack (initializeAudioTrack() will build a new one)
        if (::audioTrack.isInitialized) {
            try {
                Log.d("ExpoPlayStreamModule", "Releasing AudioTrack")
                if (audioTrack.state != AudioTrack.STATE_UNINITIALIZED) {
                    audioTrack.release()
                }
            } catch (e: Exception) {
                Log.e("ExpoPlayStreamModule", "Error releasing AudioTrack: ${e.message}", e)
            }
        }
    }
599
+
600
    /**
     * Restarts playback with the newly created AudioTrack after a config swap.
     * Reuses the existing playback loop when it is still active; otherwise
     * starts a fresh one, and ensures the decode loop is running.
     */
    private fun restartPlayback() {
        try {
            Log.d("ExpoPlayStreamModule", "Restarting playback")

            // Start AudioTrack
            if (::audioTrack.isInitialized && audioTrack.state != AudioTrack.STATE_UNINITIALIZED) {
                try {
                    audioTrack.play()
                    isPlaying = true
                } catch (e: Exception) {
                    Log.e("ExpoPlayStreamModule", "Error starting AudioTrack: ${e.message}", e)
                    isPlaying = false
                    return
                }
            } else {
                Log.e("ExpoPlayStreamModule", "AudioTrack not initialized or in invalid state, cannot restart playback")
                return
            }

            // The playback loop is already running, we just need to set isPlaying to true
            // Only start a new loop if the current one doesn't exist
            if (currentPlaybackJob == null || currentPlaybackJob?.isActive != true) {
                Log.d("ExpoPlayStreamModule", "Starting new playback loop")
                startPlaybackLoop()
            } else {
                Log.d("ExpoPlayStreamModule", "Using existing playback loop")
            }

            // Ensure processing loop is running
            ensureProcessingLoopStarted()
        } catch (e: Exception) {
            Log.e("ExpoPlayStreamModule", "Error restarting playback: ${e.message}", e)
        }
    }
637
+
638
    /**
     * Resets the sound configuration to default values by delegating to
     * [updateConfig] with [SoundConfig.DEFAULT].
     *
     * @param promise Promise to resolve when configuration is reset
     */
    fun resetConfigToDefault(promise: Promise) {
        Log.d("ExpoPlayStreamModule", "Resetting sound configuration to default values")
        updateConfig(SoundConfig.DEFAULT, promise)
    }
646
+
647
+ companion object {
648
+ // Public constant for suspending sound events
649
+ public const val SUSPEND_SOUND_EVENT_TURN_ID: String = "suspend-sound-events"
650
+ }
651
+ }