@edkimmel/expo-audio-stream 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +5 -0
- package/.yarnrc.yml +8 -0
- package/NATIVE_EVENTS.md +270 -0
- package/README.md +289 -0
- package/android/build.gradle +92 -0
- package/android/src/main/AndroidManifest.xml +4 -0
- package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
- package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
- package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
- package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
- package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
- package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
- package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
- package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
- package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
- package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
- package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
- package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
- package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
- package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
- package/app.plugin.js +1 -0
- package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
- package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
- package/build/ExpoPlayAudioStreamModule.js +5 -0
- package/build/ExpoPlayAudioStreamModule.js.map +1 -0
- package/build/events.d.ts +36 -0
- package/build/events.d.ts.map +1 -0
- package/build/events.js +25 -0
- package/build/events.js.map +1 -0
- package/build/index.d.ts +125 -0
- package/build/index.d.ts.map +1 -0
- package/build/index.js +222 -0
- package/build/index.js.map +1 -0
- package/build/pipeline/index.d.ts +81 -0
- package/build/pipeline/index.d.ts.map +1 -0
- package/build/pipeline/index.js +140 -0
- package/build/pipeline/index.js.map +1 -0
- package/build/pipeline/types.d.ts +132 -0
- package/build/pipeline/types.d.ts.map +1 -0
- package/build/pipeline/types.js +5 -0
- package/build/pipeline/types.js.map +1 -0
- package/build/types.d.ts +221 -0
- package/build/types.d.ts.map +1 -0
- package/build/types.js +10 -0
- package/build/types.js.map +1 -0
- package/expo-module.config.json +9 -0
- package/ios/AudioPipeline.swift +562 -0
- package/ios/AudioUtils.swift +356 -0
- package/ios/ExpoPlayAudioStream.podspec +27 -0
- package/ios/ExpoPlayAudioStreamModule.swift +436 -0
- package/ios/ExpoPlayAudioStreamView.swift +7 -0
- package/ios/JitterBuffer.swift +208 -0
- package/ios/Logger.swift +7 -0
- package/ios/Microphone.swift +221 -0
- package/ios/MicrophoneDataDelegate.swift +4 -0
- package/ios/PipelineIntegration.swift +214 -0
- package/ios/RecordingResult.swift +10 -0
- package/ios/RecordingSettings.swift +11 -0
- package/ios/SharedAudioEngine.swift +484 -0
- package/ios/SoundConfig.swift +45 -0
- package/ios/SoundPlayer.swift +408 -0
- package/ios/SoundPlayerDelegate.swift +7 -0
- package/package.json +49 -0
- package/plugin/build/index.d.ts +5 -0
- package/plugin/build/index.js +28 -0
- package/plugin/src/index.ts +53 -0
- package/plugin/tsconfig.json +9 -0
- package/plugin/tsconfig.tsbuildinfo +1 -0
- package/src/ExpoPlayAudioStreamModule.ts +5 -0
- package/src/events.ts +66 -0
- package/src/index.ts +359 -0
- package/src/pipeline/index.ts +216 -0
- package/src/pipeline/types.ts +169 -0
- package/src/types.ts +270 -0
- package/tsconfig.json +9 -0
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
package expo.modules.audiostream.pipeline
|
|
2
|
+
|
|
3
|
+
import java.util.ArrayDeque
|
|
4
|
+
import java.util.concurrent.atomic.AtomicInteger
|
|
5
|
+
import java.util.concurrent.atomic.AtomicLong
|
|
6
|
+
import java.util.concurrent.locks.ReentrantLock
|
|
7
|
+
import kotlin.concurrent.withLock
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Lock-based chunk queue for PCM audio (16-bit signed, little-endian).
|
|
11
|
+
*
|
|
12
|
+
* Single producer (bridge thread) writes decoded PCM via [write].
|
|
13
|
+
* Single consumer (write thread) drains via [read].
|
|
14
|
+
* All shared state is guarded by a [ReentrantLock].
|
|
15
|
+
*
|
|
16
|
+
* Features:
|
|
17
|
+
* - Chunk queue: incoming [ShortArray] chunks are enqueued by reference
|
|
18
|
+
* (zero-copy on the producer side). No fixed capacity limit.
|
|
19
|
+
* - Priming gate: [read] returns silence until [targetBufferMs] of audio has
|
|
20
|
+
* accumulated (or [markEndOfStream] force-primes so the tail drains).
|
|
21
|
+
* - Silence-fill on underflow: when the buffer has fewer samples than the
|
|
22
|
+
* consumer requested, the remainder is filled with silence and an underrun
|
|
23
|
+
* is counted.
|
|
24
|
+
* - Telemetry via atomics: total frames written/read, underrun count, peak
|
|
25
|
+
* buffer level – all readable without acquiring the lock.
|
|
26
|
+
*/
|
|
27
|
+
class JitterBuffer(
    /** Sample rate in Hz — used to convert between samples and milliseconds. */
    private val sampleRate: Int,
    /** Number of channels (1 = mono, 2 = stereo). */
    private val channels: Int,
    /** How many ms of audio to accumulate before the priming gate opens. */
    private val targetBufferMs: Int
) {
    // ── Chunk queue storage ──────────────────────────────────────────────
    // Chunks are held by reference; readCursor is the consumer's offset
    // into the head chunk, count the total live samples across all chunks.
    private val chunks = ArrayDeque<ShortArray>()
    private var readCursor = 0
    private var count = 0

    // ── Priming gate ────────────────────────────────────────────────────
    // Samples that must accumulate before read() stops emitting silence.
    private val primingSamples: Int =
        (sampleRate * channels * targetBufferMs) / 1000
    private var primed = false

    // ── End-of-stream ───────────────────────────────────────────────────
    private var endOfStream = false

    // ── Lock guarding all mutable queue state ───────────────────────────
    private val lock = ReentrantLock()

    // ── Telemetry (lock-free reads) ─────────────────────────────────────
    /** Total samples written by the producer since last [resetTelemetry]. */
    val totalWritten = AtomicLong(0)

    /** Total samples read by the consumer since last [resetTelemetry]. */
    val totalRead = AtomicLong(0)

    /** Number of underrun events (consumer asked for more than available). */
    val underrunCount = AtomicInteger(0)

    /** Peak buffer level in samples observed at write time. */
    val peakLevel = AtomicInteger(0)

    // ── Producer API ────────────────────────────────────────────────────

    /**
     * Enqueue [length] samples of [samples] starting at [offset].
     *
     * A full-array push (offset == 0, length == samples.size) stores the
     * reference directly — zero copy on the producer side. Any other range
     * is copied out first.
     *
     * @return number of samples enqueued (always [length]).
     */
    fun write(samples: ShortArray, offset: Int = 0, length: Int = samples.size): Int {
        lock.withLock {
            val isWholeArray = offset == 0 && length == samples.size
            chunks.addLast(
                if (isWholeArray) samples
                else samples.copyOfRange(offset, offset + length)
            )
            count += length

            // Track the high-water mark for diagnostics.
            if (count > peakLevel.get()) peakLevel.set(count)
            totalWritten.addAndGet(length.toLong())

            // Open the priming gate once enough audio has accumulated.
            if (!primed && count >= primingSamples) primed = true

            return length
        }
    }

    // ── Consumer API ────────────────────────────────────────────────────

    /**
     * Fill [dest] with up to [length] samples from the chunk queue.
     *
     * While the priming gate is closed, [dest] is filled with silence so the
     * consumer keeps feeding AudioTrack. Once primed (or force-primed via
     * [markEndOfStream]), available samples are copied and any shortfall is
     * zero-filled; a shortfall before end-of-stream counts as an underrun.
     *
     * @return the number of samples placed in [dest] (always [length]).
     */
    fun read(dest: ShortArray, offset: Int = 0, length: Int = dest.size): Int {
        lock.withLock {
            if (!primed) {
                // Gate closed — emit silence only; queue contents untouched.
                dest.fill(0, offset, offset + length)
                return length
            }

            var outPos = offset
            var needed = length

            while (needed > 0) {
                val head = chunks.peekFirst() ?: break
                val n = minOf(head.size - readCursor, needed)

                System.arraycopy(head, readCursor, dest, outPos, n)
                readCursor += n
                outPos += n
                needed -= n
                count -= n

                if (readCursor == head.size) {
                    // Head chunk fully consumed — drop it.
                    chunks.pollFirst()
                    readCursor = 0
                }
            }

            if (needed > 0) {
                // Underflow: pad with silence. A drained tail after
                // end-of-stream is expected and not an underrun.
                dest.fill(0, outPos, outPos + needed)
                if (!endOfStream) underrunCount.incrementAndGet()
            }

            totalRead.addAndGet(length.toLong())
            return length
        }
    }

    // ── Control API ─────────────────────────────────────────────────────

    /**
     * Mark that the producer will not write any more data for this turn.
     * Force-primes the buffer so the consumer can drain whatever remains
     * rather than waiting for [targetBufferMs] to fill.
     */
    fun markEndOfStream() {
        lock.withLock {
            endOfStream = true
            primed = true // force open the gate so tail audio drains
        }
    }

    /** @return `true` after [markEndOfStream] was called AND the buffer is empty. */
    fun isDrained(): Boolean = lock.withLock { endOfStream && count == 0 }

    /** Current buffer level in samples (snapshot). */
    fun availableSamples(): Int = lock.withLock { count }

    /** Current buffer level converted to milliseconds. */
    fun bufferedMs(): Int = lock.withLock {
        val samplesPerSecond = sampleRate * channels
        if (samplesPerSecond == 0) 0 else (count * 1000) / samplesPerSecond
    }

    /** Whether the priming gate is currently open. */
    fun isPrimed(): Boolean = lock.withLock { primed }

    /**
     * Reset the buffer to its initial empty state.
     * Called on turn changes to discard stale audio.
     */
    fun reset() {
        lock.withLock {
            chunks.clear()
            readCursor = 0
            count = 0
            primed = false
            endOfStream = false
        }
        // Telemetry deliberately survives reset() — the pipeline clears it
        // at reconnect via [resetTelemetry].
    }

    /** Reset all telemetry counters to zero. */
    fun resetTelemetry() {
        totalWritten.set(0)
        totalRead.set(0)
        underrunCount.set(0)
        peakLevel.set(0)
    }
}
|
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
package expo.modules.audiostream.pipeline
|
|
2
|
+
|
|
3
|
+
import android.content.Context
|
|
4
|
+
import android.os.Bundle
|
|
5
|
+
import android.util.Log
|
|
6
|
+
import expo.modules.audiostream.EventSender
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Bridge layer wiring [AudioPipeline] into the existing ExpoPlayAudioStreamModule.
|
|
10
|
+
*
|
|
11
|
+
* This class holds the pipeline instance, implements [PipelineListener] to forward
|
|
12
|
+
* native events as Expo bridge events, and exposes the 7 bridge methods that the
|
|
13
|
+
* module's definition() block should declare.
|
|
14
|
+
*
|
|
15
|
+
* ┌─────────────────────────────────────────────────────────────────────────┐
|
|
16
|
+
* │ INTEGRATION STEPS for ExpoPlayAudioStreamModule.kt │
|
|
17
|
+
* │ │
|
|
18
|
+
* │ 1. Add field: │
|
|
19
|
+
* │ private lateinit var pipelineIntegration: PipelineIntegration │
|
|
20
|
+
* │ │
|
|
21
|
+
* │ 2. Initialize after existing managers (inside definition() block): │
|
|
22
|
+
* │ initializePipeline() │
|
|
23
|
+
* │ And add the method: │
|
|
24
|
+
* │ private fun initializePipeline() { │
|
|
25
|
+
* │ val ctx = appContext.reactContext │
|
|
26
|
+
* │ ?: throw IllegalStateException("Context not available") │
|
|
27
|
+
* │ pipelineIntegration = PipelineIntegration(ctx, this) │
|
|
28
|
+
* │ } │
|
|
29
|
+
* │ │
|
|
30
|
+
* │ 3. Add 8 event names to the Events() block: │
|
|
31
|
+
* │ PipelineIntegration.EVENT_STATE_CHANGED, │
|
|
32
|
+
* │ PipelineIntegration.EVENT_PLAYBACK_STARTED, │
|
|
33
|
+
* │ PipelineIntegration.EVENT_ERROR, │
|
|
34
|
+
* │ PipelineIntegration.EVENT_ZOMBIE_DETECTED, │
|
|
35
|
+
* │ PipelineIntegration.EVENT_UNDERRUN, │
|
|
36
|
+
* │ PipelineIntegration.EVENT_DRAINED, │
|
|
37
|
+
* │ PipelineIntegration.EVENT_AUDIO_FOCUS_LOST, │
|
|
38
|
+
* │ PipelineIntegration.EVENT_AUDIO_FOCUS_RESUMED │
|
|
39
|
+
* │ │
|
|
40
|
+
* │ 4. Add 7 AsyncFunction / Function declarations: │
|
|
41
|
+
* │ │
|
|
42
|
+
* │ AsyncFunction("connectPipeline") { options: Map<String, Any?>, │
|
|
43
|
+
* │ promise: Promise -> │
|
|
44
|
+
* │ pipelineIntegration.connect(options, promise) │
|
|
45
|
+
* │ } │
|
|
46
|
+
* │ │
|
|
47
|
+
* │ AsyncFunction("pushPipelineAudio") { options: Map<String, Any?>, │
|
|
48
|
+
* │ promise: Promise -> │
|
|
49
|
+
* │ pipelineIntegration.pushAudio(options, promise) │
|
|
50
|
+
* │ } │
|
|
51
|
+
* │ │
|
|
52
|
+
* │ Function("pushPipelineAudioSync") { options: Map<String, Any?> ->│
|
|
53
|
+
* │ pipelineIntegration.pushAudioSync(options) │
|
|
54
|
+
* │ } │
|
|
55
|
+
* │ │
|
|
56
|
+
* │ AsyncFunction("disconnectPipeline") { promise: Promise -> │
|
|
57
|
+
* │ pipelineIntegration.disconnect(promise) │
|
|
58
|
+
* │ } │
|
|
59
|
+
* │ │
|
|
60
|
+
* │ AsyncFunction("invalidatePipelineTurn") { │
|
|
61
|
+
* │ options: Map<String, Any?>, promise: Promise -> │
|
|
62
|
+
* │ pipelineIntegration.invalidateTurn(options, promise) │
|
|
63
|
+
* │ } │
|
|
64
|
+
* │ │
|
|
65
|
+
* │ Function("getPipelineTelemetry") { │
|
|
66
|
+
* │ pipelineIntegration.getTelemetry() │
|
|
67
|
+
* │ } │
|
|
68
|
+
* │ │
|
|
69
|
+
* │ Function("getPipelineState") { │
|
|
70
|
+
* │ pipelineIntegration.getState() │
|
|
71
|
+
* │ } │
|
|
72
|
+
* │ │
|
|
73
|
+
* │ 5. Call in OnDestroy and destroy(): │
|
|
74
|
+
* │ pipelineIntegration.destroy() │
|
|
75
|
+
* │ │
|
|
76
|
+
* └─────────────────────────────────────────────────────────────────────────┘
|
|
77
|
+
*/
|
|
78
|
+
class PipelineIntegration(
    private val context: Context,
    private val eventSender: EventSender
) : PipelineListener {

    companion object {
        private const val TAG = "PipelineIntegration"

        // ── Event name constants (match the TS PipelineEventMap keys) ───
        const val EVENT_STATE_CHANGED = "PipelineStateChanged"
        const val EVENT_PLAYBACK_STARTED = "PipelinePlaybackStarted"
        const val EVENT_ERROR = "PipelineError"
        const val EVENT_ZOMBIE_DETECTED = "PipelineZombieDetected"
        const val EVENT_UNDERRUN = "PipelineUnderrun"
        const val EVENT_DRAINED = "PipelineDrained"
        const val EVENT_AUDIO_FOCUS_LOST = "PipelineAudioFocusLost"
        const val EVENT_AUDIO_FOCUS_RESUMED = "PipelineAudioFocusResumed"
    }

    // Current pipeline instance; null when disconnected.
    private var pipeline: AudioPipeline? = null

    // ════════════════════════════════════════════════════════════════════
    // Bridge methods
    // ════════════════════════════════════════════════════════════════════

    /**
     * Connect the pipeline. Creates a new [AudioPipeline] with the given options.
     *
     * Options map:
     * - `sampleRate` (Int, default 24000)
     * - `channelCount` (Int, default 1)
     * - `targetBufferMs` (Int, default 80)
     */
    fun connect(options: Map<String, Any?>, promise: expo.modules.kotlin.Promise) {
        try {
            // Tear down any existing pipeline first, and clear the field so a
            // failure below cannot leave a stale, disconnected instance stored.
            pipeline?.disconnect()
            pipeline = null

            val sampleRate = (options["sampleRate"] as? Number)?.toInt() ?: 24000
            val channelCount = (options["channelCount"] as? Number)?.toInt() ?: 1
            val targetBufferMs = (options["targetBufferMs"] as? Number)?.toInt() ?: 80

            // Build and connect locally; only publish to the field once
            // connect() has succeeded (avoids `!!` and half-connected state).
            val newPipeline = AudioPipeline(
                context = context,
                sampleRate = sampleRate,
                channelCount = channelCount,
                targetBufferMs = targetBufferMs,
                listener = this
            )
            newPipeline.connect()
            pipeline = newPipeline

            val result = Bundle().apply {
                putInt("sampleRate", sampleRate)
                putInt("channelCount", channelCount)
                putInt("targetBufferMs", targetBufferMs)
                putInt("frameSizeSamples", newPipeline.frameSizeSamples)
            }
            promise.resolve(result)
        } catch (e: Exception) {
            Log.e(TAG, "connect failed", e)
            promise.reject("PIPELINE_CONNECT_ERROR", e.message ?: "Unknown error", e)
        }
    }

    /**
     * Push base64-encoded PCM audio into the jitter buffer (async — resolves Promise).
     *
     * Options map:
     * - `audio` (String) — base64-encoded PCM16 LE data
     * - `turnId` (String) — conversation turn identifier
     * - `isFirstChunk` (Boolean, default false)
     * - `isLastChunk` (Boolean, default false)
     *
     * Rejects with PIPELINE_PUSH_ERROR on missing fields or no pipeline.
     */
    fun pushAudio(options: Map<String, Any?>, promise: expo.modules.kotlin.Promise) {
        try {
            val audio = options["audio"] as? String
                ?: throw IllegalArgumentException("Missing 'audio' field")
            val turnId = options["turnId"] as? String
                ?: throw IllegalArgumentException("Missing 'turnId' field")
            val isFirstChunk = options["isFirstChunk"] as? Boolean ?: false
            val isLastChunk = options["isLastChunk"] as? Boolean ?: false

            val p = pipeline
                ?: throw IllegalStateException("Pipeline not connected")
            p.pushAudio(audio, turnId, isFirstChunk, isLastChunk)
            promise.resolve(null)
        } catch (e: Exception) {
            Log.e(TAG, "pushAudio failed", e)
            promise.reject("PIPELINE_PUSH_ERROR", e.message ?: "Unknown error", e)
        }
    }

    /**
     * Push base64-encoded PCM audio synchronously (Function, not AsyncFunction).
     * No Promise overhead — designed for the hot path.
     *
     * Same options as [pushAudio].
     *
     * @return `true` only if the chunk was handed to a connected pipeline;
     *         `false` on missing fields, no pipeline, or any failure.
     */
    fun pushAudioSync(options: Map<String, Any?>): Boolean {
        return try {
            val audio = options["audio"] as? String ?: return false
            val turnId = options["turnId"] as? String ?: return false
            val isFirstChunk = options["isFirstChunk"] as? Boolean ?: false
            val isLastChunk = options["isLastChunk"] as? Boolean ?: false

            // BUGFIX: previously reported success even with no pipeline
            // connected, silently dropping the chunk. Mirror pushAudio's
            // "not connected" semantics by returning false instead.
            val p = pipeline ?: return false
            p.pushAudio(audio, turnId, isFirstChunk, isLastChunk)
            true
        } catch (e: Exception) {
            Log.e(TAG, "pushAudioSync failed", e)
            false
        }
    }

    /**
     * Disconnect the pipeline. Tears down AudioTrack, write thread, etc.
     * Resolves even when no pipeline is connected (idempotent).
     */
    fun disconnect(promise: expo.modules.kotlin.Promise) {
        try {
            pipeline?.disconnect()
            pipeline = null
            promise.resolve(null)
        } catch (e: Exception) {
            Log.e(TAG, "disconnect failed", e)
            promise.reject("PIPELINE_DISCONNECT_ERROR", e.message ?: "Unknown error", e)
        }
    }

    /**
     * Invalidate the current turn — discards stale audio in the jitter buffer.
     *
     * Options map:
     * - `turnId` (String) — the new turn identifier
     */
    fun invalidateTurn(options: Map<String, Any?>, promise: expo.modules.kotlin.Promise) {
        try {
            val turnId = options["turnId"] as? String
                ?: throw IllegalArgumentException("Missing 'turnId' field")

            // No-op when not connected; resolves either way.
            pipeline?.invalidateTurn(turnId)
            promise.resolve(null)
        } catch (e: Exception) {
            Log.e(TAG, "invalidateTurn failed", e)
            promise.reject("PIPELINE_INVALIDATE_ERROR", e.message ?: "Unknown error", e)
        }
    }

    /**
     * Get current pipeline telemetry as a Bundle (returned to JS as a map).
     * When no pipeline is connected, returns a minimal bundle with the IDLE state.
     */
    fun getTelemetry(): Bundle {
        return pipeline?.getTelemetry() ?: Bundle().apply {
            putString("state", PipelineState.IDLE.value)
        }
    }

    /**
     * Get current pipeline state string (IDLE when not connected).
     */
    fun getState(): String {
        return pipeline?.getState()?.value ?: PipelineState.IDLE.value
    }

    /**
     * Log AudioTrack health — called from the device callback to capture
     * track state at the moment of a route change.
     */
    fun logAudioTrackHealth(trigger: String) {
        pipeline?.logTrackHealth(trigger) ?: Log.d(TAG, "logAudioTrackHealth($trigger) — no pipeline connected")
    }

    /**
     * Destroy the integration — called from OnDestroy / destroy().
     */
    fun destroy() {
        pipeline?.disconnect()
        pipeline = null
    }

    // ════════════════════════════════════════════════════════════════════
    // PipelineListener implementation → Expo bridge events
    // ════════════════════════════════════════════════════════════════════

    override fun onStateChanged(state: PipelineState) {
        sendEvent(EVENT_STATE_CHANGED, Bundle().apply {
            putString("state", state.value)
        })
    }

    override fun onPlaybackStarted(turnId: String) {
        sendEvent(EVENT_PLAYBACK_STARTED, Bundle().apply {
            putString("turnId", turnId)
        })
    }

    override fun onError(code: String, message: String) {
        sendEvent(EVENT_ERROR, Bundle().apply {
            putString("code", code)
            putString("message", message)
        })
    }

    override fun onZombieDetected(playbackHead: Long, stalledMs: Long) {
        sendEvent(EVENT_ZOMBIE_DETECTED, Bundle().apply {
            putLong("playbackHead", playbackHead)
            putLong("stalledMs", stalledMs)
        })
    }

    override fun onUnderrun(count: Int) {
        sendEvent(EVENT_UNDERRUN, Bundle().apply {
            putInt("count", count)
        })
    }

    override fun onDrained(turnId: String) {
        sendEvent(EVENT_DRAINED, Bundle().apply {
            putString("turnId", turnId)
        })
    }

    override fun onAudioFocusLost() {
        sendEvent(EVENT_AUDIO_FOCUS_LOST, Bundle())
    }

    override fun onAudioFocusResumed() {
        sendEvent(EVENT_AUDIO_FOCUS_RESUMED, Bundle())
    }

    // ── Helper ──────────────────────────────────────────────────────────

    // Event delivery is best-effort: a bridge failure must never crash audio.
    private fun sendEvent(eventName: String, params: Bundle) {
        try {
            eventSender.sendExpoEvent(eventName, params)
        } catch (e: Exception) {
            Log.w(TAG, "Failed to send event $eventName", e)
        }
    }
}
|
package/app.plugin.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Expo config plugin entry point — re-exports the compiled plugin from plugin/build.
module.exports = require('./plugin/build')
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ExpoPlayAudioStreamModule.d.ts","sourceRoot":"","sources":["../src/ExpoPlayAudioStreamModule.ts"],"names":[],"mappings":";AAIA,wBAA0D"}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import { requireNativeModule } from 'expo-modules-core';
// Loads the native module object from the JSI, or falls back to the bridge
// module (from NativeModulesProxy) when the remote debugger is attached.
const ExpoPlayAudioStreamModule = requireNativeModule('ExpoPlayAudioStream');
export default ExpoPlayAudioStreamModule;
//# sourceMappingURL=ExpoPlayAudioStreamModule.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ExpoPlayAudioStreamModule.js","sourceRoot":"","sources":["../src/ExpoPlayAudioStreamModule.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC;AAExD,kEAAkE;AAClE,4EAA4E;AAC5E,eAAe,mBAAmB,CAAC,qBAAqB,CAAC,CAAC","sourcesContent":["import { requireNativeModule } from 'expo-modules-core';\n\n// It loads the native module object from the JSI or falls back to\n// the bridge module (from NativeModulesProxy) if the remote debugger is on.\nexport default requireNativeModule('ExpoPlayAudioStream');\n"]}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { type EventSubscription } from "expo-modules-core";
|
|
2
|
+
export type Subscription = EventSubscription;
|
|
3
|
+
export interface AudioEventPayload {
|
|
4
|
+
encoded?: string;
|
|
5
|
+
buffer?: Float32Array;
|
|
6
|
+
fileUri: string;
|
|
7
|
+
lastEmittedSize: number;
|
|
8
|
+
position: number;
|
|
9
|
+
deltaSize: number;
|
|
10
|
+
totalSize: number;
|
|
11
|
+
mimeType: string;
|
|
12
|
+
streamUuid: string;
|
|
13
|
+
soundLevel?: number;
|
|
14
|
+
}
|
|
15
|
+
export type SoundChunkPlayedEventPayload = {
|
|
16
|
+
isFinal: boolean;
|
|
17
|
+
};
|
|
18
|
+
export declare const DeviceReconnectedReasons: {
|
|
19
|
+
readonly newDeviceAvailable: "newDeviceAvailable";
|
|
20
|
+
readonly oldDeviceUnavailable: "oldDeviceUnavailable";
|
|
21
|
+
readonly unknown: "unknown";
|
|
22
|
+
};
|
|
23
|
+
export type DeviceReconnectedReason = (typeof DeviceReconnectedReasons)[keyof typeof DeviceReconnectedReasons];
|
|
24
|
+
export type DeviceReconnectedEventPayload = {
|
|
25
|
+
reason: DeviceReconnectedReason;
|
|
26
|
+
};
|
|
27
|
+
export declare const AudioEvents: {
|
|
28
|
+
AudioData: string;
|
|
29
|
+
SoundChunkPlayed: string;
|
|
30
|
+
SoundStarted: string;
|
|
31
|
+
DeviceReconnected: string;
|
|
32
|
+
};
|
|
33
|
+
export declare function addAudioEventListener(listener: (event: AudioEventPayload) => Promise<void>): EventSubscription;
|
|
34
|
+
export declare function addSoundChunkPlayedListener(listener: (event: SoundChunkPlayedEventPayload) => Promise<void>): EventSubscription;
|
|
35
|
+
export declare function subscribeToEvent<T extends unknown>(eventName: string, listener: (event: T | undefined) => Promise<void>): EventSubscription;
|
|
36
|
+
//# sourceMappingURL=events.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"events.d.ts","sourceRoot":"","sources":["../src/events.ts"],"names":[],"mappings":"AAEA,OAAO,EAAgB,KAAK,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AAGzE,MAAM,MAAM,YAAY,GAAG,iBAAiB,CAAC;AAM7C,MAAM,WAAW,iBAAiB;IAChC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,YAAY,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,eAAe,EAAE,MAAM,CAAC;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,MAAM,4BAA4B,GAAG;IACzC,OAAO,EAAE,OAAO,CAAC;CAClB,CAAC;AAEF,eAAO,MAAM,wBAAwB;;;;CAI3B,CAAC;AAEX,MAAM,MAAM,uBAAuB,GACjC,CAAC,OAAO,wBAAwB,CAAC,CAAC,MAAM,OAAO,wBAAwB,CAAC,CAAC;AAE3E,MAAM,MAAM,6BAA6B,GAAG;IAC1C,MAAM,EAAE,uBAAuB,CAAC;CACjC,CAAC;AAEF,eAAO,MAAM,WAAW;;;;;CAKvB,CAAC;AAEF,wBAAgB,qBAAqB,CACnC,QAAQ,EAAE,CAAC,KAAK,EAAE,iBAAiB,KAAK,OAAO,CAAC,IAAI,CAAC,GACpD,iBAAiB,CAEnB;AAED,wBAAgB,2BAA2B,CACzC,QAAQ,EAAE,CAAC,KAAK,EAAE,4BAA4B,KAAK,OAAO,CAAC,IAAI,CAAC,GAC/D,iBAAiB,CAEnB;AAED,wBAAgB,gBAAgB,CAAC,CAAC,SAAS,OAAO,EAChD,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,SAAS,KAAK,OAAO,CAAC,IAAI,CAAC,GAChD,iBAAiB,CAEnB"}
|
package/build/events.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
// packages/expo-audio-stream/src/events.ts
import { EventEmitter } from "expo-modules-core";
import ExpoPlayAudioStreamModule from "./ExpoPlayAudioStreamModule";
// Single shared emitter bound to the native module.
const emitter = new EventEmitter(ExpoPlayAudioStreamModule);
// Reasons reported alongside a DeviceReconnected event.
export const DeviceReconnectedReasons = {
    newDeviceAvailable: "newDeviceAvailable",
    oldDeviceUnavailable: "oldDeviceUnavailable",
    unknown: "unknown",
};
// Names of the native events this module emits.
export const AudioEvents = {
    AudioData: "AudioData",
    SoundChunkPlayed: "SoundChunkPlayed",
    SoundStarted: "SoundStarted",
    DeviceReconnected: "DeviceReconnected",
};
// Subscribe to microphone audio-data events.
export function addAudioEventListener(listener) {
    return subscribeToEvent("AudioData", listener);
}
// Subscribe to sound-chunk-played events.
export function addSoundChunkPlayedListener(listener) {
    return subscribeToEvent("SoundChunkPlayed", listener);
}
// Generic subscription helper for any named native event.
export function subscribeToEvent(eventName, listener) {
    return emitter.addListener(eventName, listener);
}
//# sourceMappingURL=events.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"events.js","sourceRoot":"","sources":["../src/events.ts"],"names":[],"mappings":"AAAA,2CAA2C;AAE3C,OAAO,EAAE,YAAY,EAA0B,MAAM,mBAAmB,CAAC;AAKzE,OAAO,yBAAyB,MAAM,6BAA6B,CAAC;AAEpE,MAAM,OAAO,GAAG,IAAI,YAAY,CAAC,yBAAyB,CAAC,CAAC;AAmB5D,MAAM,CAAC,MAAM,wBAAwB,GAAG;IACtC,kBAAkB,EAAE,oBAAoB;IACxC,oBAAoB,EAAE,sBAAsB;IAC5C,OAAO,EAAE,SAAS;CACV,CAAC;AASX,MAAM,CAAC,MAAM,WAAW,GAAG;IACzB,SAAS,EAAE,WAAW;IACtB,gBAAgB,EAAE,kBAAkB;IACpC,YAAY,EAAE,cAAc;IAC5B,iBAAiB,EAAE,mBAAmB;CACvC,CAAC;AAEF,MAAM,UAAU,qBAAqB,CACnC,QAAqD;IAErD,OAAQ,OAAe,CAAC,WAAW,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AAC7D,CAAC;AAED,MAAM,UAAU,2BAA2B,CACzC,QAAgE;IAEhE,OAAQ,OAAe,CAAC,WAAW,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;AACpE,CAAC;AAED,MAAM,UAAU,gBAAgB,CAC9B,SAAiB,EACjB,QAAiD;IAEjD,OAAQ,OAAe,CAAC,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;AAC3D,CAAC","sourcesContent":["// packages/expo-audio-stream/src/events.ts\n\nimport { EventEmitter, type EventSubscription } from \"expo-modules-core\";\n\n// Type alias for backwards compatibility\nexport type Subscription = EventSubscription;\n\nimport ExpoPlayAudioStreamModule from \"./ExpoPlayAudioStreamModule\";\n\nconst emitter = new EventEmitter(ExpoPlayAudioStreamModule);\n\nexport interface AudioEventPayload {\n encoded?: string;\n buffer?: Float32Array;\n fileUri: string;\n lastEmittedSize: number;\n position: number;\n deltaSize: number;\n totalSize: number;\n mimeType: string;\n streamUuid: string;\n soundLevel?: number;\n}\n\nexport type SoundChunkPlayedEventPayload = {\n isFinal: boolean;\n};\n\nexport const DeviceReconnectedReasons = {\n newDeviceAvailable: \"newDeviceAvailable\",\n oldDeviceUnavailable: \"oldDeviceUnavailable\",\n unknown: \"unknown\",\n} as const;\n\nexport type DeviceReconnectedReason =\n (typeof DeviceReconnectedReasons)[keyof typeof DeviceReconnectedReasons];\n\nexport type DeviceReconnectedEventPayload = {\n reason: DeviceReconnectedReason;\n};\n\nexport const AudioEvents = {\n AudioData: \"AudioData\",\n SoundChunkPlayed: 
\"SoundChunkPlayed\",\n SoundStarted: \"SoundStarted\",\n DeviceReconnected: \"DeviceReconnected\",\n};\n\nexport function addAudioEventListener(\n listener: (event: AudioEventPayload) => Promise<void>\n): EventSubscription {\n return (emitter as any).addListener(\"AudioData\", listener);\n}\n\nexport function addSoundChunkPlayedListener(\n listener: (event: SoundChunkPlayedEventPayload) => Promise<void>\n): EventSubscription {\n return (emitter as any).addListener(\"SoundChunkPlayed\", listener);\n}\n\nexport function subscribeToEvent<T extends unknown>(\n eventName: string,\n listener: (event: T | undefined) => Promise<void>\n): EventSubscription {\n return (emitter as any).addListener(eventName, listener);\n}\n"]}
|