@edkimmel/expo-audio-stream 0.4.0 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -79,6 +79,7 @@ const result = await Pipeline.connect({
79
79
  channelCount: 1,
80
80
  targetBufferMs: 80,
81
81
  frequencyBandIntervalMs: 100, // optional: emit frequency bands every 100ms
82
+ audioMode: "mixWithOthers", // coexist with other apps (default)
82
83
  });
83
84
 
84
85
  // Subscribe to events
@@ -226,11 +227,23 @@ interface ConnectPipelineOptions {
226
227
  sampleRate?: number; // default 24000
227
228
  channelCount?: number; // default 1 (mono)
228
229
  targetBufferMs?: number; // ms to buffer before priming gate opens (default 80)
230
+ playbackMode?: "voiceProcessing" | "conversation";
229
231
  frequencyBandIntervalMs?: number; // emit PipelineFrequencyBands every N ms (omit to disable)
230
232
  frequencyBandConfig?: FrequencyBandConfig; // crossover frequencies (optional)
233
+ audioMode?: "mixWithOthers" | "duckOthers" | "doNotMix"; // default "mixWithOthers"
231
234
  }
232
235
  ```
233
236
 
237
+ #### `audioMode`
238
+
239
+ Controls how pipeline playback coexists with audio from other apps on the device. Default: `"mixWithOthers"` (matches expo-audio).
240
+
241
+ - **`"mixWithOthers"`** — plays alongside other apps without interrupting them. On Android no audio focus is requested; on iOS the session uses the `.mixWithOthers` category option. Best for sound effects and short clips.
242
+ - **`"duckOthers"`** — requests audio focus with ducking. Other apps lower their volume but keep playing.
243
+ - **`"doNotMix"`** — requests exclusive audio focus. Other apps pause.
244
+
245
+ > **Breaking change:** The default was effectively `"doNotMix"` in prior versions. If you rely on the previous behavior — where connecting the pipeline pauses other apps' audio — pass `audioMode: "doNotMix"` explicitly when calling `Pipeline.connect`.
246
+
234
247
  ### PushPipelineAudioOptions
235
248
 
236
249
  ```typescript
@@ -58,6 +58,29 @@ interface PipelineListener {
58
58
  // AudioPipeline
59
59
  // ────────────────────────────────────────────────────────────────────────────
60
60
 
61
+ /**
62
+ * Controls how pipeline playback coexists with audio from other apps.
63
+ * Mirrors the `PipelineAudioMode` TS type.
64
+ */
65
+ enum class AudioMode {
66
+ /** No focus request — playback mixes freely with other audio. */
67
+ MIX_WITH_OTHERS,
68
+
69
+ /** Request transient focus with ducking — others lower volume but keep playing. */
70
+ DUCK_OTHERS,
71
+
72
+ /** Request exclusive focus — others pause. */
73
+ DO_NOT_MIX;
74
+
75
+ companion object {
76
+ fun fromString(value: String?): AudioMode = when (value) {
77
+ "duckOthers" -> DUCK_OTHERS
78
+ "doNotMix" -> DO_NOT_MIX
79
+ else -> MIX_WITH_OTHERS // default includes null, "mixWithOthers", and unknown
80
+ }
81
+ }
82
+ }
83
+
61
84
  /**
62
85
  * Core orchestrator for the native audio pipeline.
63
86
  *
@@ -90,6 +113,7 @@ class AudioPipeline(
90
113
  private val frequencyBandIntervalMs: Int = 100,
91
114
  private val lowCrossoverHz: Float = 300f,
92
115
  private val highCrossoverHz: Float = 2000f,
116
+ private val audioMode: AudioMode = AudioMode.MIX_WITH_OTHERS,
93
117
  private val listener: PipelineListener
94
118
  ) {
95
119
  companion object {
@@ -550,18 +574,46 @@ class AudioPipeline(
550
574
  // ════════════════════════════════════════════════════════════════════
551
575
 
552
576
  private fun requestAudioFocus() {
553
- val result = audioManager.requestAudioFocus(
554
- focusChangeListener,
555
- AudioManager.STREAM_MUSIC,
556
- AudioManager.AUDIOFOCUS_GAIN
557
- )
558
- hasAudioFocus.set(result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED)
559
- if (!hasAudioFocus.get()) {
560
- Log.w(TAG, "Audio focus request denied")
577
+ when (audioMode) {
578
+ AudioMode.MIX_WITH_OTHERS -> {
579
+ // No focus request — we coexist silently with other apps.
580
+ // Mark as "has focus" so the write loop proceeds unconditionally.
581
+ hasAudioFocus.set(true)
582
+ audioFocusLost.set(false)
583
+ Log.d(TAG, "Audio focus skipped (mixWithOthers)")
584
+ }
585
+ AudioMode.DUCK_OTHERS -> {
586
+ val result = audioManager.requestAudioFocus(
587
+ focusChangeListener,
588
+ AudioManager.STREAM_MUSIC,
589
+ AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
590
+ )
591
+ hasAudioFocus.set(result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED)
592
+ if (!hasAudioFocus.get()) {
593
+ Log.w(TAG, "Audio focus request (duckOthers) denied")
594
+ }
595
+ }
596
+ AudioMode.DO_NOT_MIX -> {
597
+ val result = audioManager.requestAudioFocus(
598
+ focusChangeListener,
599
+ AudioManager.STREAM_MUSIC,
600
+ AudioManager.AUDIOFOCUS_GAIN
601
+ )
602
+ hasAudioFocus.set(result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED)
603
+ if (!hasAudioFocus.get()) {
604
+ Log.w(TAG, "Audio focus request (doNotMix) denied")
605
+ }
606
+ }
561
607
  }
562
608
  }
563
609
 
564
610
  private fun abandonAudioFocus() {
611
+ if (audioMode == AudioMode.MIX_WITH_OTHERS) {
612
+ // No focus was ever requested — nothing to abandon.
613
+ hasAudioFocus.set(false)
614
+ audioFocusLost.set(false)
615
+ return
616
+ }
565
617
  audioManager.abandonAudioFocus(focusChangeListener)
566
618
  hasAudioFocus.set(false)
567
619
  audioFocusLost.set(false)
@@ -121,6 +121,7 @@ class PipelineIntegration(
121
121
  val bandConfig = options["frequencyBandConfig"] as? Map<*, *>
122
122
  val lowCrossoverHz = (bandConfig?.get("lowCrossoverHz") as? Number)?.toFloat() ?: 300f
123
123
  val highCrossoverHz = (bandConfig?.get("highCrossoverHz") as? Number)?.toFloat() ?: 2000f
124
+ val audioMode = AudioMode.fromString(options["audioMode"] as? String)
124
125
 
125
126
  pipeline = AudioPipeline(
126
127
  context = context,
@@ -130,6 +131,7 @@ class PipelineIntegration(
130
131
  frequencyBandIntervalMs = frequencyBandIntervalMs,
131
132
  lowCrossoverHz = lowCrossoverHz,
132
133
  highCrossoverHz = highCrossoverHz,
134
+ audioMode = audioMode,
133
135
  listener = this
134
136
  )
135
137
  pipeline!!.connect()
@@ -1,4 +1,15 @@
1
1
  import { PlaybackMode, FrequencyBandConfig, FrequencyBands } from "../types";
2
+ /**
3
+ * How the pipeline's playback should coexist with other audio on the device.
4
+ *
5
+ * - `'mixWithOthers'` (default): plays alongside other apps without
6
+ * interrupting them. On Android no audio focus is requested. Best for
7
+ * sound effects and short clips.
8
+ * - `'duckOthers'`: requests audio focus with ducking. Other apps lower
9
+ * their volume but keep playing.
10
+ * - `'doNotMix'`: requests exclusive audio focus. Other apps pause.
11
+ */
12
+ export type PipelineAudioMode = 'mixWithOthers' | 'duckOthers' | 'doNotMix';
2
13
  /** Options passed to `connectPipeline()`. */
3
14
  export interface ConnectPipelineOptions {
4
15
  /** Sample rate in Hz (default 24000). */
@@ -18,6 +29,15 @@ export interface ConnectPipelineOptions {
18
29
  frequencyBandIntervalMs?: number;
19
30
  /** Optional frequency band crossover configuration. */
20
31
  frequencyBandConfig?: FrequencyBandConfig;
32
+ /**
33
+ * How pipeline playback should coexist with other apps' audio.
34
+ * Default is `'mixWithOthers'` (matches expo-audio).
35
+ *
36
+ * Note: this is a **behavior change** vs. prior versions of this library,
37
+ * which effectively used `'doNotMix'`. Pass `'doNotMix'` explicitly to
38
+ * preserve that old behavior.
39
+ */
40
+ audioMode?: PipelineAudioMode;
21
41
  }
22
42
  /** Result returned from a successful `connectPipeline()` call. */
23
43
  export interface ConnectPipelineResult {
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/pipeline/types.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAI7E,6CAA6C;AAC7C,MAAM,WAAW,sBAAsB;IACrC,yCAAyC;IACzC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,6DAA6D;IAC7D,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,sEAAsE;IACtE,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,uDAAuD;IACvD,mBAAmB,CAAC,EAAE,mBAAmB,CAAC;CAC3C;AAED,kEAAkE;AAClE,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;IACrB,cAAc,EAAE,MAAM,CAAC;IACvB;;;;OAIG;IACH,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAID,2EAA2E;AAC3E,MAAM,WAAW,wBAAwB;IACvC,sDAAsD;IACtD,KAAK,EAAE,MAAM,CAAC;IACd,oCAAoC;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,4EAA4E;IAC5E,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,iFAAiF;IACjF,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAID,oDAAoD;AACpD,MAAM,WAAW,6BAA6B;IAC5C,2EAA2E;IAC3E,MAAM,EAAE,MAAM,CAAC;CAChB;AAID;;;;;;;;GAQG;AACH,MAAM,MAAM,aAAa,GACrB,MAAM,GACN,YAAY,GACZ,WAAW,GACX,UAAU,GACV,OAAO,CAAC;AAIZ,0CAA0C;AAC1C,MAAM,WAAW,yBAAyB;IACxC,KAAK,EAAE,aAAa,CAAC;CACtB;AAED,6CAA6C;AAC7C,MAAM,WAAW,4BAA4B;IAC3C,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,mCAAmC;AACnC,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,4CAA4C;AAC5C,MAAM,WAAW,2BAA2B;IAC1C,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,sCAAsC;AACtC,MAAM,WAAW,qBAAqB;IACpC,KAAK,EAAE,MAAM,CAAC;CACf;AAED,qCAAqC;AACrC,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,6EAA6E;AAC7E,MAAM,MAAM,2BAA2B,GAAG,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAEhE,gFAAgF;AAChF,MAAM,MAAM,8BAA8B,GAAG,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAEnE,4CAA4C;AAC5C,MAAM,WAAW,2BAA4B,SAAQ,cAAc;CAAG;AAEtE;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B,oBAAoB,EAAE,yBAAyB,CAAC;IAChD,uBAAuB,EAAE,4BAA4B,CAAC;IACtD,aAAa,EAAE,kBAAkB,CAAC;IAClC,sBAAsB,EAAE,2BAA2B,CAAC;IACpD,gBAAgB,EAAE,qBAAqB,CAAC;IACxC,eAAe,EAAE,oBAAoB,CAAC;IACtC,sBAAsB,EAAE,2BAA2B,CAAC;IACpD,yBAAyB,EAAE,8BAA8B,CAAC;IAC1D,sBAAsB,EAAE,2BAA2B,CAAC;CACrD;AAED,gDAAgD;AA
ChD,MAAM,MAAM,iBAAiB,GAAG,MAAM,gBAAgB,CAAC;AAIvD,wCAAwC;AACxC,MAAM,WAAW,uBAAuB;IACtC,4CAA4C;IAC5C,QAAQ,EAAE,MAAM,CAAC;IACjB,uCAAuC;IACvC,aAAa,EAAE,MAAM,CAAC;IACtB,2CAA2C;IAC3C,MAAM,EAAE,OAAO,CAAC;IAChB,8DAA8D;IAC9D,YAAY,EAAE,MAAM,CAAC;IACrB,2DAA2D;IAC3D,SAAS,EAAE,MAAM,CAAC;IAClB,iCAAiC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,oCAAoC;IACpC,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,wCAAwC;AACxC,MAAM,WAAW,iBAAkB,SAAQ,uBAAuB;IAChE,8BAA8B;IAC9B,KAAK,EAAE,aAAa,CAAC;IACrB,yDAAyD;IACzD,cAAc,EAAE,MAAM,CAAC;IACvB,wCAAwC;IACxC,cAAc,EAAE,MAAM,CAAC;IACvB,iDAAiD;IACjD,eAAe,EAAE,MAAM,CAAC;IACxB,+BAA+B;IAC/B,MAAM,EAAE,MAAM,CAAC;CAChB"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/pipeline/types.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAI7E;;;;;;;;;GASG;AACH,MAAM,MAAM,iBAAiB,GAAG,eAAe,GAAG,YAAY,GAAG,UAAU,CAAC;AAE5E,6CAA6C;AAC7C,MAAM,WAAW,sBAAsB;IACrC,yCAAyC;IACzC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,6DAA6D;IAC7D,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,sEAAsE;IACtE,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,uDAAuD;IACvD,mBAAmB,CAAC,EAAE,mBAAmB,CAAC;IAC1C;;;;;;;OAOG;IACH,SAAS,CAAC,EAAE,iBAAiB,CAAC;CAC/B;AAED,kEAAkE;AAClE,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;IACrB,cAAc,EAAE,MAAM,CAAC;IACvB;;;;OAIG;IACH,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAID,2EAA2E;AAC3E,MAAM,WAAW,wBAAwB;IACvC,sDAAsD;IACtD,KAAK,EAAE,MAAM,CAAC;IACd,oCAAoC;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,4EAA4E;IAC5E,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,iFAAiF;IACjF,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAID,oDAAoD;AACpD,MAAM,WAAW,6BAA6B;IAC5C,2EAA2E;IAC3E,MAAM,EAAE,MAAM,CAAC;CAChB;AAID;;;;;;;;GAQG;AACH,MAAM,MAAM,aAAa,GACrB,MAAM,GACN,YAAY,GACZ,WAAW,GACX,UAAU,GACV,OAAO,CAAC;AAIZ,0CAA0C;AAC1C,MAAM,WAAW,yBAAyB;IACxC,KAAK,EAAE,aAAa,CAAC;CACtB;AAED,6CAA6C;AAC7C,MAAM,WAAW,4BAA4B;IAC3C,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,mCAAmC;AACnC,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,4CAA4C;AAC5C,MAAM,WAAW,2BAA2B;IAC1C,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,sCAAsC;AACtC,MAAM,WAAW,qBAAqB;IACpC,KAAK,EAAE,MAAM,CAAC;CACf;AAED,qCAAqC;AACrC,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,6EAA6E;AAC7E,MAAM,MAAM,2BAA2B,GAAG,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAEhE,gFAAgF;AAChF,MAAM,MAAM,8BAA8B,GAAG,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAEnE,4CAA4C;AAC5C,MAAM,WAAW,2BAA4B,SAAQ,cAAc;CAAG;AAEtE;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B,oBAAoB,EAAE,yBAAyB,CAAC;IAChD,uBAAuB,EAAE,4BAA4B,CAAC;IACtD,aAAa,EAAE,kBAAkB,CAAC;IAClC,sBAAsB,EAAE,2BAA2B,CAAC;IACpD,gBAAgB,EAAE,qBAAqB,CAAC;IACxC,eAAe,EA
AE,oBAAoB,CAAC;IACtC,sBAAsB,EAAE,2BAA2B,CAAC;IACpD,yBAAyB,EAAE,8BAA8B,CAAC;IAC1D,sBAAsB,EAAE,2BAA2B,CAAC;CACrD;AAED,gDAAgD;AAChD,MAAM,MAAM,iBAAiB,GAAG,MAAM,gBAAgB,CAAC;AAIvD,wCAAwC;AACxC,MAAM,WAAW,uBAAuB;IACtC,4CAA4C;IAC5C,QAAQ,EAAE,MAAM,CAAC;IACjB,uCAAuC;IACvC,aAAa,EAAE,MAAM,CAAC;IACtB,2CAA2C;IAC3C,MAAM,EAAE,OAAO,CAAC;IAChB,8DAA8D;IAC9D,YAAY,EAAE,MAAM,CAAC;IACrB,2DAA2D;IAC3D,SAAS,EAAE,MAAM,CAAC;IAClB,iCAAiC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,oCAAoC;IACpC,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,wCAAwC;AACxC,MAAM,WAAW,iBAAkB,SAAQ,uBAAuB;IAChE,8BAA8B;IAC9B,KAAK,EAAE,aAAa,CAAC;IACrB,yDAAyD;IACzD,cAAc,EAAE,MAAM,CAAC;IACvB,wCAAwC;IACxC,cAAc,EAAE,MAAM,CAAC;IACvB,iDAAiD;IACjD,eAAe,EAAE,MAAM,CAAC;IACxB,+BAA+B;IAC/B,MAAM,EAAE,MAAM,CAAC;CAChB"}
@@ -1 +1 @@
1
- {"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/pipeline/types.ts"],"names":[],"mappings":"AAAA,+EAA+E;AAC/E,8CAA8C;AAC9C,+EAA+E","sourcesContent":["// ────────────────────────────────────────────────────────────────────────────\n// Native Audio Pipeline — V3 TypeScript Types\n// ────────────────────────────────────────────────────────────────────────────\n\nimport { PlaybackMode, FrequencyBandConfig, FrequencyBands } from \"../types\";\n\n// ── Connect ─────────────────────────────────────────────────────────────────\n\n/** Options passed to `connectPipeline()`. */\nexport interface ConnectPipelineOptions {\n /** Sample rate in Hz (default 24000). */\n sampleRate?: number;\n /** Number of channels — 1 = mono, 2 = stereo (default 1). */\n channelCount?: number;\n /**\n * How many ms of audio to accumulate in the jitter buffer before the\n * priming gate opens and audio begins playing (default 80).\n */\n targetBufferMs?: number;\n /**\n * Playback mode hint for native optimizations. Affects thread priority and\n */\n playbackMode?: PlaybackMode;\n /** Interval in ms for PipelineFrequencyBands events (default 100). */\n frequencyBandIntervalMs?: number;\n /** Optional frequency band crossover configuration. */\n frequencyBandConfig?: FrequencyBandConfig;\n}\n\n/** Result returned from a successful `connectPipeline()` call. */\nexport interface ConnectPipelineResult {\n sampleRate: number;\n channelCount: number;\n targetBufferMs: number;\n /**\n * Frame size in samples derived from the device HAL's\n * `AudioTrack.getMinBufferSize()`. Useful for understanding the write\n * granularity on the native side.\n */\n frameSizeSamples: number;\n}\n\n// ── Push Audio ──────────────────────────────────────────────────────────────\n\n/** Options passed to `pushPipelineAudio()` / `pushPipelineAudioSync()`. */\nexport interface PushPipelineAudioOptions {\n /** Base64-encoded PCM 16-bit signed LE audio data. 
*/\n audio: string;\n /** Conversation turn identifier. */\n turnId: string;\n /** True if this is the first chunk of a new turn (resets jitter buffer). */\n isFirstChunk?: boolean;\n /** True if this is the final chunk of the current turn (marks end-of-stream). */\n isLastChunk?: boolean;\n}\n\n// ── Invalidate Turn ─────────────────────────────────────────────────────────\n\n/** Options passed to `invalidatePipelineTurn()`. */\nexport interface InvalidatePipelineTurnOptions {\n /** The new turn identifier — stale audio for the old turn is discarded. */\n turnId: string;\n}\n\n// ── State ───────────────────────────────────────────────────────────────────\n\n/**\n * Pipeline states reported via `PipelineStateChanged` events.\n *\n * - `idle` — connected but no audio flowing\n * - `connecting` — AudioTrack being created, focus being requested\n * - `streaming` — actively receiving and playing audio\n * - `draining` — end-of-stream marked, playing remaining buffer\n * - `error` — unrecoverable error (zombie, write failure, etc.)\n */\nexport type PipelineState =\n | 'idle'\n | 'connecting'\n | 'streaming'\n | 'draining'\n | 'error';\n\n// ── Events ──────────────────────────────────────────────────────────────────\n\n/** Payload for `PipelineStateChanged`. */\nexport interface PipelineStateChangedEvent {\n state: PipelineState;\n}\n\n/** Payload for `PipelinePlaybackStarted`. */\nexport interface PipelinePlaybackStartedEvent {\n turnId: string;\n}\n\n/** Payload for `PipelineError`. */\nexport interface PipelineErrorEvent {\n code: string;\n message: string;\n}\n\n/** Payload for `PipelineZombieDetected`. */\nexport interface PipelineZombieDetectedEvent {\n playbackHead: number;\n stalledMs: number;\n}\n\n/** Payload for `PipelineUnderrun`. */\nexport interface PipelineUnderrunEvent {\n count: number;\n}\n\n/** Payload for `PipelineDrained`. 
*/\nexport interface PipelineDrainedEvent {\n turnId: string;\n}\n\n/** Payload for `PipelineAudioFocusLost` (empty — presence is the signal). */\nexport type PipelineAudioFocusLostEvent = Record<string, never>;\n\n/** Payload for `PipelineAudioFocusResumed` (empty — presence is the signal). */\nexport type PipelineAudioFocusResumedEvent = Record<string, never>;\n\n/** Payload for `PipelineFrequencyBands`. */\nexport interface PipelineFrequencyBandsEvent extends FrequencyBands {}\n\n/**\n * Map of all pipeline event names to their payload types.\n * Used with `Pipeline.subscribe<K>()` for type-safe event subscriptions.\n */\nexport interface PipelineEventMap {\n PipelineStateChanged: PipelineStateChangedEvent;\n PipelinePlaybackStarted: PipelinePlaybackStartedEvent;\n PipelineError: PipelineErrorEvent;\n PipelineZombieDetected: PipelineZombieDetectedEvent;\n PipelineUnderrun: PipelineUnderrunEvent;\n PipelineDrained: PipelineDrainedEvent;\n PipelineAudioFocusLost: PipelineAudioFocusLostEvent;\n PipelineAudioFocusResumed: PipelineAudioFocusResumedEvent;\n PipelineFrequencyBands: PipelineFrequencyBandsEvent;\n}\n\n/** Union of all pipeline event name strings. */\nexport type PipelineEventName = keyof PipelineEventMap;\n\n// ── Telemetry ───────────────────────────────────────────────────────────────\n\n/** Jitter buffer telemetry counters. */\nexport interface PipelineBufferTelemetry {\n /** Current buffer level in milliseconds. */\n bufferMs: number;\n /** Current buffer level in samples. */\n bufferSamples: number;\n /** Whether the priming gate has opened. */\n primed: boolean;\n /** Total samples written by the producer since last reset. */\n totalWritten: number;\n /** Total samples read by the consumer since last reset. */\n totalRead: number;\n /** Number of underrun events. */\n underrunCount: number;\n /** Peak buffer level in samples. */\n peakLevel: number;\n}\n\n/** Full pipeline telemetry snapshot. 
*/\nexport interface PipelineTelemetry extends PipelineBufferTelemetry {\n /** Current pipeline state. */\n state: PipelineState;\n /** Total pushAudio/pushAudioSync calls since connect. */\n totalPushCalls: number;\n /** Total bytes pushed since connect. */\n totalPushBytes: number;\n /** Total write-loop iterations since connect. */\n totalWriteLoops: number;\n /** Current turn identifier. */\n turnId: string;\n}\n"]}
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/pipeline/types.ts"],"names":[],"mappings":"AAAA,+EAA+E;AAC/E,8CAA8C;AAC9C,+EAA+E","sourcesContent":["// ────────────────────────────────────────────────────────────────────────────\n// Native Audio Pipeline — V3 TypeScript Types\n// ────────────────────────────────────────────────────────────────────────────\n\nimport { PlaybackMode, FrequencyBandConfig, FrequencyBands } from \"../types\";\n\n// ── Connect ─────────────────────────────────────────────────────────────────\n\n/**\n * How the pipeline's playback should coexist with other audio on the device.\n *\n * - `'mixWithOthers'` (default): plays alongside other apps without\n * interrupting them. On Android no audio focus is requested. Best for\n * sound effects and short clips.\n * - `'duckOthers'`: requests audio focus with ducking. Other apps lower\n * their volume but keep playing.\n * - `'doNotMix'`: requests exclusive audio focus. Other apps pause.\n */\nexport type PipelineAudioMode = 'mixWithOthers' | 'duckOthers' | 'doNotMix';\n\n/** Options passed to `connectPipeline()`. */\nexport interface ConnectPipelineOptions {\n /** Sample rate in Hz (default 24000). */\n sampleRate?: number;\n /** Number of channels — 1 = mono, 2 = stereo (default 1). */\n channelCount?: number;\n /**\n * How many ms of audio to accumulate in the jitter buffer before the\n * priming gate opens and audio begins playing (default 80).\n */\n targetBufferMs?: number;\n /**\n * Playback mode hint for native optimizations. Affects thread priority and\n */\n playbackMode?: PlaybackMode;\n /** Interval in ms for PipelineFrequencyBands events (default 100). */\n frequencyBandIntervalMs?: number;\n /** Optional frequency band crossover configuration. */\n frequencyBandConfig?: FrequencyBandConfig;\n /**\n * How pipeline playback should coexist with other apps' audio.\n * Default is `'mixWithOthers'` (matches expo-audio).\n *\n * Note: this is a **behavior change** vs. 
prior versions of this library,\n * which effectively used `'doNotMix'`. Pass `'doNotMix'` explicitly to\n * preserve that old behavior.\n */\n audioMode?: PipelineAudioMode;\n}\n\n/** Result returned from a successful `connectPipeline()` call. */\nexport interface ConnectPipelineResult {\n sampleRate: number;\n channelCount: number;\n targetBufferMs: number;\n /**\n * Frame size in samples derived from the device HAL's\n * `AudioTrack.getMinBufferSize()`. Useful for understanding the write\n * granularity on the native side.\n */\n frameSizeSamples: number;\n}\n\n// ── Push Audio ──────────────────────────────────────────────────────────────\n\n/** Options passed to `pushPipelineAudio()` / `pushPipelineAudioSync()`. */\nexport interface PushPipelineAudioOptions {\n /** Base64-encoded PCM 16-bit signed LE audio data. */\n audio: string;\n /** Conversation turn identifier. */\n turnId: string;\n /** True if this is the first chunk of a new turn (resets jitter buffer). */\n isFirstChunk?: boolean;\n /** True if this is the final chunk of the current turn (marks end-of-stream). */\n isLastChunk?: boolean;\n}\n\n// ── Invalidate Turn ─────────────────────────────────────────────────────────\n\n/** Options passed to `invalidatePipelineTurn()`. */\nexport interface InvalidatePipelineTurnOptions {\n /** The new turn identifier — stale audio for the old turn is discarded. 
*/\n turnId: string;\n}\n\n// ── State ───────────────────────────────────────────────────────────────────\n\n/**\n * Pipeline states reported via `PipelineStateChanged` events.\n *\n * - `idle` — connected but no audio flowing\n * - `connecting` — AudioTrack being created, focus being requested\n * - `streaming` — actively receiving and playing audio\n * - `draining` — end-of-stream marked, playing remaining buffer\n * - `error` — unrecoverable error (zombie, write failure, etc.)\n */\nexport type PipelineState =\n | 'idle'\n | 'connecting'\n | 'streaming'\n | 'draining'\n | 'error';\n\n// ── Events ──────────────────────────────────────────────────────────────────\n\n/** Payload for `PipelineStateChanged`. */\nexport interface PipelineStateChangedEvent {\n state: PipelineState;\n}\n\n/** Payload for `PipelinePlaybackStarted`. */\nexport interface PipelinePlaybackStartedEvent {\n turnId: string;\n}\n\n/** Payload for `PipelineError`. */\nexport interface PipelineErrorEvent {\n code: string;\n message: string;\n}\n\n/** Payload for `PipelineZombieDetected`. */\nexport interface PipelineZombieDetectedEvent {\n playbackHead: number;\n stalledMs: number;\n}\n\n/** Payload for `PipelineUnderrun`. */\nexport interface PipelineUnderrunEvent {\n count: number;\n}\n\n/** Payload for `PipelineDrained`. */\nexport interface PipelineDrainedEvent {\n turnId: string;\n}\n\n/** Payload for `PipelineAudioFocusLost` (empty — presence is the signal). */\nexport type PipelineAudioFocusLostEvent = Record<string, never>;\n\n/** Payload for `PipelineAudioFocusResumed` (empty — presence is the signal). */\nexport type PipelineAudioFocusResumedEvent = Record<string, never>;\n\n/** Payload for `PipelineFrequencyBands`. 
*/\nexport interface PipelineFrequencyBandsEvent extends FrequencyBands {}\n\n/**\n * Map of all pipeline event names to their payload types.\n * Used with `Pipeline.subscribe<K>()` for type-safe event subscriptions.\n */\nexport interface PipelineEventMap {\n PipelineStateChanged: PipelineStateChangedEvent;\n PipelinePlaybackStarted: PipelinePlaybackStartedEvent;\n PipelineError: PipelineErrorEvent;\n PipelineZombieDetected: PipelineZombieDetectedEvent;\n PipelineUnderrun: PipelineUnderrunEvent;\n PipelineDrained: PipelineDrainedEvent;\n PipelineAudioFocusLost: PipelineAudioFocusLostEvent;\n PipelineAudioFocusResumed: PipelineAudioFocusResumedEvent;\n PipelineFrequencyBands: PipelineFrequencyBandsEvent;\n}\n\n/** Union of all pipeline event name strings. */\nexport type PipelineEventName = keyof PipelineEventMap;\n\n// ── Telemetry ───────────────────────────────────────────────────────────────\n\n/** Jitter buffer telemetry counters. */\nexport interface PipelineBufferTelemetry {\n /** Current buffer level in milliseconds. */\n bufferMs: number;\n /** Current buffer level in samples. */\n bufferSamples: number;\n /** Whether the priming gate has opened. */\n primed: boolean;\n /** Total samples written by the producer since last reset. */\n totalWritten: number;\n /** Total samples read by the consumer since last reset. */\n totalRead: number;\n /** Number of underrun events. */\n underrunCount: number;\n /** Peak buffer level in samples. */\n peakLevel: number;\n}\n\n/** Full pipeline telemetry snapshot. */\nexport interface PipelineTelemetry extends PipelineBufferTelemetry {\n /** Current pipeline state. */\n state: PipelineState;\n /** Total pushAudio/pushAudioSync calls since connect. */\n totalPushCalls: number;\n /** Total bytes pushed since connect. */\n totalPushBytes: number;\n /** Total write-loop iterations since connect. */\n totalWriteLoops: number;\n /** Current turn identifier. */\n turnId: string;\n}\n"]}
@@ -158,20 +158,45 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, Pipeline
158
158
 
159
159
  AsyncFunction("connectPipeline") { (options: [String: Any], promise: Promise) in
160
160
  do {
161
+ // Always ensure the session is set up (no-op if already initialized).
162
+ // The one-time guard inside ensureAudioSessionInitialized covers
163
+ // the mic-only path; we re-apply the category below every connect
164
+ // because audioMode may change between connects.
161
165
  if !self.isAudioSessionInitialized {
162
166
  try self.ensureAudioSessionInitialized()
163
167
  }
164
168
 
165
- // Parse playback mode from options to configure shared engine
166
- let playbackModeString = options["playbackMode"] as? String ?? "regular"
169
+ // Parse audioMode (default: "mixWithOthers")
170
+ let audioModeString = options["audioMode"] as? String ?? "mixWithOthers"
171
+ var categoryOptions: AVAudioSession.CategoryOptions =
172
+ [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP]
173
+ switch audioModeString {
174
+ case "mixWithOthers":
175
+ categoryOptions.insert(.mixWithOthers)
176
+ case "duckOthers":
177
+ categoryOptions.insert(.duckOthers)
178
+ case "doNotMix":
179
+ break // no additional option
180
+ default:
181
+ categoryOptions.insert(.mixWithOthers)
182
+ }
183
+
184
+ // Reconfigure the session category with the right mix options.
185
+ // Runtime category changes are supported on iOS.
186
+ let audioSession = AVAudioSession.sharedInstance()
187
+ try audioSession.setCategory(
188
+ .playAndRecord, mode: .videoChat, options: categoryOptions)
189
+ try audioSession.setActive(true)
190
+
191
+ // Parse playback mode from options to configure shared engine.
192
+ // Always use VP — this library is meant for mic+speaker combos.
193
+ let playbackModeString = options["playbackMode"] as? String ?? "conversation"
167
194
  let playbackMode: PlaybackMode
168
195
  switch playbackModeString {
169
196
  case "voiceProcessing":
170
197
  playbackMode = .voiceProcessing
171
- case "conversation":
172
- playbackMode = .conversation
173
198
  default:
174
- playbackMode = .regular
199
+ playbackMode = .conversation
175
200
  }
176
201
 
177
202
  // Configure shared engine (handles voice processing)
@@ -38,18 +38,14 @@ protocol SharedAudioEngineDelegate: AnyObject {
38
38
  /// Consumers attach their own AVAudioPlayerNode via `attachNode(_:format:)`.
39
39
  /// The mixer handles sample-rate conversion from each node's format to the
40
40
  /// hardware output format automatically.
41
- ///
42
- /// All public methods are serialized on an internal queue to prevent races
43
- /// between Expo async-function calls, notification handlers, and teardown.
44
41
  class SharedAudioEngine {
45
42
  private static let TAG = "SharedAudioEngine"
46
43
 
47
- /// Serial queue that protects all engine / node / state mutations.
48
- private let queue = DispatchQueue(label: "expo.audio.SharedAudioEngine")
44
+ private let lock = NSRecursiveLock()
49
45
 
50
46
  // ── Engine state ─────────────────────────────────────────────────────
51
47
  private(set) var engine: AVAudioEngine?
52
- private(set) var playbackMode: PlaybackMode = .regular
48
+ private(set) var playbackMode: PlaybackMode = .conversation
53
49
  private(set) var isConfigured = false
54
50
 
55
51
  /// All registered consumers receive route-change and interruption callbacks.
@@ -57,11 +53,17 @@ class SharedAudioEngine {
57
53
  private let delegates = NSHashTable<AnyObject>.weakObjects()
58
54
 
59
55
  func addDelegate(_ d: SharedAudioEngineDelegate) {
60
- queue.sync { delegates.add(d as AnyObject) }
56
+ lock.lock()
57
+ defer { lock.unlock() }
58
+ if !delegates.contains(d as AnyObject) {
59
+ delegates.add(d as AnyObject)
60
+ }
61
61
  }
62
62
 
63
63
  func removeDelegate(_ d: SharedAudioEngineDelegate) {
64
- queue.sync { delegates.remove(d as AnyObject) }
64
+ lock.lock()
65
+ defer { lock.unlock() }
66
+ delegates.remove(d as AnyObject)
65
67
  }
66
68
 
67
69
  private func notifyDelegates(_ block: (SharedAudioEngineDelegate) -> Void) {
@@ -90,10 +92,8 @@ class SharedAudioEngine {
90
92
  ///
91
93
  /// - Parameter playbackMode: Determines whether voice processing is enabled.
92
94
  func configure(playbackMode: PlaybackMode) throws {
93
- try queue.sync { try _configure(playbackMode: playbackMode) }
94
- }
95
-
96
- private func _configure(playbackMode: PlaybackMode) throws {
95
+ lock.lock()
96
+ defer { lock.unlock() }
97
97
  if isConfigured && self.playbackMode == playbackMode && engine?.isRunning == true {
98
98
  Logger.debug("[\(SharedAudioEngine.TAG)] Already configured for \(playbackMode) and engine running, skipping")
99
99
  return
@@ -105,7 +105,7 @@ class SharedAudioEngine {
105
105
 
106
106
  // Tear down existing engine (keeps attachedNodes info for re-attach)
107
107
  let previousNodes = attachedNodes
108
- _teardown()
108
+ teardown()
109
109
 
110
110
  Logger.debug("[\(SharedAudioEngine.TAG)] Configuring engine — playbackMode=\(playbackMode)")
111
111
 
@@ -119,12 +119,10 @@ class SharedAudioEngine {
119
119
  Logger.debug("[\(SharedAudioEngine.TAG)] Voice processing enabled")
120
120
  }
121
121
 
122
- // Force the output node (and implicitly the graph) to be created
123
- // before starting. inputNode/outputNode are lazy — if neither is
124
- // accessed, the graph has zero nodes and Initialize crashes with
125
- // "inputNode != nullptr || outputNode != nullptr".
126
- // The VP path above already accesses both; this covers regular mode.
127
- _ = engine.mainMixerNode
122
+ // VP accesses inputNode/outputNode above, which creates the graph.
123
+ // Do NOT access mainMixerNode here — inserting the mixer after
124
+ // setVoiceProcessingEnabled disrupts VoiceProcessingIO's internal
125
+ // graph and causes scheduleBuffer completions to never fire.
128
126
 
129
127
  try engine.start()
130
128
 
@@ -142,7 +140,7 @@ class SharedAudioEngine {
142
140
 
143
141
  // Re-attach any nodes that were connected before reconfiguration
144
142
  for info in previousNodes {
145
- _attachNode(info.node, format: info.format)
143
+ attachNode(info.node, format: info.format)
146
144
  info.node.play()
147
145
  }
148
146
 
@@ -162,10 +160,8 @@ class SharedAudioEngine {
162
160
  /// Connects `node → mainMixerNode` with the given format.
163
161
  /// The mixer handles sample-rate conversion to hardware output.
164
162
  func attachNode(_ node: AVAudioPlayerNode, format: AVAudioFormat) {
165
- queue.sync { _attachNode(node, format: format) }
166
- }
167
-
168
- private func _attachNode(_ node: AVAudioPlayerNode, format: AVAudioFormat) {
163
+ lock.lock()
164
+ defer { lock.unlock() }
169
165
  guard let engine = engine else {
170
166
  Logger.debug("[\(SharedAudioEngine.TAG)] attachNode called but engine is nil")
171
167
  return
@@ -180,25 +176,23 @@ class SharedAudioEngine {
180
176
 
181
177
  /// Detach a consumer's player node from the shared engine.
182
178
  func detachNode(_ node: AVAudioPlayerNode) {
183
- queue.sync { _detachNode(node) }
184
- }
185
-
186
- private func _detachNode(_ node: AVAudioPlayerNode) {
179
+ lock.lock()
180
+ defer { lock.unlock() }
187
181
  guard let engine = engine else { return }
188
182
 
183
+ node.pause()
184
+ node.stop()
185
+
186
+ // Only disconnect/detach if the node is still attached to this engine.
187
+ // The node may already have been removed (e.g. engine died, concurrent
188
+ // teardown, or duplicate disconnect call).
189
189
  if node.engine === engine {
190
190
  engine.disconnectNodeOutput(node)
191
191
  engine.detach(node)
192
192
  }
193
193
  attachedNodes.removeAll { $0.node === node }
194
194
 
195
- // Stop the engine if no nodes remain — no reason to keep it running.
196
- if attachedNodes.isEmpty && engine.isRunning {
197
- engine.stop()
198
- Logger.debug("[\(SharedAudioEngine.TAG)] Node detached, engine stopped (no remaining nodes)")
199
- } else {
200
- Logger.debug("[\(SharedAudioEngine.TAG)] Node detached")
201
- }
195
+ Logger.debug("[\(SharedAudioEngine.TAG)] Node detached")
202
196
  }
203
197
 
204
198
  // ════════════════════════════════════════════════════════════════════
@@ -207,31 +201,21 @@ class SharedAudioEngine {
207
201
 
208
202
  /// Tear down the engine completely. Called on reconfigure or module destroy.
209
203
  func teardown() {
210
- queue.sync { _teardown() }
211
- }
212
-
213
- private func _teardown() {
204
+ lock.lock()
205
+ defer { lock.unlock() }
214
206
  // Remove observers
215
207
  NotificationCenter.default.removeObserver(
216
208
  self, name: AVAudioSession.routeChangeNotification, object: nil)
217
209
  NotificationCenter.default.removeObserver(
218
210
  self, name: AVAudioSession.interruptionNotification, object: nil)
219
211
 
220
- // Disable voice processing BEFORE stopping so the system begins
221
- // swapping VoiceProcessingIO back to RemoteIO while we clean up.
222
- // Without this, a new engine created immediately after teardown can
223
- // crash in Initialize (inputNode/outputNode both nil) because the
224
- // IO unit is still mid-swap.
225
- if playbackMode == .conversation || playbackMode == .voiceProcessing {
226
- if let engine = engine {
227
- try? engine.inputNode.setVoiceProcessingEnabled(false)
228
- try? engine.outputNode.setVoiceProcessingEnabled(false)
229
- }
230
- }
231
-
232
- engine?.stop()
212
+ // Detach all tracked nodes
233
213
  if let engine = engine {
234
214
  for info in attachedNodes {
215
+ info.node.pause()
216
+ info.node.stop()
217
+ // Guard against nodes already removed from engine (e.g. engine
218
+ // died or node was detached by a concurrent disconnect call).
235
219
  if info.node.engine === engine {
236
220
  engine.disconnectNodeOutput(info.node)
237
221
  engine.detach(info.node)
@@ -240,6 +224,15 @@ class SharedAudioEngine {
240
224
  }
241
225
  attachedNodes.removeAll()
242
226
 
227
+ // Disable voice processing before stopping
228
+ if playbackMode == .conversation || playbackMode == .voiceProcessing {
229
+ if let engine = engine {
230
+ try? engine.inputNode.setVoiceProcessingEnabled(false)
231
+ try? engine.outputNode.setVoiceProcessingEnabled(false)
232
+ }
233
+ }
234
+
235
+ engine?.stop()
243
236
  engine = nil
244
237
  isConfigured = false
245
238
 
@@ -260,12 +253,9 @@ class SharedAudioEngine {
260
253
  return
261
254
  }
262
255
 
263
- queue.async { [weak self] in
264
- self?._handleRouteChange(reason: reason)
265
- }
266
- }
256
+ lock.lock()
257
+ defer { lock.unlock() }
267
258
 
268
- private func _handleRouteChange(reason: AVAudioSession.RouteChangeReason) {
269
259
  let routeDescription = AVAudioSession.sharedInstance().currentRoute.outputs
270
260
  .map { "\($0.portName) (\($0.portType.rawValue))" }
271
261
  .joined(separator: ", ")
@@ -285,7 +275,14 @@ class SharedAudioEngine {
285
275
  // Suppress completion handlers from node.stop() re-entering the scheduling loop
286
276
  isRebuildingForRouteChange = true
287
277
 
288
- // 1. Stop engine
278
+ // 1. Stop all attached nodes (completion handlers fire but are gated)
279
+ for info in attachedNodes {
280
+ Logger.debug("[\(SharedAudioEngine.TAG)] Stopping node — isPlaying=\(info.node.isPlaying)")
281
+ info.node.pause()
282
+ info.node.stop()
283
+ }
284
+
285
+ // 2. Stop engine
289
286
  if engine.isRunning {
290
287
  engine.stop()
291
288
  Logger.debug("[\(SharedAudioEngine.TAG)] Engine stopped")
@@ -293,7 +290,7 @@ class SharedAudioEngine {
293
290
  Logger.debug("[\(SharedAudioEngine.TAG)] Engine was already stopped")
294
291
  }
295
292
 
296
- // 2. Detach all nodes
293
+ // 3. Detach all nodes
297
294
  for info in attachedNodes {
298
295
  if info.node.engine === engine {
299
296
  engine.disconnectNodeOutput(info.node)
@@ -302,7 +299,7 @@ class SharedAudioEngine {
302
299
  }
303
300
  Logger.debug("[\(SharedAudioEngine.TAG)] Nodes detached (\(attachedNodes.count))")
304
301
 
305
- // 3. Re-enable voice processing (resets after engine stop)
302
+ // 4. Re-enable voice processing (resets after engine stop)
306
303
  if playbackMode == .conversation || playbackMode == .voiceProcessing {
307
304
  do {
308
305
  try engine.inputNode.setVoiceProcessingEnabled(true)
@@ -312,14 +309,14 @@ class SharedAudioEngine {
312
309
  }
313
310
  }
314
311
 
315
- // 4. Re-attach all nodes
312
+ // 5. Re-attach all nodes
316
313
  for info in attachedNodes {
317
314
  engine.attach(info.node)
318
315
  engine.connect(info.node, to: engine.mainMixerNode, format: info.format)
319
316
  }
320
317
  Logger.debug("[\(SharedAudioEngine.TAG)] Nodes re-attached (\(attachedNodes.count))")
321
318
 
322
- // 5. Reactivate session and restart engine with retry.
319
+ // 6. Reactivate session and restart engine with retry.
323
320
  // Voice processing mode switches the underlying audio unit (RemoteIO ↔
324
321
  // VoiceProcessingIO). This swap completes asynchronously — if we call
325
322
  // engine.start() immediately, the engine appears to start (isRunning=true)
@@ -330,7 +327,7 @@ class SharedAudioEngine {
330
327
  ? [0.15, 0.3, 0.6] // 150ms, 300ms, 600ms pre-start delay for VP mode (+100ms post-start verify)
331
328
  : [0.0, 0.1, 0.25] // immediate, then backoff for non-VP (+50ms post-start verify)
332
329
 
333
- _attemptRestart(engine: engine, retryDelays: retryDelays, attempt: 0)
330
+ self.attemptRestart(engine: engine, retryDelays: retryDelays, attempt: 0)
334
331
 
335
332
  case .categoryChange:
336
333
  Logger.debug("[\(SharedAudioEngine.TAG)] Audio session category changed")
@@ -343,14 +340,12 @@ class SharedAudioEngine {
343
340
  /// is truly running and nodes are playing before declaring success.
344
341
  /// On final failure, falls back to a full rebuild. If that also fails,
345
342
  /// tears down everything and notifies delegates via `engineDidDie`.
346
- ///
347
- /// Must be called on `queue`.
348
- private func _attemptRestart(engine: AVAudioEngine, retryDelays: [TimeInterval], attempt: Int) {
343
+ private func attemptRestart(engine: AVAudioEngine, retryDelays: [TimeInterval], attempt: Int) {
349
344
  guard attempt < retryDelays.count else {
350
345
  // Exhausted in-place retries — try a full rebuild as last resort
351
346
  Logger.debug("[\(SharedAudioEngine.TAG)] All \(retryDelays.count) restart attempts failed — attempting full rebuild")
352
347
  isRebuildingForRouteChange = false
353
- _rebuildEngine()
348
+ rebuildEngine()
354
349
  return
355
350
  }
356
351
 
@@ -381,7 +376,7 @@ class SharedAudioEngine {
381
376
  }
382
377
  } catch {
383
378
  Logger.debug("[\(SharedAudioEngine.TAG)] engine.start() threw on attempt \(attempt + 1): \(error)")
384
- self._attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
379
+ self.attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
385
380
  return
386
381
  }
387
382
 
@@ -400,14 +395,14 @@ class SharedAudioEngine {
400
395
  // Failed immediately — no point waiting, retry now
401
396
  Logger.debug("[\(SharedAudioEngine.TAG)] Restart attempt \(attempt + 1) failed immediately")
402
397
  if engine.isRunning { engine.stop() }
403
- self._attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
398
+ self.attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
404
399
  return
405
400
  }
406
401
 
407
402
  // Voice processing can cause the engine to die asynchronously after
408
- // appearing to start. Wait then re-verify before declaring success.
403
+ // appearing to start. Wait 100ms then re-verify before declaring success.
409
404
  let verifyDelay: TimeInterval = (self.playbackMode == .conversation || self.playbackMode == .voiceProcessing) ? 0.1 : 0.05
410
- self.queue.asyncAfter(deadline: .now() + verifyDelay) { [weak self] in
405
+ DispatchQueue.main.asyncAfter(deadline: .now() + verifyDelay) { [weak self] in
411
406
  guard let self = self, let engine = self.engine else {
412
407
  self?.isRebuildingForRouteChange = false
413
408
  return
@@ -436,9 +431,9 @@ class SharedAudioEngine {
436
431
  if isVP {
437
432
  Logger.debug("[\(SharedAudioEngine.TAG)] VP mode — skipping remaining in-place retries, going to full rebuild")
438
433
  self.isRebuildingForRouteChange = false
439
- self._rebuildEngine()
434
+ self.rebuildEngine()
440
435
  } else {
441
- self._attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
436
+ self.attemptRestart(engine: engine, retryDelays: retryDelays, attempt: attempt + 1)
442
437
  }
443
438
  }
444
439
  }
@@ -446,7 +441,7 @@ class SharedAudioEngine {
446
441
 
447
442
  if delay > 0 {
448
443
  Logger.debug("[\(SharedAudioEngine.TAG)] Waiting \(Int(delay * 1000))ms before attempt \(attempt + 1)")
449
- queue.asyncAfter(deadline: .now() + delay, execute: work)
444
+ DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: work)
450
445
  } else {
451
446
  work()
452
447
  }
@@ -459,17 +454,15 @@ class SharedAudioEngine {
459
454
  ///
460
455
  /// If this also fails, declare the engine dead, tear down all state, and
461
456
  /// notify delegates so they can report the failure to JS.
462
- ///
463
- /// Must be called on `queue`.
464
- private func _rebuildEngine() {
457
+ private func rebuildEngine() {
465
458
  Logger.debug("[\(SharedAudioEngine.TAG)] rebuildEngine — creating fresh engine (old nodes will NOT be reused)")
466
459
  let savedMode = playbackMode
467
460
 
468
461
  // Full teardown (clears attachedNodes, stops engine, nils it)
469
- _teardown()
462
+ teardown()
470
463
 
471
464
  do {
472
- try _configure(playbackMode: savedMode)
465
+ try configure(playbackMode: savedMode)
473
466
  // Do NOT re-attach old nodes. The VP IO swap can leave old
474
467
  // AVAudioPlayerNode instances in a broken state. Delegates must
475
468
  // create fresh nodes in their engineDidRebuild() callback.
@@ -478,7 +471,7 @@ class SharedAudioEngine {
478
471
  } catch {
479
472
  Logger.debug("[\(SharedAudioEngine.TAG)] rebuildEngine FAILED — engine is dead: \(error)")
480
473
  // Ensure everything is torn down so a future connect() starts clean
481
- _teardown()
474
+ teardown()
482
475
  let reason = "Route change recovery failed after all retries: \(error.localizedDescription)"
483
476
  notifyDelegates { $0.engineDidDie(reason: reason) }
484
477
  }
@@ -493,12 +486,9 @@ class SharedAudioEngine {
493
486
  let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
494
487
  let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { return }
495
488
 
496
- queue.async { [weak self] in
497
- self?._handleInterruption(type: type)
498
- }
499
- }
489
+ lock.lock()
490
+ defer { lock.unlock() }
500
491
 
501
- private func _handleInterruption(type: AVAudioSession.InterruptionType) {
502
492
  if type == .began {
503
493
  Logger.debug("[\(SharedAudioEngine.TAG)] Audio session interruption began")
504
494
  notifyDelegates { $0.audioSessionInterruptionBegan() }
@@ -521,6 +511,6 @@ class SharedAudioEngine {
521
511
  }
522
512
 
523
513
  deinit {
524
- _teardown()
514
+ teardown()
525
515
  }
526
516
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@edkimmel/expo-audio-stream",
3
- "version": "0.4.0",
3
+ "version": "0.4.2",
4
4
  "description": "Expo Play Audio Stream module",
5
5
  "main": "build/index.js",
6
6
  "types": "build/index.d.ts",
@@ -6,6 +6,18 @@ import { PlaybackMode, FrequencyBandConfig, FrequencyBands } from "../types";
6
6
 
7
7
  // ── Connect ─────────────────────────────────────────────────────────────────
8
8
 
9
+ /**
10
+ * How the pipeline's playback should coexist with other audio on the device.
11
+ *
12
+ * - `'mixWithOthers'` (default): plays alongside other apps without
13
+ * interrupting them. On Android no audio focus is requested. Best for
14
+ * sound effects and short clips.
15
+ * - `'duckOthers'`: requests audio focus with ducking. Other apps lower
16
+ * their volume but keep playing.
17
+ * - `'doNotMix'`: requests exclusive audio focus. Other apps pause.
18
+ */
19
+ export type PipelineAudioMode = 'mixWithOthers' | 'duckOthers' | 'doNotMix';
20
+
9
21
  /** Options passed to `connectPipeline()`. */
10
22
  export interface ConnectPipelineOptions {
11
23
  /** Sample rate in Hz (default 24000). */
@@ -25,6 +37,15 @@ export interface ConnectPipelineOptions {
25
37
  frequencyBandIntervalMs?: number;
26
38
  /** Optional frequency band crossover configuration. */
27
39
  frequencyBandConfig?: FrequencyBandConfig;
40
+ /**
41
+ * How pipeline playback should coexist with other apps' audio.
42
+ * Default is `'mixWithOthers'` (matches expo-audio).
43
+ *
44
+ * Note: this is a **behavior change** vs. prior versions of this library,
45
+ * which effectively used `'doNotMix'`. Pass `'doNotMix'` explicitly to
46
+ * preserve that old behavior.
47
+ */
48
+ audioMode?: PipelineAudioMode;
28
49
  }
29
50
 
30
51
  /** Result returned from a successful `connectPipeline()` call. */