@edkimmel/expo-audio-stream 0.3.3 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,17 +3,14 @@ import AVFoundation
3
3
  import ExpoModulesCore
4
4
 
5
5
  let audioDataEvent: String = "AudioData"
6
- let soundIsPlayedEvent: String = "SoundChunkPlayed"
7
- let soundIsStartedEvent: String = "SoundStarted"
8
6
  let deviceReconnectedEvent: String = "DeviceReconnected"
9
7
 
10
8
 
11
- public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPlayerDelegate, PipelineEventSender {
9
+ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, PipelineEventSender {
12
10
  private var _microphone: Microphone?
13
- private var _soundPlayer: SoundPlayer?
14
11
  private var _pipelineIntegration: PipelineIntegration?
15
12
 
16
- /// Single shared AVAudioEngine used by both SoundPlayer and AudioPipeline.
13
+ /// Single shared AVAudioEngine used by AudioPipeline.
17
14
  private let sharedAudioEngine = SharedAudioEngine()
18
15
 
19
16
  private var microphone: Microphone {
@@ -24,15 +21,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
24
21
  return _microphone!
25
22
  }
26
23
 
27
- private var soundPlayer: SoundPlayer {
28
- if _soundPlayer == nil {
29
- _soundPlayer = SoundPlayer()
30
- _soundPlayer?.delegate = self
31
- _soundPlayer?.setSharedEngine(sharedAudioEngine)
32
- }
33
- return _soundPlayer!
34
- }
35
-
36
24
  private var pipelineIntegration: PipelineIntegration {
37
25
  if _pipelineIntegration == nil {
38
26
  _pipelineIntegration = PipelineIntegration(eventSender: self, sharedEngine: sharedAudioEngine)
@@ -53,8 +41,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
53
41
  // Defines event names that the module can send to JavaScript.
54
42
  Events([
55
43
  audioDataEvent,
56
- soundIsPlayedEvent,
57
- soundIsStartedEvent,
58
44
  deviceReconnectedEvent,
59
45
  PipelineIntegration.EVENT_STATE_CHANGED,
60
46
  PipelineIntegration.EVENT_PLAYBACK_STARTED,
@@ -67,7 +53,7 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
67
53
  PipelineIntegration.EVENT_FREQUENCY_BANDS,
68
54
  ])
69
55
 
70
- Function("destroy") {
56
+ AsyncFunction("destroy") { (promise: Promise) in
71
57
  self._pipelineIntegration?.destroy()
72
58
  self._pipelineIntegration = nil
73
59
  self.sharedAudioEngine.teardown()
@@ -77,7 +63,7 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
77
63
  self.isAudioSessionInitialized = false
78
64
  }
79
65
  self._microphone = nil
80
- self._soundPlayer = nil
66
+ promise.resolve(nil)
81
67
  }
82
68
 
83
69
  /// Prompts the user to select the microphone mode.
@@ -108,55 +94,12 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
108
94
  ])
109
95
  }
110
96
 
111
- AsyncFunction("playSound") { (base64Chunk: String, turnId: String, encoding: String?, promise: Promise) in
112
- Logger.debug("Play sound")
113
- do {
114
- if !isAudioSessionInitialized {
115
- try ensureAudioSessionInitialized()
116
- }
117
-
118
- // Ensure shared engine is configured (playSound may be called without setSoundConfig)
119
- if !self.sharedAudioEngine.isConfigured {
120
- try self.sharedAudioEngine.configure(playbackMode: .regular)
121
- self.sharedAudioEngine.addDelegate(self.soundPlayer)
122
- }
123
-
124
- // Determine the audio format based on the encoding parameter
125
- let commonFormat: AVAudioCommonFormat
126
- switch encoding {
127
- case "pcm_f32le":
128
- commonFormat = .pcmFormatFloat32
129
- case "pcm_s16le", nil:
130
- commonFormat = .pcmFormatInt16
131
- default:
132
- Logger.debug("[ExpoPlayAudioStreamModule] Unsupported encoding: \(encoding ?? "nil"), defaulting to PCM_S16LE")
133
- commonFormat = .pcmFormatInt16
134
- }
135
-
136
- try soundPlayer.play(audioChunk: base64Chunk, turnId: turnId, resolver: {
137
- _ in promise.resolve(nil)
138
- }, rejecter: {code, message, error in
139
- promise.reject(code ?? "ERR_UNKNOWN", message ?? "Unknown error")
140
- }, commonFormat: commonFormat)
141
- } catch {
142
- print("Error enqueuing audio: \(error.localizedDescription)")
143
- }
144
- }
145
-
146
- AsyncFunction("stopSound") { (promise: Promise) in
147
- soundPlayer.stop(promise)
148
- }
149
-
150
- AsyncFunction("clearSoundQueueByTurnId") { (turnId: String, promise: Promise) in
151
- soundPlayer.clearSoundQueue(turnIdToClear: turnId, resolver: promise)
152
- }
153
-
154
97
  AsyncFunction("startMicrophone") { (options: [String: Any], promise: Promise) in
155
98
  // Create recording settings
156
99
  // Extract settings from provided options, using default values if necessary
157
- let sampleRate = options["sampleRate"] as? Double ?? 16000.0 // it fails if not 48000, why?
158
- let numberOfChannels = options["channelConfig"] as? Int ?? 1 // Mono channel configuration
159
- let bitDepth = options["audioFormat"] as? Int ?? 16 // 16bits
100
+ let sampleRate = options["sampleRate"] as? Double ?? 16000.0
101
+ let numberOfChannels = options["channelConfig"] as? Int ?? 1
102
+ let bitDepth = options["audioFormat"] as? Int ?? 16
160
103
  let interval = options["interval"] as? Int ?? 1000
161
104
 
162
105
  let fbConfigDict = options["frequencyBandConfig"] as? [String: Any]
@@ -203,11 +146,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
203
146
  }
204
147
  }
205
148
 
206
- /// Stops the microphone recording and releases associated resources
207
- /// - Parameter promise: A promise to resolve when microphone recording is stopped
208
- /// - Note: This method stops the active recording session, processes any remaining audio data,
209
- /// and releases hardware resources. It should be called when the app no longer needs
210
- /// microphone access to conserve battery and system resources.
211
149
  AsyncFunction("stopMicrophone") { (promise: Promise) in
212
150
  microphone.stopRecording(resolver: promise)
213
151
  }
@@ -216,62 +154,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
216
154
  microphone.toggleSilence(isSilent: isSilent)
217
155
  }
218
156
 
219
- /// Sets the sound player configuration
220
- /// - Parameters:
221
- /// - config: A dictionary containing configuration options:
222
- /// - `sampleRate`: The sample rate for audio playback (default is 16000.0).
223
- /// - `playbackMode`: The playback mode ("regular", "voiceProcessing", or "conversation").
224
- /// - `useDefault`: When true, resets to default configuration regardless of other parameters.
225
- /// - promise: A promise to resolve when configuration is updated or reject with an error.
226
- AsyncFunction("setSoundConfig") { (config: [String: Any], promise: Promise) in
227
- // Check if we should use default configuration
228
- let useDefault = config["useDefault"] as? Bool ?? false
229
-
230
- do {
231
- if !isAudioSessionInitialized {
232
- try ensureAudioSessionInitialized()
233
- }
234
-
235
- if useDefault {
236
- // Reset to default configuration — configure engine for regular mode
237
- Logger.debug("[ExpoPlayAudioStreamModule] Resetting sound configuration to default values")
238
- try self.sharedAudioEngine.configure(playbackMode: .regular)
239
- self.sharedAudioEngine.addDelegate(self.soundPlayer)
240
- try soundPlayer.resetConfigToDefault()
241
- } else {
242
- // Extract configuration values from the provided dictionary
243
- let sampleRate = config["sampleRate"] as? Double ?? 16000.0
244
- let playbackModeString = config["playbackMode"] as? String ?? "regular"
245
-
246
- // Convert string playback mode to enum
247
- let playbackMode: PlaybackMode
248
- switch playbackModeString {
249
- case "voiceProcessing":
250
- playbackMode = .voiceProcessing
251
- case "conversation":
252
- playbackMode = .conversation
253
- default:
254
- playbackMode = .regular
255
- }
256
-
257
- // Configure shared engine first (handles voice processing)
258
- try self.sharedAudioEngine.configure(playbackMode: playbackMode)
259
- self.sharedAudioEngine.addDelegate(self.soundPlayer)
260
-
261
- // Create a new SoundConfig object
262
- let soundConfig = SoundConfig(sampleRate: sampleRate, playbackMode: playbackMode)
263
-
264
- // Update the sound player configuration (attaches node to shared engine)
265
- Logger.debug("[ExpoPlayAudioStreamModule] Setting sound configuration - sampleRate: \(sampleRate), playbackMode: \(playbackModeString)")
266
- try soundPlayer.updateConfig(soundConfig)
267
- }
268
-
269
- promise.resolve(nil)
270
- } catch {
271
- promise.reject("ERROR_CONFIG_UPDATE", "Failed to set sound configuration: \(error.localizedDescription)")
272
- }
273
- }
274
-
275
157
  // ── Pipeline functions ────────────────────────────────────────────
276
158
 
277
159
  AsyncFunction("connectPipeline") { (options: [String: Any], promise: Promise) in
@@ -280,16 +162,15 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
280
162
  try self.ensureAudioSessionInitialized()
281
163
  }
282
164
 
283
- // Parse playback mode from options to configure shared engine
284
- let playbackModeString = options["playbackMode"] as? String ?? "regular"
165
+ // Parse playback mode from options to configure shared engine.
166
+ // Always use VP, since this library is meant for mic+speaker combos.
167
+ let playbackModeString = options["playbackMode"] as? String ?? "conversation"
285
168
  let playbackMode: PlaybackMode
286
169
  switch playbackModeString {
287
170
  case "voiceProcessing":
288
171
  playbackMode = .voiceProcessing
289
- case "conversation":
290
- playbackMode = .conversation
291
172
  default:
292
- playbackMode = .regular
173
+ playbackMode = .conversation
293
174
  }
294
175
 
295
176
  // Configure shared engine (handles voice processing)
@@ -352,10 +233,8 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
352
233
  options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
353
234
  if let settings = recordingSettings {
354
235
  try audioSession.setPreferredSampleRate(settings.sampleRate)
355
- // Set IO buffer duration short enough to support the desired emission interval.
356
- // Use the hardware sample rate (not the desired rate) since this is a hardware-level setting.
357
236
  let hwSampleRate = audioSession.sampleRate > 0 ? audioSession.sampleRate : 48000.0
358
- let preferredDuration = 512.0 / hwSampleRate // ~10.7ms at 48kHz
237
+ let preferredDuration = 512.0 / hwSampleRate
359
238
  try audioSession.setPreferredIOBufferDuration(preferredDuration)
360
239
  }
361
240
  try audioSession.setActive(true)
@@ -395,7 +274,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
395
274
 
396
275
  func onMicrophoneData(_ microphoneData: Data, _ soundLevel: Float?, _ frequencyBands: FrequencyBands?) {
397
276
  let encodedData = microphoneData.base64EncodedString()
398
- // Construct the event payload similar to Android
399
277
  var eventBody: [String: Any] = [
400
278
  "fileUri": "",
401
279
  "lastEmittedSize": 0,
@@ -413,7 +291,6 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
413
291
  "high": bands.high
414
292
  ]
415
293
  }
416
- // Emit the event to JavaScript
417
294
  sendEvent(audioDataEvent, eventBody)
418
295
  }
419
296
 
@@ -441,12 +318,4 @@ public class ExpoPlayAudioStreamModule: Module, MicrophoneDataDelegate, SoundPla
441
318
 
442
319
  sendEvent(deviceReconnectedEvent, ["reason": reasonString])
443
320
  }
444
-
445
- func onSoundChunkPlayed(_ isFinal: Bool) {
446
- sendEvent(soundIsPlayedEvent, ["isFinal": isFinal])
447
- }
448
-
449
- func onSoundStartedPlaying() {
450
- sendEvent(soundIsStartedEvent)
451
- }
452
321
  }
@@ -1,7 +1,7 @@
1
1
  import AVFoundation
2
2
 
3
3
  /// Delegate for receiving engine lifecycle events.
4
- /// Both SoundPlayer and AudioPipeline implement this
4
+ /// Both AudioPipeline consumers implement this
5
5
  /// to handle route changes and interruptions.
6
6
  protocol SharedAudioEngineDelegate: AnyObject {
7
7
  /// Called after the engine has been restarted due to a route change.
@@ -27,7 +27,7 @@ protocol SharedAudioEngineDelegate: AnyObject {
27
27
  func engineDidDie(reason: String)
28
28
  }
29
29
 
30
- /// Owns the single AVAudioEngine shared between SoundPlayer and AudioPipeline.
30
+ /// Owns the single AVAudioEngine shared between AudioPipeline consumers.
31
31
  ///
32
32
  /// Responsibilities:
33
33
  /// - Engine lifecycle (create, start, stop, teardown)
@@ -41,9 +41,11 @@ protocol SharedAudioEngineDelegate: AnyObject {
41
41
  class SharedAudioEngine {
42
42
  private static let TAG = "SharedAudioEngine"
43
43
 
44
+ private let lock = NSRecursiveLock()
45
+
44
46
  // ── Engine state ─────────────────────────────────────────────────────
45
47
  private(set) var engine: AVAudioEngine?
46
- private(set) var playbackMode: PlaybackMode = .regular
48
+ private(set) var playbackMode: PlaybackMode = .conversation
47
49
  private(set) var isConfigured = false
48
50
 
49
51
  /// All registered consumers receive route-change and interruption callbacks.
@@ -51,12 +53,16 @@ class SharedAudioEngine {
51
53
  private let delegates = NSHashTable<AnyObject>.weakObjects()
52
54
 
53
55
  func addDelegate(_ d: SharedAudioEngineDelegate) {
56
+ lock.lock()
57
+ defer { lock.unlock() }
54
58
  if !delegates.contains(d as AnyObject) {
55
59
  delegates.add(d as AnyObject)
56
60
  }
57
61
  }
58
62
 
59
63
  func removeDelegate(_ d: SharedAudioEngineDelegate) {
64
+ lock.lock()
65
+ defer { lock.unlock() }
60
66
  delegates.remove(d as AnyObject)
61
67
  }
62
68
 
@@ -86,6 +92,8 @@ class SharedAudioEngine {
86
92
  ///
87
93
  /// - Parameter playbackMode: Determines whether voice processing is enabled.
88
94
  func configure(playbackMode: PlaybackMode) throws {
95
+ lock.lock()
96
+ defer { lock.unlock() }
89
97
  if isConfigured && self.playbackMode == playbackMode && engine?.isRunning == true {
90
98
  Logger.debug("[\(SharedAudioEngine.TAG)] Already configured for \(playbackMode) and engine running, skipping")
91
99
  return
@@ -111,10 +119,10 @@ class SharedAudioEngine {
111
119
  Logger.debug("[\(SharedAudioEngine.TAG)] Voice processing enabled")
112
120
  }
113
121
 
114
- // Do NOT explicitly connect mainMixerNode outputNode.
115
- // The engine auto-negotiates the hardware format for that hop,
116
- // avoiding IsFormatSampleRateAndChannelCountValid crashes when
117
- // the consumer's format doesn't match the hardware sample rate.
122
+ // VP accesses inputNode/outputNode above, which creates the graph.
123
+ // Do NOT access mainMixerNode here — inserting the mixer after
124
+ // setVoiceProcessingEnabled disrupts VoiceProcessingIO's internal
125
+ // graph and causes scheduleBuffer completions to never fire.
118
126
 
119
127
  try engine.start()
120
128
 
@@ -152,6 +160,8 @@ class SharedAudioEngine {
152
160
  /// Connects `node → mainMixerNode` with the given format.
153
161
  /// The mixer handles sample-rate conversion to hardware output.
154
162
  func attachNode(_ node: AVAudioPlayerNode, format: AVAudioFormat) {
163
+ lock.lock()
164
+ defer { lock.unlock() }
155
165
  guard let engine = engine else {
156
166
  Logger.debug("[\(SharedAudioEngine.TAG)] attachNode called but engine is nil")
157
167
  return
@@ -166,6 +176,8 @@ class SharedAudioEngine {
166
176
 
167
177
  /// Detach a consumer's player node from the shared engine.
168
178
  func detachNode(_ node: AVAudioPlayerNode) {
179
+ lock.lock()
180
+ defer { lock.unlock() }
169
181
  guard let engine = engine else { return }
170
182
 
171
183
  node.pause()
@@ -189,6 +201,8 @@ class SharedAudioEngine {
189
201
 
190
202
  /// Tear down the engine completely. Called on reconfigure or module destroy.
191
203
  func teardown() {
204
+ lock.lock()
205
+ defer { lock.unlock() }
192
206
  // Remove observers
193
207
  NotificationCenter.default.removeObserver(
194
208
  self, name: AVAudioSession.routeChangeNotification, object: nil)
@@ -239,6 +253,9 @@ class SharedAudioEngine {
239
253
  return
240
254
  }
241
255
 
256
+ lock.lock()
257
+ defer { lock.unlock() }
258
+
242
259
  let routeDescription = AVAudioSession.sharedInstance().currentRoute.outputs
243
260
  .map { "\($0.portName) (\($0.portType.rawValue))" }
244
261
  .joined(separator: ", ")
@@ -469,6 +486,9 @@ class SharedAudioEngine {
469
486
  let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
470
487
  let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { return }
471
488
 
489
+ lock.lock()
490
+ defer { lock.unlock() }
491
+
472
492
  if type == .began {
473
493
  Logger.debug("[\(SharedAudioEngine.TAG)] Audio session interruption began")
474
494
  notifyDelegates { $0.audioSessionInterruptionBegan() }
@@ -2,44 +2,10 @@
2
2
  public enum PlaybackMode {
3
3
  /// Regular playback mode for standard audio playback
4
4
  case regular
5
-
5
+
6
6
  /// Conversation mode optimized for speech
7
7
  case conversation
8
-
8
+
9
9
  /// Voice processing mode with enhanced voice quality and automatic engine cleanup
10
10
  case voiceProcessing
11
11
  }
12
-
13
- /// Configuration for audio playback settings
14
- public struct SoundConfig {
15
- /// The sample rate for audio playback in Hz
16
- public var sampleRate: Double
17
-
18
- /// The playback mode (regular, conversation, or voiceProcessing)
19
- public var playbackMode: PlaybackMode
20
-
21
- /// Default configuration with standard settings
22
- public static let defaultConfig = SoundConfig(
23
- sampleRate: 44100.0,
24
- playbackMode: .regular
25
- )
26
-
27
- /// Creates a new sound configuration with the specified settings
28
- /// - Parameters:
29
- /// - sampleRate: The sample rate in Hz (default: 44100.0)
30
- /// - playbackMode: The playback mode (default: .regular)
31
- public init(
32
- sampleRate: Double = 44100.0,
33
- playbackMode: PlaybackMode = .regular
34
- ) {
35
- self.sampleRate = sampleRate
36
- self.playbackMode = playbackMode
37
- }
38
-
39
- /// Resets the configuration to default values
40
- /// - Returns: The updated configuration with default values
41
- public mutating func resetToDefault() -> SoundConfig {
42
- self = SoundConfig.defaultConfig
43
- return self
44
- }
45
- }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@edkimmel/expo-audio-stream",
3
- "version": "0.3.3",
3
+ "version": "0.4.1",
4
4
  "description": "Expo Play Audio Stream module",
5
5
  "main": "build/index.js",
6
6
  "types": "build/index.d.ts",
package/src/events.ts CHANGED
@@ -23,10 +23,6 @@ export interface AudioEventPayload {
23
23
  frequencyBands?: { low: number; mid: number; high: number };
24
24
  }
25
25
 
26
- export type SoundChunkPlayedEventPayload = {
27
- isFinal: boolean;
28
- };
29
-
30
26
  export const DeviceReconnectedReasons = {
31
27
  newDeviceAvailable: "newDeviceAvailable",
32
28
  oldDeviceUnavailable: "oldDeviceUnavailable",
@@ -42,8 +38,6 @@ export type DeviceReconnectedEventPayload = {
42
38
 
43
39
  export const AudioEvents = {
44
40
  AudioData: "AudioData",
45
- SoundChunkPlayed: "SoundChunkPlayed",
46
- SoundStarted: "SoundStarted",
47
41
  DeviceReconnected: "DeviceReconnected",
48
42
  };
49
43
 
@@ -53,12 +47,6 @@ export function addAudioEventListener(
53
47
  return (emitter as any).addListener("AudioData", listener);
54
48
  }
55
49
 
56
- export function addSoundChunkPlayedListener(
57
- listener: (event: SoundChunkPlayedEventPayload) => Promise<void>
58
- ): EventSubscription {
59
- return (emitter as any).addListener("SoundChunkPlayed", listener);
60
- }
61
-
62
50
  export function subscribeToEvent<T extends unknown>(
63
51
  eventName: string,
64
52
  listener: (event: T | undefined) => Promise<void>
package/src/index.ts CHANGED
@@ -8,7 +8,6 @@ import {
8
8
  AudioRecording,
9
9
  RecordingConfig,
10
10
  StartRecordingResult,
11
- SoundConfig,
12
11
  PlaybackMode,
13
12
  Encoding,
14
13
  EncodingTypes,
@@ -31,82 +30,21 @@ import {
31
30
 
32
31
  import {
33
32
  addAudioEventListener,
34
- addSoundChunkPlayedListener,
35
33
  AudioEventPayload,
36
- SoundChunkPlayedEventPayload,
37
34
  AudioEvents,
38
35
  subscribeToEvent,
39
36
  DeviceReconnectedReason,
40
37
  DeviceReconnectedEventPayload,
41
38
  } from "./events";
42
39
 
43
- const SuspendSoundEventTurnId = "suspend-sound-events";
44
-
45
40
  export class ExpoPlayAudioStream {
46
41
  /**
47
42
  * Destroys the audio stream module, cleaning up all resources.
48
43
  * This should be called when the module is no longer needed.
49
44
  * It will reset all internal state and release audio resources.
50
45
  */
51
- static destroy() {
52
- ExpoPlayAudioStreamModule.destroy();
53
- }
54
-
55
- /**
56
- * @deprecated Use the `Pipeline` class for more efficient audio streaming with better error handling and telemetry.
57
- * Plays a sound.
58
- * @param {string} audio - The audio to play.
59
- * @param {string} turnId - The turn ID.
60
- * @param {string} [encoding] - The encoding format of the audio data ('pcm_f32le' or 'pcm_s16le').
61
- * @returns {Promise<void>}
62
- * @throws {Error} If the sound fails to play.
63
- */
64
- static async playSound(
65
- audio: string,
66
- turnId: string,
67
- encoding?: Encoding
68
- ): Promise<void> {
69
- try {
70
- await ExpoPlayAudioStreamModule.playSound(
71
- audio,
72
- turnId,
73
- encoding ?? EncodingTypes.PCM_S16LE
74
- );
75
- } catch (error) {
76
- console.error(error);
77
- throw new Error(`Failed to enqueue audio: ${error}`);
78
- }
79
- }
80
-
81
- /**
82
- * @deprecated Use the `Pipeline` class for more efficient audio streaming with better error handling and telemetry.
83
- * Stops the currently playing sound.
84
- * @returns {Promise<void>}
85
- * @throws {Error} If the sound fails to stop.
86
- */
87
- static async stopSound(): Promise<void> {
88
- try {
89
- await ExpoPlayAudioStreamModule.stopSound();
90
- } catch (error) {
91
- console.error(error);
92
- throw new Error(`Failed to stop enqueued audio: ${error}`);
93
- }
94
- }
95
-
96
- /**
97
- * @deprecated Use the `Pipeline` class for more efficient audio streaming with better error handling and telemetry.
98
- * Clears the sound queue by turn ID.
99
- * @param {string} turnId - The turn ID.
100
- * @returns {Promise<void>}
101
- * @throws {Error} If the sound queue fails to clear.
102
- */
103
- static async clearSoundQueueByTurnId(turnId: string): Promise<void> {
104
- try {
105
- await ExpoPlayAudioStreamModule.clearSoundQueueByTurnId(turnId);
106
- } catch (error) {
107
- console.error(error);
108
- throw new Error(`Failed to clear sound queue: ${error}`);
109
- }
46
+ static async destroy() {
47
+ await ExpoPlayAudioStreamModule.destroy();
110
48
  }
111
49
 
112
50
  /**
@@ -181,15 +119,7 @@ export class ExpoPlayAudioStream {
181
119
  /**
182
120
  * Subscribes to audio events emitted during recording/streaming.
183
121
  * @param onMicrophoneStream - Callback function that will be called when audio data is received.
184
- * The callback receives an AudioDataEvent containing:
185
- * - data: Base64 encoded audio data at original sample rate
186
- * - data16kHz: Optional base64 encoded audio data resampled to 16kHz
187
- * - position: Current position in the audio stream
188
- * - fileUri: URI of the recording file
189
- * - eventDataSize: Size of the current audio data chunk
190
- * - totalSize: Total size of recorded audio so far
191
122
  * @returns {Subscription} A subscription object that can be used to unsubscribe from the events
192
- * @throws {Error} If encoded audio data is missing from the event
193
123
  */
194
124
  static subscribeToAudioEvents(
195
125
  onMicrophoneStream: (event: AudioDataEvent) => Promise<void>
@@ -213,18 +143,6 @@ export class ExpoPlayAudioStream {
213
143
  });
214
144
  }
215
145
 
216
- /**
217
- * Subscribes to events emitted when a sound chunk has finished playing.
218
- * @param onSoundChunkPlayed - Callback function that will be called when a sound chunk is played.
219
- * The callback receives a SoundChunkPlayedEventPayload indicating if this was the final chunk.
220
- * @returns {Subscription} A subscription object that can be used to unsubscribe from the events.
221
- */
222
- static subscribeToSoundChunkPlayed(
223
- onSoundChunkPlayed: (event: SoundChunkPlayedEventPayload) => Promise<void>
224
- ): Subscription {
225
- return addSoundChunkPlayedListener(onSoundChunkPlayed);
226
- }
227
-
228
146
  /**
229
147
  * Subscribes to events emitted by the audio stream module, for advanced use cases.
230
148
  * @param eventName - The name of the event to subscribe to.
@@ -238,21 +156,6 @@ export class ExpoPlayAudioStream {
238
156
  return subscribeToEvent(eventName, onEvent);
239
157
  }
240
158
 
241
- /**
242
- * Sets the sound player configuration.
243
- * @param {SoundConfig} config - Configuration options for the sound player.
244
- * @returns {Promise<void>}
245
- * @throws {Error} If the configuration fails to update.
246
- */
247
- static async setSoundConfig(config: SoundConfig): Promise<void> {
248
- try {
249
- await ExpoPlayAudioStreamModule.setSoundConfig(config);
250
- } catch (error) {
251
- console.error(error);
252
- throw new Error(`Failed to set sound configuration: ${error}`);
253
- }
254
- }
255
-
256
159
  /**
257
160
  * Prompts the user to select the microphone mode.
258
161
  * @returns {Promise<void>}
@@ -308,15 +211,12 @@ export class ExpoPlayAudioStream {
308
211
 
309
212
  export {
310
213
  AudioDataEvent,
311
- SoundChunkPlayedEventPayload,
312
214
  DeviceReconnectedReason,
313
215
  DeviceReconnectedEventPayload,
314
216
  AudioRecording,
315
217
  RecordingConfig,
316
218
  StartRecordingResult,
317
219
  AudioEvents,
318
- SuspendSoundEventTurnId,
319
- SoundConfig,
320
220
  PlaybackMode,
321
221
  Encoding,
322
222
  EncodingTypes,
package/src/types.ts CHANGED
@@ -16,41 +16,6 @@ export const PlaybackModes = {
16
16
  export type PlaybackMode =
17
17
  (typeof PlaybackModes)[keyof typeof PlaybackModes];
18
18
 
19
- /**
20
- * Configuration for audio playback settings
21
- */
22
- export interface SoundConfig {
23
- /**
24
- * The sample rate for audio playback in Hz
25
- */
26
- sampleRate?: SampleRate;
27
-
28
- /**
29
- * The playback mode (regular, voiceProcessing, or conversation)
30
- */
31
- playbackMode?: PlaybackMode;
32
-
33
- /**
34
- * When true, resets to default configuration regardless of other parameters
35
- */
36
- useDefault?: boolean;
37
-
38
- /**
39
- * Enable jitter buffering for audio streams
40
- */
41
- enableBuffering?: boolean;
42
-
43
- /**
44
- * Automatically enable buffering based on network conditions
45
- */
46
- autoBuffer?: boolean;
47
-
48
- /**
49
- * Configuration for the jitter buffer when enableBuffering is true
50
- */
51
- bufferConfig?: Partial<IAudioBufferConfig>;
52
- }
53
-
54
19
  /**
55
20
  * Configuration for buffered audio streaming
56
21
  */