@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,408 @@
1
+ import AVFoundation
2
+ import ExpoModulesCore
3
+
4
/// Streams base64-encoded PCM audio chunks through an `AVAudioPlayerNode`
/// attached to a `SharedAudioEngine`. Chunks are queued per "turn", played
/// back sequentially, and playback lifecycle events are forwarded to a
/// `SoundPlayerDelegate`.
class SoundPlayer: SharedAudioEngineDelegate {
    weak var delegate: SoundPlayerDelegate?

    // Player node attached to the shared engine. Recreated after engine
    // rebuilds, so it is legitimately nil until the next successful
    // ensureAudioEngineIsSetup().
    private var audioPlayerNode: AVAudioPlayerNode!
    private weak var sharedEngine: SharedAudioEngine?

    // Serial queue protecting audioQueue, segmentsLeftToPlay and
    // pendingFinalWorkItem.
    private let bufferAccessQueue = DispatchQueue(label: "com.expoaudiostream.bufferAccessQueue")

    // Queue of decoded audio segments awaiting playback, each with the JS
    // promise to resolve when the segment finishes and its owning turn id.
    private var audioQueue: [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock, turnId: String)] = []
    // Segments enqueued but not yet finished playing — needed to send
    // playback events properly (drives "started" and debounced "final").
    private var segmentsLeftToPlay: Int = 0
    private var isPlaying: Bool = false // Tracks if audio is currently playing
    public var isAudioEngineIsSetup: Bool = false

    // specific turnID to ignore sound events
    internal let suspendSoundEventTurnId: String = "suspend-sound-events"

    // Debounce mechanism for isFinal signal - prevents premature isFinal when chunks arrive with network latency
    private var pendingFinalWorkItem: DispatchWorkItem?
    private let finalDebounceDelay: TimeInterval = 0.8 // 800ms for smooth debounce

    private var audioPlaybackFormat: AVAudioFormat!
    private var config: SoundConfig

    init(config: SoundConfig = SoundConfig()) {
        self.config = config
        self.audioPlaybackFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: config.sampleRate, channels: 1, interleaved: false)
    }

    /// Set the shared audio engine reference. Called by the module after creation.
    func setSharedEngine(_ engine: SharedAudioEngine) {
        self.sharedEngine = engine
    }

    // MARK: - SharedAudioEngineDelegate

    func engineDidRestartAfterRouteChange() {
        Logger.debug("[SoundPlayer] Engine restarted after route change")
        // Node has already been re-attached and played by SharedAudioEngine.
        // Notify delegate so JS layer knows about the route change.
        self.delegate?.onDeviceReconnected(.newDeviceAvailable)

        // Re-trigger playback if there are still queued buffers.
        // The scheduling chain was broken when the node was stopped during rebuild.
        self.rescheduleQueuedBuffers(after: "route change")
    }

    func engineDidRebuild() {
        Logger.debug("[SoundPlayer] Engine rebuilt — creating fresh node")
        // Old node is invalid. Nil it out and set up a fresh one.
        self.audioPlayerNode = nil
        self.isAudioEngineIsSetup = false

        do {
            try ensureAudioEngineIsSetup()
            Logger.debug("[SoundPlayer] Fresh node attached after rebuild")
        } catch {
            Logger.debug("[SoundPlayer] Failed to create fresh node after rebuild: \(error)")
            // Fall through — next play() call will retry ensureAudioEngineIsSetup
        }

        // Notify JS about the route change
        self.delegate?.onDeviceReconnected(.newDeviceAvailable)

        // Re-trigger playback if there are still queued buffers
        self.rescheduleQueuedBuffers(after: "rebuild")
    }

    /// Hops onto the buffer queue and restarts playback when segments are
    /// still pending. `context` only affects the debug log message.
    private func rescheduleQueuedBuffers(after context: String) {
        self.bufferAccessQueue.async { [weak self] in
            guard let self = self else { return }
            if !self.audioQueue.isEmpty {
                Logger.debug("[SoundPlayer] Re-scheduling \(self.audioQueue.count) queued buffers after \(context)")
                self.playNextInQueue()
            }
        }
    }

    func audioSessionInterruptionBegan() {
        Logger.debug("[SoundPlayer] Audio session interruption began")
        // Nothing specific needed — playback buffers just won't produce sound.
    }

    func audioSessionInterruptionEnded() {
        Logger.debug("[SoundPlayer] Audio session interruption ended")
        // Engine already restarted by SharedAudioEngine. Node re-started.
        // If there are queued buffers, playback continues automatically.
    }

    func engineDidDie(reason: String) {
        Logger.debug("[SoundPlayer] Engine died: \(reason)")
        // Clear our node reference — engine is already torn down.
        self.audioPlayerNode = nil
        self.isAudioEngineIsSetup = false

        // Clear queued buffers and notify JS
        self.bufferAccessQueue.async { [weak self] in
            guard let self = self else { return }
            self.pendingFinalWorkItem?.cancel()
            self.pendingFinalWorkItem = nil
            // Fix: resolve the promises of dropped segments so JS callers
            // awaiting play() complete instead of hanging forever.
            let dropped = self.audioQueue
            self.audioQueue.removeAll()
            self.segmentsLeftToPlay = 0
            if !dropped.isEmpty {
                DispatchQueue.main.async {
                    dropped.forEach { $0.promise(nil) }
                }
            }
        }

        // Notify JS layer about the device issue
        DispatchQueue.main.async { [weak self] in
            self?.delegate?.onDeviceReconnected(.oldDeviceUnavailable)
        }
    }

    /// Detaches and cleans up the existing audio player node from the shared engine
    private func detachOldAvNodesFromEngine() {
        Logger.debug("[SoundPlayer] Detaching old audio node")
        guard let playerNode = self.audioPlayerNode else { return }

        sharedEngine?.detachNode(playerNode)

        // Set to nil, ARC deallocates it if no other references exist
        self.audioPlayerNode = nil
    }

    /// Updates the audio configuration and re-attaches the player node with the new format.
    ///
    /// Engine reconfiguration (for playbackMode changes) is handled by the module
    /// via `SharedAudioEngine.configure()` before calling this method.
    ///
    /// - Parameter newConfig: The new configuration to apply
    /// - Throws: Error if node setup fails
    public func updateConfig(_ newConfig: SoundConfig) throws {
        Logger.debug("[SoundPlayer] Updating configuration - sampleRate: \(newConfig.sampleRate), playbackMode: \(newConfig.playbackMode)")

        // No-op when neither the sample rate nor the playback mode changed.
        let configChanged = newConfig.sampleRate != self.config.sampleRate ||
            newConfig.playbackMode != self.config.playbackMode

        guard configChanged else {
            Logger.debug("[SoundPlayer] Configuration unchanged, skipping update")
            return
        }

        // Detach existing node
        self.detachOldAvNodesFromEngine()

        // Update configuration
        self.config = newConfig

        // Update format with new sample rate
        self.audioPlaybackFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: newConfig.sampleRate, channels: 1, interleaved: false)

        // Attach a fresh node with the new format
        try self.ensureAudioEngineIsSetup()
    }

    /// Resets the audio configuration to default values and reconfigures the audio engine
    /// - Throws: Error if audio engine setup fails
    public func resetConfigToDefault() throws {
        Logger.debug("[SoundPlayer] Resetting configuration to default values")
        try updateConfig(SoundConfig.defaultConfig)
    }

    /// Attaches a fresh player node to the shared engine.
    /// - Throws: Error if shared engine is not available
    public func ensureAudioEngineIsSetup() throws {
        guard let sharedEngine = self.sharedEngine else {
            throw NSError(domain: "SoundPlayer", code: -1,
                          userInfo: [NSLocalizedDescriptionKey: "SharedAudioEngine not set"])
        }

        // Detach any existing node first
        self.detachOldAvNodesFromEngine()

        // Create a fresh player node and attach to the shared engine
        let node = AVAudioPlayerNode()
        sharedEngine.attachNode(node, format: self.audioPlaybackFormat)
        self.audioPlayerNode = node
        self.isAudioEngineIsSetup = true

        Logger.debug("[SoundPlayer] Node attached to shared engine — sampleRate=\(config.sampleRate)")
    }

    /// Clears pending audio chunks for the given turn from the playback queue.
    /// - Parameters:
    ///   - turnId: Only segments with this turn id are removed.
    ///   - promise: Promise to resolve when queue is cleared
    func clearSoundQueue(turnIdToClear turnId: String = "", resolver promise: Promise) {
        Logger.debug("[SoundPlayer] Clearing Sound Queue...")
        self.bufferAccessQueue.async { [weak self] in
            guard let self = self else {
                promise.resolve(nil)
                return
            }

            // Cancel any pending final signal when clearing queue
            self.pendingFinalWorkItem?.cancel()
            self.pendingFinalWorkItem = nil

            if !self.audioQueue.isEmpty {
                Logger.debug("[SoundPlayer] Queue is not empty clearing")
                let removed = self.audioQueue.filter { $0.turnId == turnId }
                self.audioQueue.removeAll(where: { $0.turnId == turnId })
                // Adjust segmentsLeftToPlay to account for removed items
                self.segmentsLeftToPlay = max(0, self.segmentsLeftToPlay - removed.count)
                // Fix: resolve the dropped segments' promises so their JS
                // callers complete instead of hanging.
                if !removed.isEmpty {
                    DispatchQueue.main.async {
                        removed.forEach { $0.promise(nil) }
                    }
                }
            } else {
                Logger.debug("[SoundPlayer] Queue is empty")
            }
            promise.resolve(nil)
        }
    }

    /// Stops audio playback and clears the queue
    /// - Parameter promise: Promise to resolve when stopped
    func stop(_ promise: Promise) {
        Logger.debug("[SoundPlayer] Stopping Audio")

        // Stop the audio player node (engine stays running — it's shared).
        // Fix: bind optionally instead of force-unwrapping the implicitly
        // unwrapped node, which is nil after engineDidDie / a failed rebuild.
        if let node = self.audioPlayerNode, node.isPlaying {
            Logger.debug("[SoundPlayer] Player is playing, stopping")
            node.pause()
            node.stop()
        } else {
            Logger.debug("Player is not playing")
        }

        // Clear queue and reset segment count on bufferAccessQueue for thread safety
        self.bufferAccessQueue.async { [weak self] in
            guard let self = self else {
                promise.resolve(nil)
                return
            }

            // Cancel any pending final signal
            self.pendingFinalWorkItem?.cancel()
            self.pendingFinalWorkItem = nil

            if !self.audioQueue.isEmpty {
                Logger.debug("[SoundPlayer] Queue is not empty clearing")
                // Fix: resolve the dropped segments' promises before discarding
                // them so their JS callers complete.
                let dropped = self.audioQueue
                self.audioQueue.removeAll()
                DispatchQueue.main.async {
                    dropped.forEach { $0.promise(nil) }
                }
            }
            self.segmentsLeftToPlay = 0
            promise.resolve(nil)
        }
    }

    /// Processes audio chunk based on common format
    /// - Parameters:
    ///   - base64String: Base64 encoded audio data
    ///   - commonFormat: The common format of the audio data
    /// - Returns: Processed audio buffer or nil if processing fails
    /// - Throws: SoundPlayerError if format is unsupported
    private func processAudioChunk(_ base64String: String, commonFormat: AVAudioCommonFormat) throws -> AVAudioPCMBuffer? {
        switch commonFormat {
        case .pcmFormatFloat32:
            return AudioUtils.processFloat32LEAudioChunk(base64String, audioFormat: self.audioPlaybackFormat)
        case .pcmFormatInt16:
            return AudioUtils.processPCM16LEAudioChunk(base64String, audioFormat: self.audioPlaybackFormat)
        default:
            Logger.debug("[SoundPlayer] Unsupported audio format: \(commonFormat)")
            throw SoundPlayerError.unsupportedFormat
        }
    }

    /// Plays an audio chunk from base64 encoded string
    /// - Parameters:
    ///   - base64String: Base64 encoded audio data
    ///   - strTurnId: Identifier for the turn/segment
    ///   - resolver: Promise resolver callback, invoked when the chunk finishes playing
    ///   - rejecter: Promise rejection callback, invoked if decoding/setup fails
    ///   - commonFormat: The common format of the audio data (defaults to .pcmFormatFloat32)
    /// - Throws: Error if audio processing fails
    public func play(
        audioChunk base64String: String,
        turnId strTurnId: String,
        resolver: @escaping RCTPromiseResolveBlock,
        rejecter: @escaping RCTPromiseRejectBlock,
        commonFormat: AVAudioCommonFormat = .pcmFormatFloat32
    ) throws {
        do {
            if !self.isAudioEngineIsSetup {
                try ensureAudioEngineIsSetup()
            }

            guard let buffer = try processAudioChunk(base64String, commonFormat: commonFormat) else {
                Logger.debug("[SoundPlayer] Failed to process audio chunk")
                throw SoundPlayerError.invalidBase64String
            }

            // Use bufferAccessQueue for all queue and segment count access to ensure thread safety
            self.bufferAccessQueue.async { [weak self] in
                guard let self = self else {
                    resolver(nil)
                    return
                }

                // Cancel any pending "final" signal - new chunk arrived, so we're not done yet
                self.pendingFinalWorkItem?.cancel()
                self.pendingFinalWorkItem = nil

                let bufferTuple = (buffer: buffer, promise: resolver, turnId: strTurnId)
                self.audioQueue.append(bufferTuple)
                // First segment of a new sequence → emit "started" (unless suspended).
                if self.segmentsLeftToPlay == 0 && strTurnId != self.suspendSoundEventTurnId {
                    DispatchQueue.main.async {
                        self.delegate?.onSoundStartedPlaying()
                    }
                }
                self.segmentsLeftToPlay += 1
                // If not already playing, start playback
                if self.audioQueue.count == 1 {
                    self.playNextInQueue()
                }
            }
        } catch {
            Logger.debug("[SoundPlayer] Failed to enqueue audio chunk: \(error.localizedDescription)")
            rejecter("ERROR_SOUND_PLAYER", "Failed to enqueue audio chunk: \(error.localizedDescription)", nil)
        }
    }

    /// Plays the next audio buffer in the queue.
    /// This method is responsible for:
    /// 1. Checking if there are audio chunks in the queue
    /// 2. Starting the audio player node if it's not already playing
    /// 3. Scheduling the next audio buffer for playback
    /// 4. Handling completion callbacks and recursively playing the next chunk
    /// - Note: Must be called from bufferAccessQueue (enforced by precondition).
    private func playNextInQueue() {
        dispatchPrecondition(condition: .onQueue(bufferAccessQueue))

        // Bail out if the shared engine is mid-rebuild (route change).
        // engineDidRestartAfterRouteChange will re-trigger us when ready.
        if sharedEngine?.isRebuilding == true {
            Logger.debug("[SoundPlayer] Engine rebuilding — deferring playNextInQueue")
            return
        }

        // Check if queue is empty
        guard !self.audioQueue.isEmpty else {
            Logger.debug("[SoundPlayer] Queue is empty, nothing to play")
            return
        }

        // Fix: the node can be nil after engineDidDie or a failed rebuild;
        // defer instead of crashing on the implicitly-unwrapped optional.
        // A later reschedule (or the next play() call) will retry.
        guard let playerNode = self.audioPlayerNode else {
            Logger.debug("[SoundPlayer] Player node unavailable — deferring playNextInQueue")
            return
        }

        // Start the audio player node if it's not already playing
        if !playerNode.isPlaying {
            Logger.debug("[SoundPlayer] Starting Player")
            playerNode.play()
        }

        // Get the first buffer tuple from the queue (buffer, promise, turnId)
        if let (buffer, promise, turnId) = self.audioQueue.first {
            // Remove the buffer from the queue immediately to avoid playing it twice
            self.audioQueue.removeFirst()

            // Schedule the buffer for playback with a completion handler
            playerNode.scheduleBuffer(buffer) { [weak self] in
                guard let self = self else {
                    promise(nil)
                    return
                }

                // Use bufferAccessQueue for all queue and segment count access
                self.bufferAccessQueue.async {
                    // Decrement the count of segments left to play
                    self.segmentsLeftToPlay -= 1

                    // Check if this is the final segment in the current sequence
                    let isFinalSegment = self.segmentsLeftToPlay == 0

                    // Resolve the promise on main thread
                    DispatchQueue.main.async {
                        promise(nil)
                    }

                    // Notify delegate about playback completion
                    if turnId != self.suspendSoundEventTurnId {
                        if isFinalSegment {
                            // Debounce the isFinal signal - wait to see if more chunks arrive
                            // This prevents premature isFinal when chunks arrive with network latency
                            let workItem = DispatchWorkItem { [weak self] in
                                guard let self = self else { return }
                                // Double-check we're still at 0 segments (no new chunks arrived)
                                if self.segmentsLeftToPlay == 0 {
                                    Logger.debug("[SoundPlayer] Debounced isFinal - no more chunks arrived, sending isFinal: true")
                                    DispatchQueue.main.async {
                                        self.delegate?.onSoundChunkPlayed(true)
                                    }
                                }
                            }
                            self.pendingFinalWorkItem = workItem
                            self.bufferAccessQueue.asyncAfter(deadline: .now() + self.finalDebounceDelay, execute: workItem)
                        } else {
                            // Not the final segment, send immediately
                            DispatchQueue.main.async {
                                self.delegate?.onSoundChunkPlayed(false)
                            }
                        }
                    }

                    // Recursively play the next chunk if queue is not empty
                    if !self.audioQueue.isEmpty {
                        self.playNextInQueue()
                    }
                }
            }
        }
    }
}
@@ -0,0 +1,7 @@
import AVFoundation

/// Receives playback lifecycle callbacks from `SoundPlayer`.
protocol SoundPlayerDelegate: AnyObject {
    /// Called when a queued chunk finishes playing; `isFinal` is true only for
    /// the (debounced) last chunk of a playback sequence.
    func onSoundChunkPlayed(_ isFinal: Bool)
    /// Called when the first chunk of a new playback sequence is enqueued.
    func onSoundStartedPlaying()
    /// Called after an audio route change, engine rebuild, or engine death,
    /// with the `AVAudioSession` route-change reason.
    func onDeviceReconnected(_ reason: AVAudioSession.RouteChangeReason)
}
package/package.json ADDED
@@ -0,0 +1,49 @@
1
+ {
2
+ "name": "@edkimmel/expo-audio-stream",
3
+ "version": "0.2.0",
4
+ "description": "Expo Play Audio Stream module",
5
+ "main": "build/index.js",
6
+ "types": "build/index.d.ts",
7
+ "source": "src/index.ts",
8
+ "scripts": {
9
+ "build": "expo-module build",
10
+ "clean": "expo-module clean",
11
+ "lint": "expo-module lint",
12
+ "test": "expo-module test",
13
+ "prepare": "expo-module prepare && husky || true",
14
+ "prepublishOnly": "expo-module prepublishOnly",
15
+ "expo-module": "expo-module",
16
+ "open:ios": "open -a \"Xcode\" example/ios",
17
+ "open:android": "open -a \"Android Studio\" example/android",
18
+ "bump:patch": "npm version patch"
19
+ },
20
+ "keywords": [
21
+ "react-native",
22
+ "expo",
23
+ "expo-play-audio-stream",
24
+ "ExpoPlayAudioStream"
25
+ ],
26
+ "repository": "https://github.com/edkimmel/expo-audio-stream",
27
+ "bugs": {
28
+ "url": "https://github.com/edkimmel/expo-audio-stream/issues"
29
+ },
30
+ "author": "pax <nzpopa@users.noreply.github.com> (https://github.com/hyphen-id)",
31
+ "license": "MIT",
32
+ "homepage": "https://github.com/edkimmel/expo-audio-stream#readme",
33
+ "devDependencies": {
34
+ "@types/react": "^19.0.0",
35
+ "expo-module-scripts": "^4.0.0",
36
+ "expo-modules-core": "^2.2.0",
37
+ "husky": "^9.0.11"
38
+ },
39
+ "peerDependencies": {
40
+ "expo": "*",
41
+ "react": "*",
42
+ "react-native": "*"
43
+ },
44
+ "packageManager": "yarn@4.1.1",
45
+ "publishConfig": {
46
+ "access": "public",
47
+ "registry": "https://registry.npmjs.org/"
48
+ }
49
+ }
@@ -0,0 +1,5 @@
1
// Generated type declarations for the recording-permission config plugin
// (compiled from plugin/src/index.ts by `tsc` — do not edit by hand).
import { ConfigPlugin } from '@expo/config-plugins';
// Accepts an options object whose `microphonePermission` overrides the
// default NSMicrophoneUsageDescription text.
declare const withRecordingPermission: ConfigPlugin<{
    microphonePermission: string;
}>;
export default withRecordingPermission;
@@ -0,0 +1,28 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const config_plugins_1 = require("@expo/config-plugins");
// Default iOS microphone usage description; $(PRODUCT_NAME) is expanded by Xcode.
const MICROPHONE_USAGE = 'Allow $(PRODUCT_NAME) to access your microphone';
/**
 * Config plugin that prepares the app for audio recording:
 * iOS — sets NSMicrophoneUsageDescription (if absent) and adds the "audio"
 * UIBackgroundModes entry; Android — writes recording-related manifest entries.
 */
const withRecordingPermission = (config, existingPerms) => {
    if (!existingPerms) {
        console.warn('No previous permissions provided');
    }
    config = (0, config_plugins_1.withInfoPlist)(config, (config) => {
        // Keep any usage description the app already declares.
        config.modResults['NSMicrophoneUsageDescription'] = config.modResults['NSMicrophoneUsageDescription'] || MICROPHONE_USAGE;
        // Add audio to UIBackgroundModes to allow background audio recording
        const existingBackgroundModes = config.modResults.UIBackgroundModes || [];
        if (!existingBackgroundModes.includes('audio')) {
            existingBackgroundModes.push('audio');
        }
        config.modResults.UIBackgroundModes = existingBackgroundModes;
        return config;
    });
    config = (0, config_plugins_1.withAndroidManifest)(config, (config) => {
        const mainApplication = config_plugins_1.AndroidConfig.Manifest.getMainApplicationOrThrow(config.modResults);
        // NOTE(review): these calls write <meta-data> entries named after
        // permissions; they do NOT grant <uses-permission android:name=...>.
        // Verify whether AndroidConfig.Permissions / withPermissions was intended.
        config_plugins_1.AndroidConfig.Manifest.addMetaDataItemToMainApplication(mainApplication, 'android.permission.RECORD_AUDIO', MICROPHONE_USAGE);
        // Add FOREGROUND_SERVICE permission for handling background recording
        // Fix: "This apps needs" → "This app needs" in the stored description.
        config_plugins_1.AndroidConfig.Manifest.addMetaDataItemToMainApplication(mainApplication, 'android.permission.FOREGROUND_SERVICE', 'This app needs access to the foreground service to record audio in the background');
        return config;
    });
    return config;
};
exports.default = withRecordingPermission;
@@ -0,0 +1,53 @@
1
+ import {
2
+ AndroidConfig,
3
+ ConfigPlugin,
4
+ withAndroidManifest,
5
+ withInfoPlist,
6
+ } from '@expo/config-plugins'
7
+
8
+ const MICROPHONE_USAGE = 'Allow $(PRODUCT_NAME) to access your microphone'
9
+
10
+ const withRecordingPermission: ConfigPlugin<{
11
+ microphonePermission: string
12
+ }> = (config, existingPerms) => {
13
+ if (!existingPerms) {
14
+ console.warn('No previous permissions provided')
15
+ }
16
+ config = withInfoPlist(config, (config) => {
17
+ config.modResults['NSMicrophoneUsageDescription'] = config.modResults['NSMicrophoneUsageDescription'] || MICROPHONE_USAGE
18
+
19
+ // Add audio to UIBackgroundModes to allow background audio recording
20
+ const existingBackgroundModes =
21
+ config.modResults.UIBackgroundModes || []
22
+ if (!existingBackgroundModes.includes('audio')) {
23
+ existingBackgroundModes.push('audio')
24
+ }
25
+ config.modResults.UIBackgroundModes = existingBackgroundModes
26
+
27
+ return config
28
+ })
29
+
30
+ config = withAndroidManifest(config, (config) => {
31
+ const mainApplication =
32
+ AndroidConfig.Manifest.getMainApplicationOrThrow(config.modResults)
33
+
34
+ AndroidConfig.Manifest.addMetaDataItemToMainApplication(
35
+ mainApplication,
36
+ 'android.permission.RECORD_AUDIO',
37
+ MICROPHONE_USAGE
38
+ )
39
+
40
+ // Add FOREGROUND_SERVICE permission for handling background recording
41
+ AndroidConfig.Manifest.addMetaDataItemToMainApplication(
42
+ mainApplication,
43
+ 'android.permission.FOREGROUND_SERVICE',
44
+ 'This apps needs access to the foreground service to record audio in the background'
45
+ )
46
+
47
+ return config
48
+ })
49
+
50
+ return config
51
+ }
52
+
53
+ export default withRecordingPermission
@@ -0,0 +1,9 @@
1
+ {
2
+ "extends": "expo-module-scripts/tsconfig.plugin",
3
+ "compilerOptions": {
4
+ "outDir": "build",
5
+ "rootDir": "src"
6
+ },
7
+ "include": ["./src"],
8
+ "exclude": ["**/__mocks__/*", "**/__tests__/*"]
9
+ }
@@ -0,0 +1 @@
1
+ {"root":["./src/index.ts"],"version":"5.9.3"}
@@ -0,0 +1,5 @@
1
+ import { requireNativeModule } from 'expo-modules-core';
2
+
3
+ // It loads the native module object from the JSI or falls back to
4
+ // the bridge module (from NativeModulesProxy) if the remote debugger is on.
5
+ export default requireNativeModule('ExpoPlayAudioStream');
package/src/events.ts ADDED
@@ -0,0 +1,66 @@
1
+ // packages/expo-audio-stream/src/events.ts
2
+
3
+ import { EventEmitter, type EventSubscription } from "expo-modules-core";
4
+
5
+ // Type alias for backwards compatibility
6
+ export type Subscription = EventSubscription;
7
+
8
+ import ExpoPlayAudioStreamModule from "./ExpoPlayAudioStreamModule";
9
+
10
+ const emitter = new EventEmitter(ExpoPlayAudioStreamModule);
11
+
12
+ export interface AudioEventPayload {
13
+ encoded?: string;
14
+ buffer?: Float32Array;
15
+ fileUri: string;
16
+ lastEmittedSize: number;
17
+ position: number;
18
+ deltaSize: number;
19
+ totalSize: number;
20
+ mimeType: string;
21
+ streamUuid: string;
22
+ soundLevel?: number;
23
+ }
24
+
25
+ export type SoundChunkPlayedEventPayload = {
26
+ isFinal: boolean;
27
+ };
28
+
29
+ export const DeviceReconnectedReasons = {
30
+ newDeviceAvailable: "newDeviceAvailable",
31
+ oldDeviceUnavailable: "oldDeviceUnavailable",
32
+ unknown: "unknown",
33
+ } as const;
34
+
35
+ export type DeviceReconnectedReason =
36
+ (typeof DeviceReconnectedReasons)[keyof typeof DeviceReconnectedReasons];
37
+
38
+ export type DeviceReconnectedEventPayload = {
39
+ reason: DeviceReconnectedReason;
40
+ };
41
+
42
+ export const AudioEvents = {
43
+ AudioData: "AudioData",
44
+ SoundChunkPlayed: "SoundChunkPlayed",
45
+ SoundStarted: "SoundStarted",
46
+ DeviceReconnected: "DeviceReconnected",
47
+ };
48
+
49
+ export function addAudioEventListener(
50
+ listener: (event: AudioEventPayload) => Promise<void>
51
+ ): EventSubscription {
52
+ return (emitter as any).addListener("AudioData", listener);
53
+ }
54
+
55
+ export function addSoundChunkPlayedListener(
56
+ listener: (event: SoundChunkPlayedEventPayload) => Promise<void>
57
+ ): EventSubscription {
58
+ return (emitter as any).addListener("SoundChunkPlayed", listener);
59
+ }
60
+
61
+ export function subscribeToEvent<T extends unknown>(
62
+ eventName: string,
63
+ listener: (event: T | undefined) => Promise<void>
64
+ ): EventSubscription {
65
+ return (emitter as any).addListener(eventName, listener);
66
+ }