@edkimmel/expo-audio-stream 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/.eslintrc.js +5 -0
  2. package/.yarnrc.yml +8 -0
  3. package/NATIVE_EVENTS.md +270 -0
  4. package/README.md +289 -0
  5. package/android/build.gradle +92 -0
  6. package/android/src/main/AndroidManifest.xml +4 -0
  7. package/android/src/main/java/expo/modules/audiostream/AudioDataEncoder.kt +178 -0
  8. package/android/src/main/java/expo/modules/audiostream/AudioEffectsManager.kt +107 -0
  9. package/android/src/main/java/expo/modules/audiostream/AudioPlaybackManager.kt +651 -0
  10. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +509 -0
  11. package/android/src/main/java/expo/modules/audiostream/Constants.kt +21 -0
  12. package/android/src/main/java/expo/modules/audiostream/EventSender.kt +7 -0
  13. package/android/src/main/java/expo/modules/audiostream/ExpoAudioStreamView.kt +7 -0
  14. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +280 -0
  15. package/android/src/main/java/expo/modules/audiostream/PermissionUtils.kt +16 -0
  16. package/android/src/main/java/expo/modules/audiostream/RecordingConfig.kt +60 -0
  17. package/android/src/main/java/expo/modules/audiostream/SoundConfig.kt +46 -0
  18. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +685 -0
  19. package/android/src/main/java/expo/modules/audiostream/pipeline/JitterBuffer.kt +227 -0
  20. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +315 -0
  21. package/app.plugin.js +1 -0
  22. package/build/ExpoPlayAudioStreamModule.d.ts +3 -0
  23. package/build/ExpoPlayAudioStreamModule.d.ts.map +1 -0
  24. package/build/ExpoPlayAudioStreamModule.js +5 -0
  25. package/build/ExpoPlayAudioStreamModule.js.map +1 -0
  26. package/build/events.d.ts +36 -0
  27. package/build/events.d.ts.map +1 -0
  28. package/build/events.js +25 -0
  29. package/build/events.js.map +1 -0
  30. package/build/index.d.ts +125 -0
  31. package/build/index.d.ts.map +1 -0
  32. package/build/index.js +222 -0
  33. package/build/index.js.map +1 -0
  34. package/build/pipeline/index.d.ts +81 -0
  35. package/build/pipeline/index.d.ts.map +1 -0
  36. package/build/pipeline/index.js +140 -0
  37. package/build/pipeline/index.js.map +1 -0
  38. package/build/pipeline/types.d.ts +132 -0
  39. package/build/pipeline/types.d.ts.map +1 -0
  40. package/build/pipeline/types.js +5 -0
  41. package/build/pipeline/types.js.map +1 -0
  42. package/build/types.d.ts +221 -0
  43. package/build/types.d.ts.map +1 -0
  44. package/build/types.js +10 -0
  45. package/build/types.js.map +1 -0
  46. package/expo-module.config.json +9 -0
  47. package/ios/AudioPipeline.swift +562 -0
  48. package/ios/AudioUtils.swift +356 -0
  49. package/ios/ExpoPlayAudioStream.podspec +27 -0
  50. package/ios/ExpoPlayAudioStreamModule.swift +436 -0
  51. package/ios/ExpoPlayAudioStreamView.swift +7 -0
  52. package/ios/JitterBuffer.swift +208 -0
  53. package/ios/Logger.swift +7 -0
  54. package/ios/Microphone.swift +221 -0
  55. package/ios/MicrophoneDataDelegate.swift +4 -0
  56. package/ios/PipelineIntegration.swift +214 -0
  57. package/ios/RecordingResult.swift +10 -0
  58. package/ios/RecordingSettings.swift +11 -0
  59. package/ios/SharedAudioEngine.swift +484 -0
  60. package/ios/SoundConfig.swift +45 -0
  61. package/ios/SoundPlayer.swift +408 -0
  62. package/ios/SoundPlayerDelegate.swift +7 -0
  63. package/package.json +49 -0
  64. package/plugin/build/index.d.ts +5 -0
  65. package/plugin/build/index.js +28 -0
  66. package/plugin/src/index.ts +53 -0
  67. package/plugin/tsconfig.json +9 -0
  68. package/plugin/tsconfig.tsbuildinfo +1 -0
  69. package/src/ExpoPlayAudioStreamModule.ts +5 -0
  70. package/src/events.ts +66 -0
  71. package/src/index.ts +359 -0
  72. package/src/pipeline/index.ts +216 -0
  73. package/src/pipeline/types.ts +169 -0
  74. package/src/types.ts +270 -0
  75. package/tsconfig.json +9 -0
@@ -0,0 +1,221 @@
1
+ import AVFoundation
2
+ import ExpoModulesCore
3
+
4
+
5
+ class Microphone {
6
+ weak var delegate: MicrophoneDataDelegate?
7
+
8
+ private var audioEngine: AVAudioEngine!
9
+ private var audioConverter: AVAudioConverter!
10
+ private var inputNode: AVAudioInputNode!
11
+
12
+ public private(set) var isVoiceProcessingEnabled: Bool = false
13
+
14
+
15
+ internal var lastEmittedSize: Int64 = 0
16
+ private var totalDataSize: Int64 = 0
17
+ internal var recordingSettings: RecordingSettings?
18
+
19
+ internal var mimeType: String = "audio/wav"
20
+ private var lastBufferTime: AVAudioTime?
21
+
22
+ private var startTime: Date?
23
+ private var pauseStartTime: Date?
24
+
25
+
26
+ private var inittedAudioSession = false
27
+ private var isRecording: Bool = false
28
+ private var isSilent: Bool = false
29
+
30
+ init() {
31
+ NotificationCenter.default.addObserver(
32
+ self,
33
+ selector: #selector(handleRouteChange),
34
+ name: AVAudioSession.routeChangeNotification,
35
+ object: nil
36
+ )
37
+ }
38
+
39
+ /// Handles audio route changes (e.g. headphones connected/disconnected)
40
+ /// - Parameter notification: The notification object containing route change information
41
+ @objc private func handleRouteChange(notification: Notification) {
42
+ guard let info = notification.userInfo,
43
+ let reasonValue = info[AVAudioSessionRouteChangeReasonKey] as? UInt,
44
+ let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
45
+ return
46
+ }
47
+
48
+ Logger.debug("[Microphone] Route is changed \(reason)")
49
+
50
+ switch reason {
51
+ case .newDeviceAvailable, .oldDeviceUnavailable:
52
+ if isRecording {
53
+ stopRecording(resolver: nil)
54
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
55
+ guard let self = self, let settings = self.recordingSettings else { return }
56
+
57
+ _ = startRecording(settings: self.recordingSettings!, intervalMilliseconds: 100)
58
+ }
59
+ }
60
+ case .categoryChange:
61
+ Logger.debug("[Microphone] Audio Session category changed")
62
+ default:
63
+ break
64
+ }
65
+ }
66
+
67
+ func toggleSilence(isSilent: Bool) {
68
+ Logger.debug("[Microphone] toggleSilence")
69
+ self.isSilent = isSilent
70
+ }
71
+
72
+ func startRecording(settings: RecordingSettings, intervalMilliseconds: Int) -> StartRecordingResult? {
73
+ guard !isRecording else {
74
+ Logger.debug("Debug: Recording is already in progress.")
75
+ return StartRecordingResult(error: "Recording is already in progress.")
76
+ }
77
+
78
+ if self.audioEngine == nil {
79
+ self.audioEngine = AVAudioEngine()
80
+ }
81
+
82
+ if self.audioEngine != nil && audioEngine.isRunning {
83
+ Logger.debug("Debug: Audio engine already running.")
84
+ audioEngine.stop()
85
+ }
86
+
87
+ var newSettings = settings // Make settings mutable
88
+
89
+ totalDataSize = 0
90
+
91
+ // Use the hardware's native format for the tap to avoid Core Audio format mismatch crashes.
92
+ // The inputNode delivers audio in the hardware format (e.g. 48kHz Float32).
93
+ // Resampling and format conversion to the desired settings happens in processAudioBuffer.
94
+ let hardwareFormat = audioEngine.inputNode.inputFormat(forBus: 0)
95
+ newSettings.sampleRate = hardwareFormat.sampleRate
96
+ Logger.debug("Debug: Hardware sample rate is \(hardwareFormat.sampleRate) Hz, desired sample rate is \(settings.sampleRate) Hz")
97
+
98
+ recordingSettings = newSettings // Update the class property with the new settings
99
+
100
+ // Compute tap buffer size from interval so Core Audio delivers at the right cadence
101
+ let intervalSamples = AVAudioFrameCount(
102
+ Double(intervalMilliseconds) / 1000.0 * hardwareFormat.sampleRate
103
+ )
104
+ let tapBufferSize = max(intervalSamples, 256) // floor at 256 frames (~5ms at 48kHz)
105
+
106
+ // Pass nil for format to use the hardware's native format, avoiding format mismatch crashes.
107
+ // Core Audio does not support format conversion (e.g. Float32 -> Int16) on the tap itself.
108
+ audioEngine.inputNode.installTap(onBus: 0, bufferSize: tapBufferSize, format: nil) { [weak self] (buffer, time) in
109
+ guard let self = self else { return }
110
+
111
+ guard buffer.frameLength > 0 else {
112
+ Logger.debug("Error: received empty buffer in tap callback")
113
+ self.delegate?.onMicrophoneError("READ_ERROR", "Received empty audio buffer")
114
+ return
115
+ }
116
+
117
+ self.processAudioBuffer(buffer)
118
+ self.lastBufferTime = time
119
+ }
120
+
121
+ do {
122
+ startTime = Date()
123
+ try audioEngine.start()
124
+ isRecording = true
125
+ Logger.debug("Debug: Recording started successfully.")
126
+ return StartRecordingResult(
127
+ fileUri: "",
128
+ mimeType: mimeType,
129
+ channels: settings.numberOfChannels,
130
+ bitDepth: settings.bitDepth,
131
+ sampleRate: settings.sampleRate
132
+ )
133
+ } catch {
134
+ Logger.debug("Error: Could not start the audio engine: \(error.localizedDescription)")
135
+ isRecording = false
136
+ return StartRecordingResult(error: "Error: Could not start the audio engine: \(error.localizedDescription)")
137
+ }
138
+ }
139
+
140
+ public func stopRecording(resolver promise: Promise?) {
141
+ guard self.isRecording else {
142
+ if let promiseResolver = promise {
143
+ promiseResolver.resolve(nil)
144
+ }
145
+ return
146
+ }
147
+ self.isRecording = false
148
+ self.isVoiceProcessingEnabled = false
149
+
150
+ // Remove tap before stopping the engine
151
+ if audioEngine != nil {
152
+ audioEngine.inputNode.removeTap(onBus: 0)
153
+ audioEngine.stop()
154
+ }
155
+
156
+ if let promiseResolver = promise {
157
+ promiseResolver.resolve(nil)
158
+ }
159
+ }
160
+
161
+ /// Processes the audio buffer and writes data to the file. Also handles audio processing if enabled.
162
+ /// - Parameters:
163
+ /// - buffer: The audio buffer to process.
164
+ /// - fileURL: The URL of the file to write the data to.
165
+ private func processAudioBuffer(_ buffer: AVAudioPCMBuffer) {
166
+ let targetSampleRate = recordingSettings?.desiredSampleRate ?? buffer.format.sampleRate
167
+ let targetBitDepth = recordingSettings?.bitDepth ?? 16
168
+ var currentBuffer = buffer
169
+
170
+ // Resample if needed
171
+ if currentBuffer.format.sampleRate != targetSampleRate {
172
+ if let resampledBuffer = AudioUtils.resampleAudioBuffer(currentBuffer, from: currentBuffer.format.sampleRate, to: targetSampleRate) {
173
+ currentBuffer = resampledBuffer
174
+ } else if let convertedBuffer = AudioUtils.tryConvertToFormat(
175
+ inputBuffer: currentBuffer,
176
+ desiredSampleRate: targetSampleRate,
177
+ desiredChannel: 1,
178
+ bitDepth: targetBitDepth
179
+ ) {
180
+ currentBuffer = convertedBuffer
181
+ } else {
182
+ Logger.debug("Failed to resample audio buffer.")
183
+ }
184
+ }
185
+
186
+ let powerLevel: Float = self.isSilent ? -160.0 : AudioUtils.calculatePowerLevel(from: currentBuffer)
187
+
188
+ // Convert Float32 → Int16 PCM if needed (the tap delivers hardware-native Float32)
189
+ let data: Data
190
+ if isSilent {
191
+ let byteCount = Int(currentBuffer.frameCapacity) * Int(currentBuffer.format.streamDescription.pointee.mBytesPerFrame)
192
+ data = Data(repeating: 0, count: byteCount)
193
+ } else if targetBitDepth == 16 && currentBuffer.format.commonFormat == .pcmFormatFloat32,
194
+ let floatData = currentBuffer.floatChannelData {
195
+ let frameCount = Int(currentBuffer.frameLength)
196
+ let channelCount = Int(currentBuffer.format.channelCount)
197
+ var int16Data = Data(capacity: frameCount * channelCount * 2)
198
+ for frame in 0..<frameCount {
199
+ for ch in 0..<channelCount {
200
+ let sample = max(-1.0, min(1.0, floatData[ch][frame]))
201
+ var int16Sample = Int16(sample * 32767.0)
202
+ int16Data.append(Data(bytes: &int16Sample, count: 2))
203
+ }
204
+ }
205
+ data = int16Data
206
+ } else {
207
+ let audioData = currentBuffer.audioBufferList.pointee.mBuffers
208
+ guard let bufferData = audioData.mData else {
209
+ Logger.debug("Buffer data is nil.")
210
+ return
211
+ }
212
+ data = Data(bytes: bufferData, count: Int(audioData.mDataByteSize))
213
+ }
214
+
215
+ totalDataSize += Int64(data.count)
216
+
217
+ // Emit immediately — tap buffer size is already interval-aligned
218
+ self.delegate?.onMicrophoneData(data, powerLevel)
219
+ self.lastEmittedSize = totalDataSize
220
+ }
221
+ }
@@ -0,0 +1,4 @@
1
/// Receives streaming output from `Microphone`.
protocol MicrophoneDataDelegate: AnyObject {
    /// Called for each processed audio chunk.
    /// - Parameters:
    ///   - microphoneData: Raw PCM bytes for the chunk.
    ///   - soundLevel: Measured power level for the chunk (in dB per the
    ///     caller in Microphone.swift, which passes -160.0 for silence),
    ///     or nil when unavailable.
    func onMicrophoneData(_ microphoneData: Data, _ soundLevel: Float?)

    /// Called when capturing audio fails.
    /// - Parameters:
    ///   - error: Short machine-readable error code (e.g. "READ_ERROR").
    ///   - errorMessage: Human-readable description of the failure.
    func onMicrophoneError(_ error: String, _ errorMessage: String)
}
@@ -0,0 +1,214 @@
1
+ import Foundation
2
+
3
/// Protocol for sending pipeline events to the Expo module (analogous to Android's EventSender).
protocol PipelineEventSender: AnyObject {
    /// Forwards a named event with its payload across the Expo bridge.
    /// - Parameters:
    ///   - eventName: One of the Pipeline* event name constants.
    ///   - params: Event payload serialized for JS consumption.
    func sendPipelineEvent(_ eventName: String, _ params: [String: Any])
}
7
+
8
/// Bridge layer wiring AudioPipeline into ExpoPlayAudioStreamModule.
///
/// Holds the pipeline instance, implements PipelineListener to forward native events
/// as Expo bridge events, and exposes the 7 bridge methods that the module's
/// definition() block declares.
class PipelineIntegration: PipelineListener {
    private static let TAG = "PipelineIntegration"

    // ── Event name constants (match the TS PipelineEventMap keys) ─────
    static let EVENT_STATE_CHANGED = "PipelineStateChanged"
    static let EVENT_PLAYBACK_STARTED = "PipelinePlaybackStarted"
    static let EVENT_ERROR = "PipelineError"
    static let EVENT_ZOMBIE_DETECTED = "PipelineZombieDetected"
    static let EVENT_UNDERRUN = "PipelineUnderrun"
    static let EVENT_DRAINED = "PipelineDrained"
    static let EVENT_AUDIO_FOCUS_LOST = "PipelineAudioFocusLost"
    static let EVENT_AUDIO_FOCUS_RESUMED = "PipelineAudioFocusResumed"

    private weak var eventSender: PipelineEventSender?
    private weak var sharedEngine: SharedAudioEngine?
    private var pipeline: AudioPipeline?

    init(eventSender: PipelineEventSender, sharedEngine: SharedAudioEngine) {
        self.eventSender = eventSender
        self.sharedEngine = sharedEngine
    }

    // ════════════════════════════════════════════════════════════════════
    // Bridge methods
    // ════════════════════════════════════════════════════════════════════

    /// Connect the pipeline. Creates a new AudioPipeline with the given options.
    ///
    /// Options:
    /// - `sampleRate` (Int, default 24000)
    /// - `channelCount` (Int, default 1)
    /// - `targetBufferMs` (Int, default 80)
    ///
    /// - Throws: If the shared engine has been deallocated.
    /// - Returns: A dictionary with the resolved config on success.
    func connect(options: [String: Any]) throws -> [String: Any] {
        // Tear down any existing pipeline first.
        if let old = pipeline {
            // Fix: the old pipeline stayed registered as a shared-engine
            // delegate, so it kept receiving route/interruption callbacks
            // after being replaced.
            sharedEngine?.removeDelegate(old)
            old.disconnect()
            // Fix: clear the reference before the guard below so a throw
            // cannot leave a disconnected pipeline reachable by pushAudio.
            pipeline = nil
        }

        guard let sharedEngine = sharedEngine else {
            throw Self.bridgeError("SharedAudioEngine not set")
        }

        let sampleRate = (options["sampleRate"] as? NSNumber)?.intValue ?? 24000
        let channelCount = (options["channelCount"] as? NSNumber)?.intValue ?? 1
        let targetBufferMs = (options["targetBufferMs"] as? NSNumber)?.intValue ?? 80

        let p = AudioPipeline(
            sampleRate: sampleRate,
            channelCount: channelCount,
            targetBufferMs: targetBufferMs,
            sharedEngine: sharedEngine,
            listener: self
        )
        p.connect()
        pipeline = p

        return [
            "sampleRate": sampleRate,
            "channelCount": channelCount,
            "targetBufferMs": targetBufferMs,
            "frameSizeSamples": p.frameSizeSamples
        ]
    }

    /// Push base64-encoded PCM audio into the jitter buffer (async path).
    ///
    /// Options:
    /// - `audio` (String) — base64-encoded PCM16 LE data
    /// - `turnId` (String) — conversation turn identifier
    /// - `isFirstChunk` (Boolean, default false)
    /// - `isLastChunk` (Boolean, default false)
    ///
    /// - Throws: If a required field is missing or the pipeline is not connected.
    func pushAudio(options: [String: Any]) throws {
        guard let audio = options["audio"] as? String else {
            throw Self.bridgeError("Missing 'audio' field")
        }
        guard let turnId = options["turnId"] as? String else {
            throw Self.bridgeError("Missing 'turnId' field")
        }
        let isFirstChunk = options["isFirstChunk"] as? Bool ?? false
        let isLastChunk = options["isLastChunk"] as? Bool ?? false

        guard let p = pipeline else {
            throw Self.bridgeError("Pipeline not connected")
        }
        p.pushAudio(base64Audio: audio, turnId: turnId, isFirstChunk: isFirstChunk, isLastChunk: isLastChunk)
    }

    /// Push base64-encoded PCM audio synchronously (no Promise overhead).
    /// Returns true on success, false on failure (missing fields or not connected).
    func pushAudioSync(options: [String: Any]) -> Bool {
        guard let audio = options["audio"] as? String,
              let turnId = options["turnId"] as? String else {
            return false
        }
        let isFirstChunk = options["isFirstChunk"] as? Bool ?? false
        let isLastChunk = options["isLastChunk"] as? Bool ?? false

        guard let p = pipeline else { return false }
        p.pushAudio(base64Audio: audio, turnId: turnId, isFirstChunk: isFirstChunk, isLastChunk: isLastChunk)
        return true
    }

    /// Disconnect the pipeline. Tears down AVAudioEngine, timers, etc.
    func disconnect() {
        pipeline?.disconnect()
        pipeline = nil
    }

    /// Invalidate the current turn — discards stale audio in the jitter buffer.
    ///
    /// Options:
    /// - `turnId` (String) — the new turn identifier
    ///
    /// - Throws: If the `turnId` field is missing.
    func invalidateTurn(options: [String: Any]) throws {
        guard let turnId = options["turnId"] as? String else {
            throw Self.bridgeError("Missing 'turnId' field")
        }
        pipeline?.invalidateTurn(newTurnId: turnId)
    }

    /// Get current pipeline telemetry as a dictionary (returned to JS as a map).
    /// Reports `idle` state when no pipeline exists.
    func getTelemetry() -> [String: Any] {
        return pipeline?.getTelemetry() ?? ["state": PipelineState.idle.rawValue]
    }

    /// Get current pipeline state string (`idle` when no pipeline exists).
    func getState() -> String {
        return pipeline?.getState().rawValue ?? PipelineState.idle.rawValue
    }

    /// Register the pipeline as a delegate on the shared engine.
    /// Called by the module after connect() so route changes and interruptions
    /// are forwarded to the AudioPipeline instance.
    func setAsActiveDelegate(on engine: SharedAudioEngine) {
        if let p = pipeline {
            engine.addDelegate(p)
        }
    }

    /// Remove the pipeline delegate from the shared engine.
    /// Called by the module before disconnect so stale callbacks aren't delivered.
    func removeAsDelegate(from engine: SharedAudioEngine) {
        if let p = pipeline {
            engine.removeDelegate(p)
        }
    }

    /// Destroy the integration — called from module destroy().
    func destroy() {
        if let p = pipeline, let engine = sharedEngine {
            engine.removeDelegate(p)
        }
        pipeline?.disconnect()
        pipeline = nil
    }

    // ════════════════════════════════════════════════════════════════════
    // PipelineListener implementation → Expo bridge events
    // ════════════════════════════════════════════════════════════════════

    func onStateChanged(_ state: PipelineState) {
        sendEvent(PipelineIntegration.EVENT_STATE_CHANGED, ["state": state.rawValue])
    }

    func onPlaybackStarted(turnId: String) {
        sendEvent(PipelineIntegration.EVENT_PLAYBACK_STARTED, ["turnId": turnId])
    }

    func onError(code: String, message: String) {
        sendEvent(PipelineIntegration.EVENT_ERROR, ["code": code, "message": message])
    }

    func onZombieDetected(stalledMs: Int64) {
        sendEvent(PipelineIntegration.EVENT_ZOMBIE_DETECTED, ["stalledMs": stalledMs])
    }

    func onUnderrun(count: Int) {
        sendEvent(PipelineIntegration.EVENT_UNDERRUN, ["count": count])
    }

    func onDrained(turnId: String) {
        sendEvent(PipelineIntegration.EVENT_DRAINED, ["turnId": turnId])
    }

    func onAudioFocusLost() {
        sendEvent(PipelineIntegration.EVENT_AUDIO_FOCUS_LOST, [:])
    }

    func onAudioFocusResumed() {
        sendEvent(PipelineIntegration.EVENT_AUDIO_FOCUS_RESUMED, [:])
    }

    // ── Helpers ───────────────────────────────────────────────────────

    /// Forwards an event to the (weakly held) event sender; no-op if gone.
    private func sendEvent(_ eventName: String, _ params: [String: Any]) {
        eventSender?.sendPipelineEvent(eventName, params)
    }

    /// Builds the NSError shape shared by all bridge-method failures
    /// (dedupes the four inline NSError constructions).
    private static func bridgeError(_ message: String) -> NSError {
        return NSError(domain: "PipelineIntegration", code: -1,
                       userInfo: [NSLocalizedDescriptionKey: message])
    }
}
@@ -0,0 +1,10 @@
1
// RecordingResult.swift

/// Result of `Microphone.startRecording(settings:intervalMilliseconds:)`.
/// On success the stream-format fields are populated (the caller in
/// Microphone.swift passes "" for `fileUri`); on failure only `error` is set.
struct StartRecordingResult {
    var fileUri: String?     // destination file URI ("" in the streaming path)
    var mimeType: String?    // e.g. "audio/wav"
    var channels: Int?       // channel count of the emitted stream
    var bitDepth: Int?       // bits per sample of the emitted stream
    var sampleRate: Double?  // sample rate in Hz
    var error: String?       // human-readable failure message, nil on success
}
@@ -0,0 +1,11 @@
1
// RecordingSettings.swift

/// Configuration for a microphone recording session.
/// `sampleRate` is overwritten with the hardware rate when recording starts
/// (see Microphone.startRecording), while `desiredSampleRate` keeps the
/// caller's requested output rate used during buffer conversion.
struct RecordingSettings {
    var sampleRate: Double          // actual capture rate in Hz (hardware-driven)
    var desiredSampleRate: Double   // target output rate in Hz after resampling
    var numberOfChannels: Int = 1
    var bitDepth: Int = 16
    var maxRecentDataDuration: Double? = 10.0 // Default to 10 seconds
    var pointsPerSecond: Int? = 1000 // Default value
}
11
+