@edkimmel/expo-audio-stream 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/NATIVE_EVENTS.md +26 -4
  2. package/README.md +33 -4
  3. package/android/src/main/java/expo/modules/audiostream/AudioRecorderManager.kt +25 -0
  4. package/android/src/main/java/expo/modules/audiostream/ExpoPlayAudioStreamModule.kt +2 -1
  5. package/android/src/main/java/expo/modules/audiostream/FrequencyBandAnalyzer.kt +153 -0
  6. package/android/src/main/java/expo/modules/audiostream/pipeline/AudioPipeline.kt +55 -0
  7. package/android/src/main/java/expo/modules/audiostream/pipeline/PipelineIntegration.kt +16 -0
  8. package/build/events.d.ts +5 -0
  9. package/build/events.d.ts.map +1 -1
  10. package/build/events.js.map +1 -1
  11. package/build/index.d.ts +2 -2
  12. package/build/index.d.ts.map +1 -1
  13. package/build/index.js +4 -2
  14. package/build/index.js.map +1 -1
  15. package/build/pipeline/types.d.ts +9 -1
  16. package/build/pipeline/types.d.ts.map +1 -1
  17. package/build/pipeline/types.js.map +1 -1
  18. package/build/types.d.ts +17 -0
  19. package/build/types.d.ts.map +1 -1
  20. package/build/types.js.map +1 -1
  21. package/docs/superpowers/plans/2026-03-13-frequency-band-analysis.md +1006 -0
  22. package/docs/superpowers/specs/2026-03-13-frequency-band-analysis-design.md +276 -0
  23. package/ios/AudioPipeline.swift +69 -2
  24. package/ios/ExpoPlayAudioStreamModule.swift +19 -3
  25. package/ios/FrequencyBandAnalyzer.swift +135 -0
  26. package/ios/Microphone.swift +29 -4
  27. package/ios/MicrophoneDataDelegate.swift +1 -1
  28. package/ios/PipelineIntegration.swift +14 -0
  29. package/package.json +1 -1
  30. package/src/events.ts +1 -0
  31. package/src/index.ts +6 -1
  32. package/src/pipeline/types.ts +9 -1
  33. package/src/types.ts +19 -0
@@ -26,6 +26,8 @@ class Microphone {
26
26
  private var inittedAudioSession = false
27
27
  private var isRecording: Bool = false
28
28
  private var isSilent: Bool = false
29
+ private var frequencyBandAnalyzer: FrequencyBandAnalyzer?
30
+ private var frequencyBandConfig: (lowCrossoverHz: Float, highCrossoverHz: Float)?
29
31
 
30
32
  init() {
31
33
  NotificationCenter.default.addObserver(
@@ -54,7 +56,7 @@ class Microphone {
54
56
  DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
55
57
  guard let self = self, let settings = self.recordingSettings else { return }
56
58
 
57
- _ = startRecording(settings: self.recordingSettings!, intervalMilliseconds: 100)
59
+ _ = startRecording(settings: self.recordingSettings!, intervalMilliseconds: 100, frequencyBandConfig: self.frequencyBandConfig)
58
60
  }
59
61
  }
60
62
  case .categoryChange:
@@ -69,7 +71,8 @@ class Microphone {
69
71
  self.isSilent = isSilent
70
72
  }
71
73
 
72
- func startRecording(settings: RecordingSettings, intervalMilliseconds: Int) -> StartRecordingResult? {
74
+ func startRecording(settings: RecordingSettings, intervalMilliseconds: Int,
75
+ frequencyBandConfig: (lowCrossoverHz: Float, highCrossoverHz: Float)? = nil) -> StartRecordingResult? {
73
76
  guard !isRecording else {
74
77
  Logger.debug("Debug: Recording is already in progress.")
75
78
  return StartRecordingResult(error: "Recording is already in progress.")
@@ -97,6 +100,16 @@ class Microphone {
97
100
 
98
101
  recordingSettings = newSettings // Update the class property with the new settings
99
102
 
103
+ self.frequencyBandConfig = frequencyBandConfig
104
+ // Analyzer uses the desired (target) sample rate, not hardware rate
105
+ let targetRate = Int(settings.desiredSampleRate ?? settings.sampleRate)
106
+ let fbConfig = frequencyBandConfig ?? (lowCrossoverHz: Float(300), highCrossoverHz: Float(2000))
107
+ frequencyBandAnalyzer = FrequencyBandAnalyzer(
108
+ sampleRate: targetRate,
109
+ lowCrossoverHz: fbConfig.lowCrossoverHz,
110
+ highCrossoverHz: fbConfig.highCrossoverHz
111
+ )
112
+
100
113
  // Compute tap buffer size from interval so Core Audio delivers at the right cadence
101
114
  let intervalSamples = AVAudioFrameCount(
102
115
  Double(intervalMilliseconds) / 1000.0 * hardwareFormat.sampleRate
@@ -151,8 +164,9 @@ class Microphone {
151
164
  if audioEngine != nil {
152
165
  audioEngine.inputNode.removeTap(onBus: 0)
153
166
  audioEngine.stop()
167
+ frequencyBandAnalyzer = nil
154
168
  }
155
-
169
+
156
170
  if let promiseResolver = promise {
157
171
  promiseResolver.resolve(nil)
158
172
  }
@@ -212,10 +226,21 @@ class Microphone {
212
226
  data = Data(bytes: bufferData, count: Int(audioData.mDataByteSize))
213
227
  }
214
228
 
229
+ // Compute frequency bands from the Int16 PCM data
230
+ let bands: FrequencyBands?
231
+ if isSilent {
232
+ bands = .zero
233
+ } else if let analyzer = frequencyBandAnalyzer {
234
+ analyzer.processSamplesFromData(data)
235
+ bands = analyzer.harvest()
236
+ } else {
237
+ bands = nil
238
+ }
239
+
215
240
  totalDataSize += Int64(data.count)
216
241
 
217
242
  // Emit immediately — tap buffer size is already interval-aligned
218
- self.delegate?.onMicrophoneData(data, powerLevel)
243
+ self.delegate?.onMicrophoneData(data, powerLevel, bands)
219
244
  self.lastEmittedSize = totalDataSize
220
245
  }
221
246
  }
@@ -1,4 +1,4 @@
1
1
  protocol MicrophoneDataDelegate: AnyObject {
2
- func onMicrophoneData(_ microphoneData: Data, _ soundLevel: Float?)
2
+ func onMicrophoneData(_ microphoneData: Data, _ soundLevel: Float?, _ frequencyBands: FrequencyBands?)
3
3
  func onMicrophoneError(_ error: String, _ errorMessage: String)
4
4
  }
@@ -22,6 +22,7 @@ class PipelineIntegration: PipelineListener {
22
22
  static let EVENT_DRAINED = "PipelineDrained"
23
23
  static let EVENT_AUDIO_FOCUS_LOST = "PipelineAudioFocusLost"
24
24
  static let EVENT_AUDIO_FOCUS_RESUMED = "PipelineAudioFocusResumed"
25
+ static let EVENT_FREQUENCY_BANDS = "PipelineFrequencyBands"
25
26
 
26
27
  private weak var eventSender: PipelineEventSender?
27
28
  private weak var sharedEngine: SharedAudioEngine?
@@ -56,11 +57,18 @@ class PipelineIntegration: PipelineListener {
56
57
  let sampleRate = (options["sampleRate"] as? NSNumber)?.intValue ?? 24000
57
58
  let channelCount = (options["channelCount"] as? NSNumber)?.intValue ?? 1
58
59
  let targetBufferMs = (options["targetBufferMs"] as? NSNumber)?.intValue ?? 80
60
+ let frequencyBandIntervalMs = (options["frequencyBandIntervalMs"] as? NSNumber)?.intValue ?? 100
61
+ let bandConfig = options["frequencyBandConfig"] as? [String: Any]
62
+ let lowCrossoverHz = (bandConfig?["lowCrossoverHz"] as? NSNumber)?.floatValue ?? 300
63
+ let highCrossoverHz = (bandConfig?["highCrossoverHz"] as? NSNumber)?.floatValue ?? 2000
59
64
 
60
65
  let p = AudioPipeline(
61
66
  sampleRate: sampleRate,
62
67
  channelCount: channelCount,
63
68
  targetBufferMs: targetBufferMs,
69
+ frequencyBandIntervalMs: frequencyBandIntervalMs,
70
+ lowCrossoverHz: lowCrossoverHz,
71
+ highCrossoverHz: highCrossoverHz,
64
72
  sharedEngine: sharedEngine,
65
73
  listener: self
66
74
  )
@@ -206,6 +214,12 @@ class PipelineIntegration: PipelineListener {
206
214
  sendEvent(PipelineIntegration.EVENT_AUDIO_FOCUS_RESUMED, [:])
207
215
  }
208
216
 
217
+ func onFrequencyBands(low: Float, mid: Float, high: Float) {
218
+ sendEvent(PipelineIntegration.EVENT_FREQUENCY_BANDS, [
219
+ "low": low, "mid": mid, "high": high
220
+ ])
221
+ }
222
+
209
223
  // ── Helper ────────────────────────────────────────────────────────
210
224
 
211
225
  private func sendEvent(_ eventName: String, _ params: [String: Any]) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@edkimmel/expo-audio-stream",
3
- "version": "0.2.0",
3
+ "version": "0.3.0",
4
4
  "description": "Expo Play Audio Stream module",
5
5
  "main": "build/index.js",
6
6
  "types": "build/index.d.ts",
package/src/events.ts CHANGED
@@ -20,6 +20,7 @@ export interface AudioEventPayload {
20
20
  mimeType: string;
21
21
  streamUuid: string;
22
22
  soundLevel?: number;
23
+ frequencyBands?: { low: number; mid: number; high: number };
23
24
  }
24
25
 
25
26
  export type SoundChunkPlayedEventPayload = {
package/src/index.ts CHANGED
@@ -12,6 +12,7 @@ import {
12
12
  PlaybackMode,
13
13
  Encoding,
14
14
  EncodingTypes,
15
+ FrequencyBands,
15
16
  PlaybackModes,
16
17
  // Audio jitter buffer types
17
18
  IAudioBufferConfig,
@@ -132,6 +133,7 @@ export class ExpoPlayAudioStream {
132
133
  position,
133
134
  encoded,
134
135
  soundLevel,
136
+ frequencyBands,
135
137
  } = event;
136
138
  if (!encoded) {
137
139
  console.error(
@@ -146,6 +148,7 @@ export class ExpoPlayAudioStream {
146
148
  eventDataSize: deltaSize,
147
149
  totalSize,
148
150
  soundLevel,
151
+ frequencyBands,
149
152
  });
150
153
  }
151
154
  );
@@ -192,7 +195,7 @@ export class ExpoPlayAudioStream {
192
195
  onMicrophoneStream: (event: AudioDataEvent) => Promise<void>
193
196
  ): Subscription {
194
197
  return addAudioEventListener(async (event: AudioEventPayload) => {
195
- const { fileUri, deltaSize, totalSize, position, encoded, soundLevel } =
198
+ const { fileUri, deltaSize, totalSize, position, encoded, soundLevel, frequencyBands } =
196
199
  event;
197
200
  if (!encoded) {
198
201
  console.error(`[ExpoPlayAudioStream] Encoded audio data is missing`);
@@ -205,6 +208,7 @@ export class ExpoPlayAudioStream {
205
208
  eventDataSize: deltaSize,
206
209
  totalSize,
207
210
  soundLevel,
211
+ frequencyBands,
208
212
  });
209
213
  });
210
214
  }
@@ -316,6 +320,7 @@ export {
316
320
  PlaybackMode,
317
321
  Encoding,
318
322
  EncodingTypes,
323
+ FrequencyBands,
319
324
  PlaybackModes,
320
325
  // Audio jitter buffer types
321
326
  IAudioBufferConfig,
@@ -2,7 +2,7 @@
2
2
  // Native Audio Pipeline — V3 TypeScript Types
3
3
  // ────────────────────────────────────────────────────────────────────────────
4
4
 
5
- import { PlaybackMode } from "../types";
5
+ import { PlaybackMode, FrequencyBandConfig, FrequencyBands } from "../types";
6
6
 
7
7
  // ── Connect ─────────────────────────────────────────────────────────────────
8
8
 
@@ -21,6 +21,10 @@ export interface ConnectPipelineOptions {
21
21
  * Playback mode hint for native optimizations. Affects thread priority and
22
22
  */
23
23
  playbackMode?: PlaybackMode;
24
+ /** Interval in ms for PipelineFrequencyBands events (default 100). */
25
+ frequencyBandIntervalMs?: number;
26
+ /** Optional frequency band crossover configuration. */
27
+ frequencyBandConfig?: FrequencyBandConfig;
24
28
  }
25
29
 
26
30
  /** Result returned from a successful `connectPipeline()` call. */
@@ -116,6 +120,9 @@ export type PipelineAudioFocusLostEvent = Record<string, never>;
116
120
  /** Payload for `PipelineAudioFocusResumed` (empty — presence is the signal). */
117
121
  export type PipelineAudioFocusResumedEvent = Record<string, never>;
118
122
 
123
+ /** Payload for `PipelineFrequencyBands`. */
124
+ export interface PipelineFrequencyBandsEvent extends FrequencyBands {}
125
+
119
126
  /**
120
127
  * Map of all pipeline event names to their payload types.
121
128
  * Used with `Pipeline.subscribe<K>()` for type-safe event subscriptions.
@@ -129,6 +136,7 @@ export interface PipelineEventMap {
129
136
  PipelineDrained: PipelineDrainedEvent;
130
137
  PipelineAudioFocusLost: PipelineAudioFocusLostEvent;
131
138
  PipelineAudioFocusResumed: PipelineAudioFocusResumedEvent;
139
+ PipelineFrequencyBands: PipelineFrequencyBandsEvent;
132
140
  }
133
141
 
134
142
  /** Union of all pipeline event name strings. */
package/src/types.ts CHANGED
@@ -87,6 +87,21 @@ export const EncodingTypes = {
87
87
  export type Encoding =
88
88
  (typeof EncodingTypes)[keyof typeof EncodingTypes];
89
89
 
90
+ /** RMS energy per frequency band, range [0, 1]. */
91
+ export interface FrequencyBands {
92
+ low: number;
93
+ mid: number;
94
+ high: number;
95
+ }
96
+
97
+ /** Crossover frequency configuration for band analysis. */
98
+ export interface FrequencyBandConfig {
99
+ /** Low/mid crossover in Hz (default 300). */
100
+ lowCrossoverHz?: number;
101
+ /** Mid/high crossover in Hz (default 2000). */
102
+ highCrossoverHz?: number;
103
+ }
104
+
90
105
  /**
91
106
  * Smart buffering mode options
92
107
  */
@@ -135,6 +150,8 @@ export interface AudioDataEvent {
135
150
  eventDataSize: number;
136
151
  totalSize: number;
137
152
  soundLevel?: number;
153
+ /** Frequency band RMS energy, present when recording is active. */
154
+ frequencyBands?: FrequencyBands;
138
155
  }
139
156
 
140
157
  export interface RecordingConfig {
@@ -147,6 +164,8 @@ export interface RecordingConfig {
147
164
  enableProcessing?: boolean; // Boolean to enable/disable audio processing (default is false)
148
165
  pointsPerSecond?: number; // Number of data points to extract per second of audio (default is 1000)
149
166
  onAudioStream?: (event: AudioDataEvent) => Promise<void>; // Callback function to handle audio stream
167
+ /** Optional frequency band crossover configuration. */
168
+ frequencyBandConfig?: FrequencyBandConfig;
150
169
  }
151
170
 
152
171
  export interface Chunk {