@idealyst/microphone 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,502 @@
+ import type {
+   IMicrophone,
+   AudioConfig,
+   AudioLevel,
+   MicrophoneStatus,
+   MicrophoneState,
+   PermissionResult,
+   PermissionStatus,
+   PCMData,
+   AudioDataCallback,
+   AudioLevelCallback,
+   StateChangeCallback,
+   ErrorCallback,
+   MicrophoneError,
+ } from './types';
+ import {
+   DEFAULT_AUDIO_CONFIG,
+   DEFAULT_AUDIO_LEVEL,
+   DEFAULT_LEVEL_UPDATE_INTERVAL,
+ } from './constants';
+ import {
+   createPCMTypedArray,
+   calculateAudioLevels,
+   float32ToInt16,
+   float32ToInt8,
+   createMicrophoneError,
+   mergeConfig,
+ } from './utils';
+ import { checkPermission, requestPermission } from './permissions/permissions.web';
+
+ // AudioWorklet processor code as a string (will be created as a Blob URL)
+ const WORKLET_PROCESSOR_CODE = `
+ class PCMProcessor extends AudioWorkletProcessor {
+   constructor(options) {
+     super();
+     this.bufferSize = options.processorOptions?.bufferSize || 4096;
+     this.buffer = [];
+   }
+
+   process(inputs) {
+     const input = inputs[0];
+     if (!input || !input[0]) return true;
+
+     // Use the first input channel only (mono capture)
+     const channelData = input[0];
+
+     // Accumulate samples
+     for (let i = 0; i < channelData.length; i++) {
+       this.buffer.push(channelData[i]);
+     }
+
+     // When buffer is full, send to main thread
+     while (this.buffer.length >= this.bufferSize) {
+       const samples = this.buffer.splice(0, this.bufferSize);
+       const float32Array = new Float32Array(samples);
+       this.port.postMessage({ samples: float32Array }, [float32Array.buffer]);
+     }
+
+     return true;
+   }
+ }
+
+ registerProcessor('pcm-processor', PCMProcessor);
+ `;
+
+ export class WebMicrophone implements IMicrophone {
+   private audioContext: AudioContext | null = null;
+   private mediaStream: MediaStream | null = null;
+   private workletNode: AudioWorkletNode | null = null;
+   private analyserNode: AnalyserNode | null = null;
+   private sourceNode: MediaStreamAudioSourceNode | null = null;
+
+   private config: AudioConfig = DEFAULT_AUDIO_CONFIG;
+   private _status: MicrophoneStatus;
+   private peakLevel: number = 0;
+   private startTime: number = 0;
+   private levelIntervalId: ReturnType<typeof setInterval> | null = null;
+
+   // Callbacks
+   private audioDataCallbacks: Set<AudioDataCallback> = new Set();
+   private audioLevelCallbacks: Map<AudioLevelCallback, number> = new Map();
+   private stateChangeCallbacks: Set<StateChangeCallback> = new Set();
+   private errorCallbacks: Set<ErrorCallback> = new Set();
+
+   constructor() {
+     this._status = this.createInitialStatus();
+   }
+
+   get status(): MicrophoneStatus {
+     return { ...this._status };
+   }
+
+   async checkPermission(): Promise<PermissionResult> {
+     const result = await checkPermission();
+     this.updateStatus({ permission: result.status });
+     return result;
+   }
+
+   async requestPermission(): Promise<PermissionResult> {
+     const result = await requestPermission();
+     this.updateStatus({ permission: result.status });
+     return result;
+   }
+
+   async start(config?: Partial<AudioConfig>): Promise<void> {
+     if (this._status.state === 'recording') {
+       return;
+     }
+
+     this.updateState('starting');
+     this.config = mergeConfig(config, DEFAULT_AUDIO_CONFIG);
+
+     try {
+       // Check/request permission
+       const permResult = await this.requestPermission();
+       if (permResult.status !== 'granted') {
+         throw createMicrophoneError(
+           permResult.status === 'blocked' ? 'PERMISSION_BLOCKED' : 'PERMISSION_DENIED',
+           'Microphone permission not granted'
+         );
+       }
+
+       // Get microphone access
+       this.mediaStream = await navigator.mediaDevices.getUserMedia({
+         audio: {
+           channelCount: this.config.channels,
+           sampleRate: this.config.sampleRate,
+           echoCancellation: false,
+           noiseSuppression: false,
+           autoGainControl: false,
+         },
+       });
+
+       // Create audio context with specified sample rate
+       this.audioContext = new AudioContext({
+         sampleRate: this.config.sampleRate,
+       });
+
+       // Create and load AudioWorklet
+       const workletBlob = new Blob([WORKLET_PROCESSOR_CODE], {
+         type: 'application/javascript',
+       });
+       const workletUrl = URL.createObjectURL(workletBlob);
+
+       await this.audioContext.audioWorklet.addModule(workletUrl);
+       URL.revokeObjectURL(workletUrl);
+
+       // Create worklet node
+       this.workletNode = new AudioWorkletNode(
+         this.audioContext,
+         'pcm-processor',
+         {
+           processorOptions: {
+             bufferSize: this.config.bufferSize,
+           },
+         }
+       );
+
+       // Create source from microphone stream
+       this.sourceNode = this.audioContext.createMediaStreamSource(
+         this.mediaStream
+       );
+
+       // Create analyser for level metering
+       this.analyserNode = this.audioContext.createAnalyser();
+       this.analyserNode.fftSize = 256;
+
+       // Connect nodes: source -> worklet (for PCM data)
+       //                source -> analyser (for level metering)
+       this.sourceNode.connect(this.workletNode);
+       this.sourceNode.connect(this.analyserNode);
+
+       // Handle audio data from worklet
+       this.workletNode.port.onmessage = (event) => {
+         const float32Samples: Float32Array = event.data.samples;
+         this.handleAudioData(float32Samples);
+       };
+
+       // Start level metering
+       this.startLevelMetering();
+
+       // Update state
+       this.startTime = Date.now();
+       this.peakLevel = 0;
+       this.updateState('recording');
+       this.updateStatus({ config: this.config });
+     } catch (error) {
+       this.handleError(error);
+       throw error;
+     }
+   }
+
+   async stop(): Promise<void> {
+     if (this._status.state === 'idle' || this._status.state === 'stopping') {
+       return;
+     }
+
+     this.updateState('stopping');
+     this.cleanup();
+     this.updateState('idle');
+   }
+
+   async pause(): Promise<void> {
+     // Web doesn't support true pause - just stop
+     // Users should stop and restart
+     await this.stop();
+   }
+
+   async resume(): Promise<void> {
+     // Web doesn't support resume - need to start fresh
+     if (this._status.state === 'idle') {
+       await this.start(this.config);
+     }
+   }
+
+   onAudioData(callback: AudioDataCallback): () => void {
+     this.audioDataCallbacks.add(callback);
+     return () => {
+       this.audioDataCallbacks.delete(callback);
+     };
+   }
+
+   onAudioLevel(
+     callback: AudioLevelCallback,
+     intervalMs: number = DEFAULT_LEVEL_UPDATE_INTERVAL
+   ): () => void {
+     this.audioLevelCallbacks.set(callback, intervalMs);
+     return () => {
+       this.audioLevelCallbacks.delete(callback);
+     };
+   }
+
+   onStateChange(callback: StateChangeCallback): () => void {
+     this.stateChangeCallbacks.add(callback);
+     return () => {
+       this.stateChangeCallbacks.delete(callback);
+     };
+   }
+
+   onError(callback: ErrorCallback): () => void {
+     this.errorCallbacks.add(callback);
+     return () => {
+       this.errorCallbacks.delete(callback);
+     };
+   }
+
+   resetPeakLevel(): void {
+     this.peakLevel = 0;
+   }
+
+   dispose(): void {
+     this.cleanup();
+     this.audioDataCallbacks.clear();
+     this.audioLevelCallbacks.clear();
+     this.stateChangeCallbacks.clear();
+     this.errorCallbacks.clear();
+   }
+
+   // Private methods
+
+   private createInitialStatus(): MicrophoneStatus {
+     return {
+       state: 'idle',
+       permission: 'undetermined',
+       isRecording: false,
+       duration: 0,
+       level: DEFAULT_AUDIO_LEVEL,
+       config: DEFAULT_AUDIO_CONFIG,
+     };
+   }
+
+   private handleAudioData(float32Samples: Float32Array): void {
+     // Convert to appropriate bit depth
+     let buffer: ArrayBuffer;
+     let samples: Int8Array | Int16Array | Float32Array;
+
+     switch (this.config.bitDepth) {
+       case 8:
+         samples = float32ToInt8(float32Samples);
+         buffer = samples.buffer;
+         break;
+       case 16:
+         samples = float32ToInt16(float32Samples);
+         buffer = samples.buffer;
+         break;
+       case 32:
+       default:
+         samples = float32Samples;
+         buffer = float32Samples.buffer.slice(0);
+         break;
+     }
+
+     const pcmData: PCMData = {
+       buffer,
+       samples,
+       timestamp: Date.now(),
+       config: this.config,
+     };
+
+     // Notify all audio data listeners
+     this.audioDataCallbacks.forEach((callback) => {
+       try {
+         callback(pcmData);
+       } catch (e) {
+         console.error('Error in audio data callback:', e);
+       }
+     });
+
+     // Update levels from samples
+     const level = calculateAudioLevels(samples, this.config.bitDepth, this.peakLevel);
+     this.peakLevel = level.peak;
+     this.updateStatus({ level });
+   }
+
+   private startLevelMetering(): void {
+     if (this.levelIntervalId) {
+       clearInterval(this.levelIntervalId);
+     }
+
+     // Use the smallest interval from all registered callbacks
+     let minInterval = DEFAULT_LEVEL_UPDATE_INTERVAL;
+     this.audioLevelCallbacks.forEach((interval) => {
+       if (interval < minInterval) {
+         minInterval = interval;
+       }
+     });
+
+     this.levelIntervalId = setInterval(() => {
+       if (!this.analyserNode) return;
+
+       const dataArray = new Uint8Array(this.analyserNode.frequencyBinCount);
+       this.analyserNode.getByteTimeDomainData(dataArray);
+
+       // Calculate level from time-domain data
+       let sum = 0;
+       let current = 0;
+       for (let i = 0; i < dataArray.length; i++) {
+         const normalized = Math.abs(dataArray[i] - 128) / 128;
+         sum += normalized * normalized;
+         if (normalized > current) {
+           current = normalized;
+         }
+       }
+
+       const rms = Math.sqrt(sum / dataArray.length);
+       const db = rms > 0 ? 20 * Math.log10(rms) : -Infinity;
+
+       if (current > this.peakLevel) {
+         this.peakLevel = current;
+       }
+
+       const level: AudioLevel = {
+         current,
+         peak: this.peakLevel,
+         rms,
+         db,
+       };
+
+       // Notify level callbacks
+       this.audioLevelCallbacks.forEach((_, callback) => {
+         try {
+           callback(level);
+         } catch (e) {
+           console.error('Error in audio level callback:', e);
+         }
+       });
+
+       // Update duration
+       if (this._status.state === 'recording') {
+         this.updateStatus({
+           duration: Date.now() - this.startTime,
+           level,
+         });
+       }
+     }, minInterval);
+   }
+
+   private cleanup(): void {
+     if (this.levelIntervalId) {
+       clearInterval(this.levelIntervalId);
+       this.levelIntervalId = null;
+     }
+
+     if (this.workletNode) {
+       this.workletNode.port.onmessage = null;
+       this.workletNode.disconnect();
+       this.workletNode = null;
+     }
+
+     if (this.analyserNode) {
+       this.analyserNode.disconnect();
+       this.analyserNode = null;
+     }
+
+     if (this.sourceNode) {
+       this.sourceNode.disconnect();
+       this.sourceNode = null;
+     }
+
+     if (this.mediaStream) {
+       this.mediaStream.getTracks().forEach((track) => track.stop());
+       this.mediaStream = null;
+     }
+
+     if (this.audioContext) {
+       this.audioContext.close().catch(() => {});
+       this.audioContext = null;
+     }
+   }
+
+   private updateState(state: MicrophoneState): void {
+     this.updateStatus({
+       state,
+       isRecording: state === 'recording',
+     });
+   }
+
+   private updateStatus(partial: Partial<MicrophoneStatus>): void {
+     this._status = { ...this._status, ...partial };
+
+     // Notify state change listeners
+     this.stateChangeCallbacks.forEach((callback) => {
+       try {
+         callback(this._status);
+       } catch (e) {
+         console.error('Error in state change callback:', e);
+       }
+     });
+   }
+
+   private handleError(error: unknown): void {
+     let micError: MicrophoneError;
+
+     if (error && typeof error === 'object' && 'code' in error) {
+       micError = error as MicrophoneError;
+     } else if (error instanceof Error) {
+       micError = this.mapWebError(error);
+     } else {
+       micError = createMicrophoneError(
+         'UNKNOWN',
+         String(error),
+         error instanceof Error ? error : undefined
+       );
+     }
+
+     this.updateStatus({ state: 'error', error: micError });
+     this.cleanup();
+
+     // Notify error listeners
+     this.errorCallbacks.forEach((callback) => {
+       try {
+         callback(micError);
+       } catch (e) {
+         console.error('Error in error callback:', e);
+       }
+     });
+   }
+
+   private mapWebError(error: Error): MicrophoneError {
+     switch (error.name) {
+       case 'NotAllowedError':
+       case 'PermissionDeniedError':
+         return createMicrophoneError(
+           'PERMISSION_DENIED',
+           'Microphone access denied',
+           error
+         );
+       case 'NotFoundError':
+         return createMicrophoneError(
+           'DEVICE_NOT_FOUND',
+           'No microphone found',
+           error
+         );
+       case 'NotReadableError':
+       case 'TrackStartError':
+         return createMicrophoneError(
+           'DEVICE_IN_USE',
+           'Microphone is in use by another application',
+           error
+         );
+       case 'OverconstrainedError':
+         return createMicrophoneError(
+           'INVALID_CONFIG',
+           'Audio configuration not supported',
+           error
+         );
+       default:
+         return createMicrophoneError(
+           'INITIALIZATION_FAILED',
+           error.message,
+           error
+         );
+     }
+   }
+ }
+
+ /**
+  * Create a new WebMicrophone instance.
+  */
+ export function createMicrophone(): IMicrophone {
+   return new WebMicrophone();
+ }
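
A minimal usage sketch of the API defined above, assuming the package's public entry re-exports createMicrophone (the import path and config values are illustrative, not taken from the package):

    import { createMicrophone } from '@idealyst/microphone';

    async function record() {
      const mic = createMicrophone();

      // Subscribe before start() so no PCM chunks are missed.
      const offData = mic.onAudioData((pcm) => {
        // pcm.samples is Int8Array | Int16Array | Float32Array depending on bitDepth
        console.log('received', pcm.samples.length, 'samples at', pcm.timestamp);
      });
      const offLevel = mic.onAudioLevel((level) => {
        console.log('rms', level.rms.toFixed(3), 'peak', level.peak.toFixed(3));
      }, 100);

      await mic.start({ sampleRate: 16000, channels: 1, bitDepth: 16 });
      await new Promise((resolve) => setTimeout(resolve, 5000)); // capture ~5 s
      await mic.stop();

      offData();
      offLevel();
      mic.dispose();
    }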
@@ -0,0 +1 @@
+ export { checkPermission, requestPermission } from './permissions.native';
@@ -0,0 +1 @@
+ export { checkPermission, requestPermission } from './permissions.web';
@@ -0,0 +1 @@
+ export { checkPermission, requestPermission } from './permissions.web';
@@ -0,0 +1,112 @@
+ import { Platform, PermissionsAndroid } from 'react-native';
+ import type { PermissionResult, PermissionStatus } from '../types';
+
+ /**
+  * Check microphone permission status on React Native.
+  */
+ export async function checkPermission(): Promise<PermissionResult> {
+   if (Platform.OS === 'android') {
+     return checkAndroidPermission();
+   } else if (Platform.OS === 'ios') {
+     return checkIOSPermission();
+   }
+
+   return { status: 'unavailable', canAskAgain: false };
+ }
+
+ /**
+  * Request microphone permission on React Native.
+  */
+ export async function requestPermission(): Promise<PermissionResult> {
+   if (Platform.OS === 'android') {
+     return requestAndroidPermission();
+   } else if (Platform.OS === 'ios') {
+     return requestIOSPermission();
+   }
+
+   return { status: 'unavailable', canAskAgain: false };
+ }
+
+ /**
+  * Check Android microphone permission.
+  */
+ async function checkAndroidPermission(): Promise<PermissionResult> {
+   try {
+     const granted = await PermissionsAndroid.check(
+       PermissionsAndroid.PERMISSIONS.RECORD_AUDIO
+     );
+
+     if (granted) {
+       return { status: 'granted', canAskAgain: true };
+     }
+
+     // Permission not granted - we don't know if it's undetermined or blocked
+     // without actually requesting
+     return { status: 'undetermined', canAskAgain: true };
+   } catch {
+     return { status: 'unavailable', canAskAgain: false };
+   }
+ }
+
+ /**
+  * Request Android microphone permission.
+  */
+ async function requestAndroidPermission(): Promise<PermissionResult> {
+   try {
+     const result = await PermissionsAndroid.request(
+       PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
+       {
+         title: 'Microphone Permission',
+         message: 'This app needs access to your microphone to record audio.',
+         buttonNeutral: 'Ask Me Later',
+         buttonNegative: 'Cancel',
+         buttonPositive: 'OK',
+       }
+     );
+
+     return mapAndroidResult(result);
+   } catch {
+     return { status: 'unavailable', canAskAgain: false };
+   }
+ }
+
+ /**
+  * Map Android permission result to our PermissionResult type.
+  */
+ function mapAndroidResult(
+   result: (typeof PermissionsAndroid.RESULTS)[keyof typeof PermissionsAndroid.RESULTS]
+ ): PermissionResult {
+   switch (result) {
+     case PermissionsAndroid.RESULTS.GRANTED:
+       return { status: 'granted', canAskAgain: true };
+     case PermissionsAndroid.RESULTS.DENIED:
+       return { status: 'denied', canAskAgain: true };
+     case PermissionsAndroid.RESULTS.NEVER_ASK_AGAIN:
+       return { status: 'blocked', canAskAgain: false };
+     default:
+       return { status: 'undetermined', canAskAgain: true };
+   }
+ }
+
+ /**
+  * Check iOS microphone permission.
+  * On iOS, permission is checked automatically when the audio session starts.
+  * We can't pre-check without actually requesting.
+  */
+ async function checkIOSPermission(): Promise<PermissionResult> {
+   // iOS doesn't have a way to check permission status without requesting
+   // The permission is requested when starting the audio session
+   return { status: 'undetermined', canAskAgain: true };
+ }
+
+ /**
+  * Request iOS microphone permission.
+  * On iOS, the permission dialog is shown automatically when starting
+  * the audio stream. This function serves as a placeholder.
+  */
+ async function requestIOSPermission(): Promise<PermissionResult> {
+   // iOS permission is requested automatically when the audio stream starts
+   // The native module handles this internally
+   // We return undetermined here - the actual status will be known after start()
+   return { status: 'undetermined', canAskAgain: true };
+ }
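
A short sketch of how a caller might combine these helpers before starting capture on React Native; the import path is an assumption about the package's public exports:

    import { checkPermission, requestPermission } from '@idealyst/microphone';

    async function ensureMicAccess(): Promise<boolean> {
      const current = await checkPermission();
      if (current.status === 'granted') return true;

      // 'blocked' (Android NEVER_ASK_AGAIN) cannot be re-requested in-app;
      // the user has to enable the permission from system settings.
      if (!current.canAskAgain) return false;

      const requested = await requestPermission();
      return requested.status === 'granted';
    }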
@@ -0,0 +1,78 @@
+ import type { PermissionResult, PermissionStatus } from '../types';
+
+ /**
+  * Check microphone permission status on web.
+  */
+ export async function checkPermission(): Promise<PermissionResult> {
+   // Check if permissions API is available
+   if (!navigator.permissions) {
+     // Fallback: we can't check, so assume undetermined
+     return { status: 'undetermined', canAskAgain: true };
+   }
+
+   try {
+     const result = await navigator.permissions.query({
+       name: 'microphone' as PermissionName,
+     });
+
+     const status = mapPermissionState(result.state);
+     return {
+       status,
+       canAskAgain: status !== 'denied', // On web, denied can still be asked again (browser UI)
+     };
+   } catch {
+     // Safari doesn't support querying microphone permission
+     return { status: 'undetermined', canAskAgain: true };
+   }
+ }
+
+ /**
+  * Request microphone permission on web.
+  * This triggers the browser's permission dialog.
+  */
+ export async function requestPermission(): Promise<PermissionResult> {
+   // Check if getUserMedia is available
+   if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
+     return { status: 'unavailable', canAskAgain: false };
+   }
+
+   try {
+     // Request microphone access - this triggers the permission prompt
+     const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+
+     // Stop all tracks immediately - we just needed to trigger the permission
+     stream.getTracks().forEach((track) => track.stop());
+
+     return { status: 'granted', canAskAgain: true };
+   } catch (error) {
+     if (error instanceof Error) {
+       if (
+         error.name === 'NotAllowedError' ||
+         error.name === 'PermissionDeniedError'
+       ) {
+         return { status: 'denied', canAskAgain: true };
+       }
+       if (error.name === 'NotFoundError') {
+         return { status: 'unavailable', canAskAgain: false };
+       }
+     }
+     return { status: 'denied', canAskAgain: true };
+   }
+ }
+
+ /**
+  * Map browser permission state to our PermissionStatus type.
+  */
+ function mapPermissionState(
+   state: PermissionState
+ ): PermissionStatus {
+   switch (state) {
+     case 'granted':
+       return 'granted';
+     case 'denied':
+       return 'denied';
+     case 'prompt':
+     default:
+       return 'undetermined';
+   }
+ }
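
On web the two helpers have different side effects: checkPermission never prompts (and falls back to 'undetermined' where the Permissions API cannot be queried, e.g. some Safari versions), while requestPermission opens the browser prompt via getUserMedia. A sketch of a typical call site, with the import path and element id as illustrative assumptions:

    import { checkPermission, requestPermission } from '@idealyst/microphone';

    const button = document.querySelector<HTMLButtonElement>('#record');

    async function init() {
      const { status } = await checkPermission(); // never prompts
      button!.textContent = status === 'granted' ? 'Record' : 'Enable microphone';
    }

    button?.addEventListener('click', async () => {
      // Prompt from a user gesture so the browser is less likely to suppress it.
      const { status } = await requestPermission();
      if (status !== 'granted') {
        console.warn('Microphone permission not granted:', status);
      }
    });

    void init();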