@idealyst/audio 1.2.48
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +69 -0
- package/src/constants.ts +161 -0
- package/src/context/AudioContext.native.ts +84 -0
- package/src/context/AudioContext.web.ts +97 -0
- package/src/context/index.native.ts +1 -0
- package/src/context/index.ts +1 -0
- package/src/hooks/index.ts +3 -0
- package/src/hooks/useAudio.ts +129 -0
- package/src/hooks/usePlayer.ts +247 -0
- package/src/hooks/useRecorder.ts +176 -0
- package/src/index.native.ts +114 -0
- package/src/index.ts +114 -0
- package/src/index.web.ts +8 -0
- package/src/playback/Player.native.ts +517 -0
- package/src/playback/Player.web.ts +518 -0
- package/src/playback/index.native.ts +1 -0
- package/src/playback/index.ts +1 -0
- package/src/recording/Recorder.native.ts +330 -0
- package/src/recording/Recorder.web.ts +399 -0
- package/src/recording/index.native.ts +1 -0
- package/src/recording/index.ts +1 -0
- package/src/session/AudioSession.native.ts +204 -0
- package/src/session/AudioSession.web.ts +69 -0
- package/src/session/index.native.ts +5 -0
- package/src/session/index.ts +1 -0
- package/src/types.ts +470 -0
- package/src/utils.ts +379 -0
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Native Recorder
|
|
3
|
+
*
|
|
4
|
+
* Uses react-native-audio-api for recording on React Native.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import {
|
|
8
|
+
AudioContext as RNAudioContext,
|
|
9
|
+
AudioManager,
|
|
10
|
+
} from 'react-native-audio-api';
|
|
11
|
+
import type {
|
|
12
|
+
IRecorder,
|
|
13
|
+
RecorderStatus,
|
|
14
|
+
RecorderState,
|
|
15
|
+
PermissionStatus,
|
|
16
|
+
AudioConfig,
|
|
17
|
+
AudioLevel,
|
|
18
|
+
PCMData,
|
|
19
|
+
RecorderDataCallback,
|
|
20
|
+
RecorderLevelCallback,
|
|
21
|
+
RecorderStateCallback,
|
|
22
|
+
IAudioContext,
|
|
23
|
+
} from '../types';
|
|
24
|
+
import {
|
|
25
|
+
DEFAULT_AUDIO_CONFIG,
|
|
26
|
+
DEFAULT_AUDIO_LEVEL,
|
|
27
|
+
DEFAULT_RECORDER_STATUS,
|
|
28
|
+
DEFAULT_LEVEL_UPDATE_INTERVAL,
|
|
29
|
+
DEFAULT_BUFFER_SIZE,
|
|
30
|
+
} from '../constants';
|
|
31
|
+
import {
|
|
32
|
+
createAudioError,
|
|
33
|
+
calculateAudioLevels,
|
|
34
|
+
mergeConfig,
|
|
35
|
+
arrayBufferToBase64,
|
|
36
|
+
float32ToInt16,
|
|
37
|
+
} from '../utils';
|
|
38
|
+
|
|
39
|
+
export class NativeRecorder implements IRecorder {
|
|
40
|
+
private audioContext: IAudioContext;
|
|
41
|
+
private mediaStreamNode: any = null;
|
|
42
|
+
private scriptProcessorNode: any = null;
|
|
43
|
+
|
|
44
|
+
private config: AudioConfig = DEFAULT_AUDIO_CONFIG;
|
|
45
|
+
private _status: RecorderStatus = { ...DEFAULT_RECORDER_STATUS };
|
|
46
|
+
private peakLevel: number = 0;
|
|
47
|
+
private startTime: number = 0;
|
|
48
|
+
private levelIntervalId: ReturnType<typeof setInterval> | null = null;
|
|
49
|
+
private lastLevel: AudioLevel = DEFAULT_AUDIO_LEVEL;
|
|
50
|
+
|
|
51
|
+
// Callbacks
|
|
52
|
+
private dataCallbacks: Set<RecorderDataCallback> = new Set();
|
|
53
|
+
private levelCallbacks: Map<RecorderLevelCallback, number> = new Map();
|
|
54
|
+
private stateCallbacks: Set<RecorderStateCallback> = new Set();
|
|
55
|
+
|
|
56
|
+
constructor(audioContext: IAudioContext) {
|
|
57
|
+
this.audioContext = audioContext;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
get status(): RecorderStatus {
|
|
61
|
+
return { ...this._status };
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
async checkPermission(): Promise<PermissionStatus> {
|
|
65
|
+
try {
|
|
66
|
+
const granted = await AudioManager.checkRecordingPermissions();
|
|
67
|
+
const status = granted ? 'granted' : 'undetermined';
|
|
68
|
+
this.updateStatus({ permission: status });
|
|
69
|
+
return status;
|
|
70
|
+
} catch {
|
|
71
|
+
return 'undetermined';
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
async requestPermission(): Promise<PermissionStatus> {
|
|
76
|
+
try {
|
|
77
|
+
const granted = await AudioManager.requestRecordingPermissions();
|
|
78
|
+
const status = granted ? 'granted' : 'denied';
|
|
79
|
+
this.updateStatus({ permission: status });
|
|
80
|
+
return status;
|
|
81
|
+
} catch (error: any) {
|
|
82
|
+
this.updateStatus({ permission: 'denied' });
|
|
83
|
+
return 'denied';
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async start(configOverride?: Partial<AudioConfig>): Promise<void> {
|
|
88
|
+
if (this._status.state === 'recording') {
|
|
89
|
+
return;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
this.updateState('requesting_permission');
|
|
93
|
+
|
|
94
|
+
try {
|
|
95
|
+
// Request microphone permission
|
|
96
|
+
const permission = await this.requestPermission();
|
|
97
|
+
if (permission !== 'granted') {
|
|
98
|
+
throw createAudioError('PERMISSION_DENIED', 'Microphone permission denied');
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
this.updateState('starting');
|
|
102
|
+
this.config = mergeConfig(configOverride, DEFAULT_AUDIO_CONFIG);
|
|
103
|
+
|
|
104
|
+
// Ensure audio context is initialized
|
|
105
|
+
if (!this.audioContext.isInitialized) {
|
|
106
|
+
await this.audioContext.initialize();
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
const ctx = this.audioContext.getContext() as RNAudioContext;
|
|
110
|
+
if (!ctx) {
|
|
111
|
+
throw createAudioError('INITIALIZATION_FAILED', 'AudioContext not available');
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// Create media stream source for microphone input
|
|
115
|
+
// react-native-audio-api provides this through createMediaStreamSource
|
|
116
|
+
this.mediaStreamNode = (ctx as any).createMediaStreamSource();
|
|
117
|
+
|
|
118
|
+
// Create script processor for capturing audio data
|
|
119
|
+
// Note: ScriptProcessorNode is deprecated but may be what's available in react-native-audio-api
|
|
120
|
+
const bufferSize = DEFAULT_BUFFER_SIZE;
|
|
121
|
+
this.scriptProcessorNode = (ctx as any).createScriptProcessor(
|
|
122
|
+
bufferSize,
|
|
123
|
+
this.config.channels,
|
|
124
|
+
this.config.channels
|
|
125
|
+
);
|
|
126
|
+
|
|
127
|
+
// Handle audio data
|
|
128
|
+
this.scriptProcessorNode.onaudioprocess = (event: any) => {
|
|
129
|
+
if (this._status.state !== 'recording') return;
|
|
130
|
+
|
|
131
|
+
const inputData = event.inputBuffer.getChannelData(0);
|
|
132
|
+
this.handleAudioData(new Float32Array(inputData));
|
|
133
|
+
};
|
|
134
|
+
|
|
135
|
+
// Connect nodes
|
|
136
|
+
this.mediaStreamNode.connect(this.scriptProcessorNode);
|
|
137
|
+
this.scriptProcessorNode.connect(ctx.destination);
|
|
138
|
+
|
|
139
|
+
// Start level metering
|
|
140
|
+
this.startLevelMetering();
|
|
141
|
+
|
|
142
|
+
// Update state
|
|
143
|
+
this.startTime = Date.now();
|
|
144
|
+
this.peakLevel = 0;
|
|
145
|
+
this.updateState('recording');
|
|
146
|
+
this.updateStatus({ config: this.config });
|
|
147
|
+
} catch (error: any) {
|
|
148
|
+
this.handleError(error);
|
|
149
|
+
throw error;
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
async stop(): Promise<void> {
|
|
154
|
+
if (this._status.state === 'idle' || this._status.state === 'stopping') {
|
|
155
|
+
return;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
this.updateState('stopping');
|
|
159
|
+
this.cleanup();
|
|
160
|
+
this.updateState('idle');
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
async pause(): Promise<void> {
|
|
164
|
+
if (this._status.state !== 'recording') {
|
|
165
|
+
return;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
// Disconnect the processor to pause
|
|
169
|
+
if (this.scriptProcessorNode) {
|
|
170
|
+
this.scriptProcessorNode.disconnect();
|
|
171
|
+
}
|
|
172
|
+
this.updateState('paused');
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
async resume(): Promise<void> {
|
|
176
|
+
if (this._status.state !== 'paused') {
|
|
177
|
+
return;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
const ctx = this.audioContext.getContext() as RNAudioContext;
|
|
181
|
+
if (this.scriptProcessorNode && ctx) {
|
|
182
|
+
this.scriptProcessorNode.connect(ctx.destination);
|
|
183
|
+
}
|
|
184
|
+
this.updateState('recording');
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
onData(callback: RecorderDataCallback): () => void {
|
|
188
|
+
this.dataCallbacks.add(callback);
|
|
189
|
+
return () => {
|
|
190
|
+
this.dataCallbacks.delete(callback);
|
|
191
|
+
};
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
onLevel(callback: RecorderLevelCallback, intervalMs = DEFAULT_LEVEL_UPDATE_INTERVAL): () => void {
|
|
195
|
+
this.levelCallbacks.set(callback, intervalMs);
|
|
196
|
+
return () => {
|
|
197
|
+
this.levelCallbacks.delete(callback);
|
|
198
|
+
};
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
onStateChange(callback: RecorderStateCallback): () => void {
|
|
202
|
+
this.stateCallbacks.add(callback);
|
|
203
|
+
return () => {
|
|
204
|
+
this.stateCallbacks.delete(callback);
|
|
205
|
+
};
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
resetPeakLevel(): void {
|
|
209
|
+
this.peakLevel = 0;
|
|
210
|
+
this.lastLevel = { ...this.lastLevel, peak: 0 };
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
dispose(): void {
|
|
214
|
+
this.cleanup();
|
|
215
|
+
this.dataCallbacks.clear();
|
|
216
|
+
this.levelCallbacks.clear();
|
|
217
|
+
this.stateCallbacks.clear();
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
// Private methods
|
|
221
|
+
|
|
222
|
+
private handleAudioData(float32Samples: Float32Array): void {
|
|
223
|
+
// Convert to Int16 for PCM data
|
|
224
|
+
const int16Samples = float32ToInt16(float32Samples);
|
|
225
|
+
const buffer = int16Samples.buffer;
|
|
226
|
+
|
|
227
|
+
// Calculate levels
|
|
228
|
+
const level = calculateAudioLevels(int16Samples, 16, this.peakLevel);
|
|
229
|
+
this.peakLevel = level.peak;
|
|
230
|
+
this.lastLevel = level;
|
|
231
|
+
|
|
232
|
+
// Create PCM data object
|
|
233
|
+
const pcmData: PCMData = {
|
|
234
|
+
buffer,
|
|
235
|
+
samples: int16Samples,
|
|
236
|
+
timestamp: Date.now(),
|
|
237
|
+
config: this.config,
|
|
238
|
+
toBase64: () => arrayBufferToBase64(buffer),
|
|
239
|
+
};
|
|
240
|
+
|
|
241
|
+
// Notify callbacks
|
|
242
|
+
this.dataCallbacks.forEach((callback) => {
|
|
243
|
+
try {
|
|
244
|
+
callback(pcmData);
|
|
245
|
+
} catch (e) {
|
|
246
|
+
console.error('Error in data callback:', e);
|
|
247
|
+
}
|
|
248
|
+
});
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
private startLevelMetering(): void {
|
|
252
|
+
if (this.levelIntervalId) {
|
|
253
|
+
clearInterval(this.levelIntervalId);
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
let minInterval = DEFAULT_LEVEL_UPDATE_INTERVAL;
|
|
257
|
+
this.levelCallbacks.forEach((interval) => {
|
|
258
|
+
if (interval < minInterval) minInterval = interval;
|
|
259
|
+
});
|
|
260
|
+
|
|
261
|
+
this.levelIntervalId = setInterval(() => {
|
|
262
|
+
this.levelCallbacks.forEach((_, callback) => {
|
|
263
|
+
try {
|
|
264
|
+
callback(this.lastLevel);
|
|
265
|
+
} catch (e) {
|
|
266
|
+
console.error('Error in level callback:', e);
|
|
267
|
+
}
|
|
268
|
+
});
|
|
269
|
+
|
|
270
|
+
if (this._status.state === 'recording') {
|
|
271
|
+
this.updateStatus({
|
|
272
|
+
duration: Date.now() - this.startTime,
|
|
273
|
+
level: this.lastLevel,
|
|
274
|
+
});
|
|
275
|
+
}
|
|
276
|
+
}, minInterval);
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
private cleanup(): void {
|
|
280
|
+
if (this.levelIntervalId) {
|
|
281
|
+
clearInterval(this.levelIntervalId);
|
|
282
|
+
this.levelIntervalId = null;
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
if (this.scriptProcessorNode) {
|
|
286
|
+
this.scriptProcessorNode.disconnect();
|
|
287
|
+
this.scriptProcessorNode = null;
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
if (this.mediaStreamNode) {
|
|
291
|
+
this.mediaStreamNode.disconnect();
|
|
292
|
+
this.mediaStreamNode = null;
|
|
293
|
+
}
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
private updateState(state: RecorderState): void {
|
|
297
|
+
this.updateStatus({
|
|
298
|
+
state,
|
|
299
|
+
isRecording: state === 'recording',
|
|
300
|
+
isPaused: state === 'paused',
|
|
301
|
+
});
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
private updateStatus(partial: Partial<RecorderStatus>): void {
|
|
305
|
+
this._status = { ...this._status, ...partial };
|
|
306
|
+
|
|
307
|
+
this.stateCallbacks.forEach((callback) => {
|
|
308
|
+
try {
|
|
309
|
+
callback(this._status);
|
|
310
|
+
} catch (e) {
|
|
311
|
+
console.error('Error in state callback:', e);
|
|
312
|
+
}
|
|
313
|
+
});
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
private handleError(error: any): void {
|
|
317
|
+
const audioError = createAudioError(
|
|
318
|
+
error.code || 'UNKNOWN',
|
|
319
|
+
error.message || String(error),
|
|
320
|
+
error instanceof Error ? error : undefined
|
|
321
|
+
);
|
|
322
|
+
|
|
323
|
+
this.updateStatus({ state: 'error', error: audioError });
|
|
324
|
+
this.cleanup();
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
export function createRecorder(audioContext: IAudioContext): IRecorder {
|
|
329
|
+
return new NativeRecorder(audioContext);
|
|
330
|
+
}
|
|
@@ -0,0 +1,399 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Web Recorder
|
|
3
|
+
*
|
|
4
|
+
* Uses Web Audio API with AudioWorklet for recording on web.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type {
|
|
8
|
+
IRecorder,
|
|
9
|
+
RecorderStatus,
|
|
10
|
+
RecorderState,
|
|
11
|
+
PermissionStatus,
|
|
12
|
+
AudioConfig,
|
|
13
|
+
AudioLevel,
|
|
14
|
+
PCMData,
|
|
15
|
+
RecorderDataCallback,
|
|
16
|
+
RecorderLevelCallback,
|
|
17
|
+
RecorderStateCallback,
|
|
18
|
+
IAudioContext,
|
|
19
|
+
} from '../types';
|
|
20
|
+
import {
|
|
21
|
+
DEFAULT_AUDIO_CONFIG,
|
|
22
|
+
DEFAULT_AUDIO_LEVEL,
|
|
23
|
+
DEFAULT_RECORDER_STATUS,
|
|
24
|
+
DEFAULT_LEVEL_UPDATE_INTERVAL,
|
|
25
|
+
} from '../constants';
|
|
26
|
+
import {
|
|
27
|
+
createAudioError,
|
|
28
|
+
calculateAudioLevels,
|
|
29
|
+
mergeConfig,
|
|
30
|
+
arrayBufferToBase64,
|
|
31
|
+
float32ToInt16,
|
|
32
|
+
} from '../utils';
|
|
33
|
+
|
|
34
|
+
// AudioWorklet processor source kept as a string so it can be handed to
// ctx.audioWorklet.addModule() via a Blob URL — no separate worklet file to
// bundle or host. The processor accumulates the first channel's Float32
// samples and posts them to the main thread in fixed 4096-sample chunks over
// the worklet's MessagePort. (This text is executed in the AudioWorklet
// scope, not here; it must remain valid plain JavaScript.)
const PROCESSOR_CODE = `
class RecorderProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.bufferSize = 4096;
    this.buffer = [];
  }

  process(inputs, outputs, parameters) {
    const input = inputs[0];
    if (input && input.length > 0) {
      const channelData = input[0];

      // Copy samples to buffer
      for (let i = 0; i < channelData.length; i++) {
        this.buffer.push(channelData[i]);
      }

      // Send buffer when full
      if (this.buffer.length >= this.bufferSize) {
        const samples = new Float32Array(this.buffer.splice(0, this.bufferSize));
        this.port.postMessage({ samples });
      }
    }
    return true;
  }
}

registerProcessor('recorder-processor', RecorderProcessor);
`;
|
|
65
|
+
|
|
66
|
+
export class WebRecorder implements IRecorder {
|
|
67
|
+
private audioContext: IAudioContext;
|
|
68
|
+
private mediaStream: MediaStream | null = null;
|
|
69
|
+
private sourceNode: MediaStreamAudioSourceNode | null = null;
|
|
70
|
+
private workletNode: AudioWorkletNode | null = null;
|
|
71
|
+
private processorUrl: string | null = null;
|
|
72
|
+
|
|
73
|
+
private config: AudioConfig = DEFAULT_AUDIO_CONFIG;
|
|
74
|
+
private _status: RecorderStatus = { ...DEFAULT_RECORDER_STATUS };
|
|
75
|
+
private peakLevel: number = 0;
|
|
76
|
+
private startTime: number = 0;
|
|
77
|
+
private levelIntervalId: ReturnType<typeof setInterval> | null = null;
|
|
78
|
+
private lastLevel: AudioLevel = DEFAULT_AUDIO_LEVEL;
|
|
79
|
+
|
|
80
|
+
// Callbacks
|
|
81
|
+
private dataCallbacks: Set<RecorderDataCallback> = new Set();
|
|
82
|
+
private levelCallbacks: Map<RecorderLevelCallback, number> = new Map();
|
|
83
|
+
private stateCallbacks: Set<RecorderStateCallback> = new Set();
|
|
84
|
+
|
|
85
|
+
constructor(audioContext: IAudioContext) {
|
|
86
|
+
this.audioContext = audioContext;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
get status(): RecorderStatus {
|
|
90
|
+
return { ...this._status };
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
async checkPermission(): Promise<PermissionStatus> {
|
|
94
|
+
try {
|
|
95
|
+
const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
|
|
96
|
+
const status = this.mapPermissionState(result.state);
|
|
97
|
+
this.updateStatus({ permission: status });
|
|
98
|
+
return status;
|
|
99
|
+
} catch {
|
|
100
|
+
// Firefox doesn't support permissions.query for microphone
|
|
101
|
+
return 'undetermined';
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
async requestPermission(): Promise<PermissionStatus> {
|
|
106
|
+
try {
|
|
107
|
+
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
|
108
|
+
stream.getTracks().forEach((track) => track.stop());
|
|
109
|
+
this.updateStatus({ permission: 'granted' });
|
|
110
|
+
return 'granted';
|
|
111
|
+
} catch (error: any) {
|
|
112
|
+
if (error.name === 'NotAllowedError') {
|
|
113
|
+
this.updateStatus({ permission: 'denied' });
|
|
114
|
+
return 'denied';
|
|
115
|
+
}
|
|
116
|
+
this.updateStatus({ permission: 'denied' });
|
|
117
|
+
return 'denied';
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
async start(configOverride?: Partial<AudioConfig>): Promise<void> {
|
|
122
|
+
if (this._status.state === 'recording') {
|
|
123
|
+
return;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
this.updateState('requesting_permission');
|
|
127
|
+
|
|
128
|
+
try {
|
|
129
|
+
// Request microphone access
|
|
130
|
+
const permission = await this.requestPermission();
|
|
131
|
+
if (permission !== 'granted') {
|
|
132
|
+
throw createAudioError('PERMISSION_DENIED', 'Microphone permission denied');
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
this.updateState('starting');
|
|
136
|
+
this.config = mergeConfig(configOverride, DEFAULT_AUDIO_CONFIG);
|
|
137
|
+
|
|
138
|
+
// Ensure audio context is initialized
|
|
139
|
+
if (!this.audioContext.isInitialized) {
|
|
140
|
+
await this.audioContext.initialize();
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
const ctx = this.audioContext.getContext() as AudioContext;
|
|
144
|
+
if (!ctx) {
|
|
145
|
+
throw createAudioError('INITIALIZATION_FAILED', 'AudioContext not available');
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
// Get microphone stream
|
|
149
|
+
this.mediaStream = await navigator.mediaDevices.getUserMedia({
|
|
150
|
+
audio: {
|
|
151
|
+
sampleRate: this.config.sampleRate,
|
|
152
|
+
channelCount: this.config.channels,
|
|
153
|
+
echoCancellation: true,
|
|
154
|
+
noiseSuppression: true,
|
|
155
|
+
},
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
// Create source node
|
|
159
|
+
this.sourceNode = ctx.createMediaStreamSource(this.mediaStream);
|
|
160
|
+
|
|
161
|
+
// Create and load AudioWorklet processor
|
|
162
|
+
if (!this.processorUrl) {
|
|
163
|
+
const blob = new Blob([PROCESSOR_CODE], { type: 'application/javascript' });
|
|
164
|
+
this.processorUrl = URL.createObjectURL(blob);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
try {
|
|
168
|
+
await ctx.audioWorklet.addModule(this.processorUrl);
|
|
169
|
+
} catch {
|
|
170
|
+
// Module might already be loaded
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
// Create worklet node
|
|
174
|
+
this.workletNode = new AudioWorkletNode(ctx, 'recorder-processor');
|
|
175
|
+
|
|
176
|
+
// Handle audio data from worklet
|
|
177
|
+
this.workletNode.port.onmessage = (event) => {
|
|
178
|
+
this.handleAudioData(event.data.samples);
|
|
179
|
+
};
|
|
180
|
+
|
|
181
|
+
// Connect nodes
|
|
182
|
+
this.sourceNode.connect(this.workletNode);
|
|
183
|
+
// Don't connect to destination (we don't want to hear ourselves)
|
|
184
|
+
|
|
185
|
+
// Start level metering
|
|
186
|
+
this.startLevelMetering();
|
|
187
|
+
|
|
188
|
+
// Update state
|
|
189
|
+
this.startTime = Date.now();
|
|
190
|
+
this.peakLevel = 0;
|
|
191
|
+
this.updateState('recording');
|
|
192
|
+
this.updateStatus({ config: this.config });
|
|
193
|
+
} catch (error: any) {
|
|
194
|
+
this.handleError(error);
|
|
195
|
+
throw error;
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
async stop(): Promise<void> {
|
|
200
|
+
if (this._status.state === 'idle' || this._status.state === 'stopping') {
|
|
201
|
+
return;
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
this.updateState('stopping');
|
|
205
|
+
this.cleanup();
|
|
206
|
+
this.updateState('idle');
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
async pause(): Promise<void> {
|
|
210
|
+
if (this._status.state !== 'recording') {
|
|
211
|
+
return;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
// Web Audio doesn't have native pause, we just stop processing
|
|
215
|
+
if (this.workletNode) {
|
|
216
|
+
this.workletNode.disconnect();
|
|
217
|
+
}
|
|
218
|
+
this.updateState('paused');
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
async resume(): Promise<void> {
|
|
222
|
+
if (this._status.state !== 'paused') {
|
|
223
|
+
return;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
// Reconnect the worklet
|
|
227
|
+
if (this.sourceNode && this.workletNode) {
|
|
228
|
+
this.sourceNode.connect(this.workletNode);
|
|
229
|
+
}
|
|
230
|
+
this.updateState('recording');
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
onData(callback: RecorderDataCallback): () => void {
|
|
234
|
+
this.dataCallbacks.add(callback);
|
|
235
|
+
return () => {
|
|
236
|
+
this.dataCallbacks.delete(callback);
|
|
237
|
+
};
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
onLevel(callback: RecorderLevelCallback, intervalMs = DEFAULT_LEVEL_UPDATE_INTERVAL): () => void {
|
|
241
|
+
this.levelCallbacks.set(callback, intervalMs);
|
|
242
|
+
return () => {
|
|
243
|
+
this.levelCallbacks.delete(callback);
|
|
244
|
+
};
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
onStateChange(callback: RecorderStateCallback): () => void {
|
|
248
|
+
this.stateCallbacks.add(callback);
|
|
249
|
+
return () => {
|
|
250
|
+
this.stateCallbacks.delete(callback);
|
|
251
|
+
};
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
resetPeakLevel(): void {
|
|
255
|
+
this.peakLevel = 0;
|
|
256
|
+
this.lastLevel = { ...this.lastLevel, peak: 0 };
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
dispose(): void {
|
|
260
|
+
this.cleanup();
|
|
261
|
+
this.dataCallbacks.clear();
|
|
262
|
+
this.levelCallbacks.clear();
|
|
263
|
+
this.stateCallbacks.clear();
|
|
264
|
+
|
|
265
|
+
if (this.processorUrl) {
|
|
266
|
+
URL.revokeObjectURL(this.processorUrl);
|
|
267
|
+
this.processorUrl = null;
|
|
268
|
+
}
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
// Private methods
|
|
272
|
+
|
|
273
|
+
private mapPermissionState(state: PermissionState): PermissionStatus {
|
|
274
|
+
switch (state) {
|
|
275
|
+
case 'granted':
|
|
276
|
+
return 'granted';
|
|
277
|
+
case 'denied':
|
|
278
|
+
return 'denied';
|
|
279
|
+
default:
|
|
280
|
+
return 'undetermined';
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
private handleAudioData(float32Samples: Float32Array): void {
|
|
285
|
+
// Convert to Int16 for PCM data
|
|
286
|
+
const int16Samples = float32ToInt16(float32Samples);
|
|
287
|
+
const buffer = int16Samples.buffer;
|
|
288
|
+
|
|
289
|
+
// Calculate levels
|
|
290
|
+
const level = calculateAudioLevels(int16Samples, 16, this.peakLevel);
|
|
291
|
+
this.peakLevel = level.peak;
|
|
292
|
+
this.lastLevel = level;
|
|
293
|
+
|
|
294
|
+
// Create PCM data object
|
|
295
|
+
const pcmData: PCMData = {
|
|
296
|
+
buffer,
|
|
297
|
+
samples: int16Samples,
|
|
298
|
+
timestamp: Date.now(),
|
|
299
|
+
config: this.config,
|
|
300
|
+
toBase64: () => arrayBufferToBase64(buffer),
|
|
301
|
+
};
|
|
302
|
+
|
|
303
|
+
// Notify callbacks
|
|
304
|
+
this.dataCallbacks.forEach((callback) => {
|
|
305
|
+
try {
|
|
306
|
+
callback(pcmData);
|
|
307
|
+
} catch (e) {
|
|
308
|
+
console.error('Error in data callback:', e);
|
|
309
|
+
}
|
|
310
|
+
});
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
private startLevelMetering(): void {
|
|
314
|
+
if (this.levelIntervalId) {
|
|
315
|
+
clearInterval(this.levelIntervalId);
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
let minInterval = DEFAULT_LEVEL_UPDATE_INTERVAL;
|
|
319
|
+
this.levelCallbacks.forEach((interval) => {
|
|
320
|
+
if (interval < minInterval) minInterval = interval;
|
|
321
|
+
});
|
|
322
|
+
|
|
323
|
+
this.levelIntervalId = setInterval(() => {
|
|
324
|
+
// Notify level callbacks
|
|
325
|
+
this.levelCallbacks.forEach((_, callback) => {
|
|
326
|
+
try {
|
|
327
|
+
callback(this.lastLevel);
|
|
328
|
+
} catch (e) {
|
|
329
|
+
console.error('Error in level callback:', e);
|
|
330
|
+
}
|
|
331
|
+
});
|
|
332
|
+
|
|
333
|
+
// Update duration
|
|
334
|
+
if (this._status.state === 'recording') {
|
|
335
|
+
this.updateStatus({
|
|
336
|
+
duration: Date.now() - this.startTime,
|
|
337
|
+
level: this.lastLevel,
|
|
338
|
+
});
|
|
339
|
+
}
|
|
340
|
+
}, minInterval);
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
private cleanup(): void {
|
|
344
|
+
if (this.levelIntervalId) {
|
|
345
|
+
clearInterval(this.levelIntervalId);
|
|
346
|
+
this.levelIntervalId = null;
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
if (this.workletNode) {
|
|
350
|
+
this.workletNode.disconnect();
|
|
351
|
+
this.workletNode = null;
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
if (this.sourceNode) {
|
|
355
|
+
this.sourceNode.disconnect();
|
|
356
|
+
this.sourceNode = null;
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
if (this.mediaStream) {
|
|
360
|
+
this.mediaStream.getTracks().forEach((track) => track.stop());
|
|
361
|
+
this.mediaStream = null;
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
private updateState(state: RecorderState): void {
|
|
366
|
+
this.updateStatus({
|
|
367
|
+
state,
|
|
368
|
+
isRecording: state === 'recording',
|
|
369
|
+
isPaused: state === 'paused',
|
|
370
|
+
});
|
|
371
|
+
}
|
|
372
|
+
|
|
373
|
+
private updateStatus(partial: Partial<RecorderStatus>): void {
|
|
374
|
+
this._status = { ...this._status, ...partial };
|
|
375
|
+
|
|
376
|
+
this.stateCallbacks.forEach((callback) => {
|
|
377
|
+
try {
|
|
378
|
+
callback(this._status);
|
|
379
|
+
} catch (e) {
|
|
380
|
+
console.error('Error in state callback:', e);
|
|
381
|
+
}
|
|
382
|
+
});
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
private handleError(error: any): void {
|
|
386
|
+
const audioError = createAudioError(
|
|
387
|
+
error.code || 'UNKNOWN',
|
|
388
|
+
error.message || String(error),
|
|
389
|
+
error instanceof Error ? error : undefined
|
|
390
|
+
);
|
|
391
|
+
|
|
392
|
+
this.updateStatus({ state: 'error', error: audioError });
|
|
393
|
+
this.cleanup();
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
export function createRecorder(audioContext: IAudioContext): IRecorder {
|
|
398
|
+
return new WebRecorder(audioContext);
|
|
399
|
+
}
|