@idealyst/microphone 1.1.6 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +10 -1
- package/src/examples/MicrophoneExamples.tsx +454 -0
- package/src/examples/index.ts +1 -0
- package/src/index.native.ts +1 -0
- package/src/index.ts +1 -0
- package/src/index.web.ts +1 -0
- package/src/microphone.native.ts +18 -1
- package/src/microphone.web.ts +41 -18
- package/src/types.ts +21 -0
- package/src/utils.ts +83 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@idealyst/microphone",
-  "version": "1.1.6",
+  "version": "1.1.8",
   "description": "Cross-platform microphone streaming for React and React Native",
   "documentation": "https://github.com/IdealystIO/idealyst-framework/tree/main/packages/microphone#readme",
   "readme": "README.md",
@@ -25,6 +25,11 @@
       "import": "./src/index.ts",
       "require": "./src/index.ts",
       "types": "./src/index.ts"
+    },
+    "./examples": {
+      "import": "./src/examples/index.ts",
+      "require": "./src/examples/index.ts",
+      "types": "./src/examples/index.ts"
     }
   },
   "scripts": {
@@ -32,11 +37,15 @@
     "publish:npm": "npm publish"
   },
   "peerDependencies": {
+    "@idealyst/components": "^1.1.8",
     "react": ">=16.8.0",
     "react-native": ">=0.60.0",
     "react-native-live-audio-stream": ">=1.1.0"
   },
   "peerDependenciesMeta": {
+    "@idealyst/components": {
+      "optional": true
+    },
     "react-native": {
       "optional": true
     },
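
Note: the new "./examples" subpath export exposes the demo screen to consumers. A minimal sketch of the intended usage, assuming a bundler that resolves the "exports" map (the consumer file name is hypothetical):

    // App.tsx (hypothetical consumer)
    import React from 'react';
    import { MicrophoneExamples } from '@idealyst/microphone/examples';

    export default function App() {
      return <MicrophoneExamples />;
    }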
package/src/examples/MicrophoneExamples.tsx
ADDED

@@ -0,0 +1,454 @@
+import React, { useEffect, useState, useRef, useCallback } from 'react';
+import { Screen, Text, View, Button, Card } from '@idealyst/components';
+import { useMicrophone, useRecorder } from '../hooks/index.web';
+import { AUDIO_PROFILES } from '../constants';
+import type { PCMData } from '../types';
+
+/**
+ * Audio level visualization bar
+ */
+const LevelMeter = ({ level, label }: { level: number; label: string }) => {
+  const percentage = Math.min(100, Math.max(0, level * 100));
+  const color = percentage > 80 ? '#ef4444' : percentage > 50 ? '#f59e0b' : '#22c55e';
+
+  return (
+    <View style={{ marginBottom: 8 }}>
+      <Text size="sm" style={{ marginBottom: 4 }}>
+        {label}: {percentage.toFixed(1)}%
+      </Text>
+      <View
+        style={{
+          height: 20,
+          backgroundColor: '#e5e7eb',
+          borderRadius: 4,
+          overflow: 'hidden',
+        }}
+      >
+        <View
+          style={{
+            height: '100%',
+            width: `${percentage}%`,
+            backgroundColor: color,
+            transition: 'width 50ms ease-out',
+          }}
+        />
+      </View>
+    </View>
+  );
+};
+
+/**
+ * Simple waveform visualization
+ */
+const Waveform = ({ samples }: { samples: number[] }) => {
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+
+  useEffect(() => {
+    const canvas = canvasRef.current;
+    if (!canvas || samples.length === 0) return;
+
+    const ctx = canvas.getContext('2d');
+    if (!ctx) return;
+
+    const width = canvas.width;
+    const height = canvas.height;
+    const centerY = height / 2;
+
+    // Clear canvas
+    ctx.fillStyle = '#f3f4f6';
+    ctx.fillRect(0, 0, width, height);
+
+    // Draw center line
+    ctx.strokeStyle = '#d1d5db';
+    ctx.lineWidth = 1;
+    ctx.beginPath();
+    ctx.moveTo(0, centerY);
+    ctx.lineTo(width, centerY);
+    ctx.stroke();
+
+    // Draw waveform
+    ctx.strokeStyle = '#3b82f6';
+    ctx.lineWidth = 2;
+    ctx.beginPath();
+
+    const step = Math.ceil(samples.length / width);
+    for (let i = 0; i < width; i++) {
+      const sampleIndex = i * step;
+      const sample = samples[Math.min(sampleIndex, samples.length - 1)] || 0;
+      const y = centerY + sample * centerY * 0.9;
+
+      if (i === 0) {
+        ctx.moveTo(i, y);
+      } else {
+        ctx.lineTo(i, y);
+      }
+    }
+    ctx.stroke();
+  }, [samples]);
+
+  return (
+    <canvas
+      ref={canvasRef}
+      width={600}
+      height={150}
+      style={{
+        width: '100%',
+        maxWidth: 600,
+        height: 150,
+        borderRadius: 8,
+        border: '1px solid #e5e7eb',
+      }}
+    />
+  );
+};
+
+/**
+ * Audio config selector
+ */
+const ConfigSelector = ({
+  selectedProfile,
+  onSelect,
+}: {
+  selectedProfile: string;
+  onSelect: (profile: string) => void;
+}) => {
+  const profiles = [
+    { key: 'speech', label: 'Speech (16kHz)', desc: 'Optimized for voice recognition' },
+    { key: 'highQuality', label: 'High Quality (44.1kHz)', desc: 'Music and high-fidelity audio' },
+    { key: 'lowLatency', label: 'Low Latency', desc: 'Real-time feedback' },
+    { key: 'minimal', label: 'Minimal (8kHz)', desc: 'Low bandwidth voice' },
+  ];
+
+  return (
+    <View style={{ gap: 8 }}>
+      <Text size="md" weight="semibold">
+        Audio Profile
+      </Text>
+      <View style={{ flexDirection: 'row', flexWrap: 'wrap', gap: 8 }}>
+        {profiles.map((profile) => (
+          <Button
+            key={profile.key}
+            variant={selectedProfile === profile.key ? 'solid' : 'outline'}
+            size="sm"
+            onPress={() => onSelect(profile.key)}
+          >
+            {profile.label}
+          </Button>
+        ))}
+      </View>
+    </View>
+  );
+};
+
+/**
+ * Microphone Examples - Demonstrates @idealyst/microphone package usage
+ */
+export const MicrophoneExamples = () => {
+  const [selectedProfile, setSelectedProfile] = useState<keyof typeof AUDIO_PROFILES>('speech');
+  const [waveformSamples, setWaveformSamples] = useState<number[]>([]);
+  const [sampleCount, setSampleCount] = useState(0);
+
+  // Get the audio config for the selected profile
+  const config = AUDIO_PROFILES[selectedProfile];
+
+  // Microphone hook for streaming
+  const {
+    isRecording,
+    level,
+    permission,
+    error,
+    status,
+    start,
+    stop,
+    requestPermission,
+    resetPeakLevel,
+    subscribeToAudioData,
+  } = useMicrophone({
+    config,
+    levelUpdateInterval: 50,
+  });
+
+  // Recorder hook for file recording
+  const {
+    isRecording: isFileRecording,
+    duration: recordingDuration,
+    startRecording,
+    stopRecording,
+  } = useRecorder();
+
+  const [lastRecording, setLastRecording] = useState<{
+    uri: string;
+    duration: number;
+    size: number;
+  } | null>(null);
+
+  // Subscribe to audio data for waveform visualization
+  useEffect(() => {
+    if (!isRecording) {
+      return;
+    }
+
+    const unsubscribe = subscribeToAudioData((pcmData: PCMData) => {
+      // Update sample count
+      setSampleCount((prev) => prev + pcmData.samples.length);
+
+      // Convert samples to normalized float array for visualization
+      const normalizedSamples: number[] = [];
+      const maxValue = config.bitDepth === 8 ? 128 : config.bitDepth === 16 ? 32768 : 1;
+
+      // Take every Nth sample to reduce data
+      const step = Math.max(1, Math.floor(pcmData.samples.length / 200));
+      for (let i = 0; i < pcmData.samples.length; i += step) {
+        normalizedSamples.push(pcmData.samples[i] / maxValue);
+      }
+
+      // Keep last 600 samples for display
+      setWaveformSamples((prev) => {
+        const combined = [...prev, ...normalizedSamples];
+        return combined.slice(-600);
+      });
+    });
+
+    return unsubscribe;
+  }, [isRecording, subscribeToAudioData, config.bitDepth]);
+
+  // Handle streaming start/stop
+  const handleToggleStream = useCallback(async () => {
+    if (isRecording) {
+      await stop();
+      setWaveformSamples([]);
+      setSampleCount(0);
+    } else {
+      setWaveformSamples([]);
+      setSampleCount(0);
+      await start();
+    }
+  }, [isRecording, start, stop]);
+
+  // Handle file recording start/stop
+  const handleToggleRecording = useCallback(async () => {
+    if (isFileRecording) {
+      const result = await stopRecording();
+      setLastRecording({
+        uri: result.uri,
+        duration: result.duration,
+        size: result.size,
+      });
+    } else {
+      setLastRecording(null);
+      await startRecording({
+        format: 'wav',
+        audioConfig: config,
+      });
+    }
+  }, [isFileRecording, startRecording, stopRecording, config]);
+
+  // Format duration
+  const formatDuration = (ms: number) => {
+    const seconds = Math.floor(ms / 1000);
+    const minutes = Math.floor(seconds / 60);
+    const remainingSeconds = seconds % 60;
+    return `${minutes}:${remainingSeconds.toString().padStart(2, '0')}`;
+  };
+
+  // Format file size
+  const formatSize = (bytes: number) => {
+    if (bytes < 1024) return `${bytes} B`;
+    if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
+    return `${(bytes / (1024 * 1024)).toFixed(2)} MB`;
+  };
+
+  return (
+    <Screen scroll>
+      <View spacing="lg" style={{ padding: 16 }}>
+        <Text size="xl" weight="bold">
+          Microphone
+        </Text>
+        <Text size="md" color="secondary">
+          Cross-platform microphone access with streaming audio and file recording.
+        </Text>
+
+        {/* Permission Status */}
+        <Card>
+          <View spacing="md" style={{ padding: 16 }}>
+            <Text size="md" weight="semibold">
+              Permission Status
+            </Text>
+            <View style={{ flexDirection: 'row', alignItems: 'center', gap: 12 }}>
+              <View
+                style={{
+                  width: 12,
+                  height: 12,
+                  borderRadius: 6,
+                  backgroundColor:
+                    permission === 'granted'
+                      ? '#22c55e'
+                      : permission === 'denied' || permission === 'blocked'
+                        ? '#ef4444'
+                        : '#f59e0b',
+                }}
+              />
+              <Text size="sm">
+                {permission === 'granted'
+                  ? 'Microphone access granted'
+                  : permission === 'denied'
+                    ? 'Permission denied'
+                    : permission === 'blocked'
+                      ? 'Permission blocked - enable in browser settings'
+                      : 'Permission not yet requested'}
+              </Text>
+            </View>
+            {permission !== 'granted' && (
+              <Button variant="outline" size="sm" onPress={requestPermission}>
+                Request Permission
+              </Button>
+            )}
+          </View>
+        </Card>
+
+        {/* Audio Config */}
+        <Card>
+          <View spacing="md" style={{ padding: 16 }}>
+            <ConfigSelector selectedProfile={selectedProfile} onSelect={(p) => setSelectedProfile(p as keyof typeof AUDIO_PROFILES)} />
+            <Text size="sm" color="secondary">
+              Sample Rate: {config.sampleRate} Hz | Channels: {config.channels} | Bit Depth: {config.bitDepth}-bit
+            </Text>
+          </View>
+        </Card>
+
+        {/* Streaming Section */}
+        <Card>
+          <View spacing="md" style={{ padding: 16 }}>
+            <Text size="md" weight="semibold">
+              Audio Streaming
+            </Text>
+            <Text size="sm" color="secondary">
+              Stream raw PCM audio data for real-time processing
+            </Text>
+
+            {/* Controls */}
+            <View style={{ flexDirection: 'row', gap: 12 }}>
+              <Button
+                variant={isRecording ? 'destructive' : 'solid'}
+                onPress={handleToggleStream}
+                disabled={permission !== 'granted' && !isRecording}
+              >
+                {isRecording ? 'Stop Streaming' : 'Start Streaming'}
+              </Button>
+              {isRecording && (
+                <Button variant="outline" onPress={resetPeakLevel}>
+                  Reset Peak
+                </Button>
+              )}
+            </View>
+
+            {/* Level Meters */}
+            {isRecording && (
+              <View style={{ marginTop: 16 }}>
+                <LevelMeter level={level.current} label="Current Level" />
+                <LevelMeter level={level.peak} label="Peak Level" />
+                <LevelMeter level={level.rms} label="RMS Level" />
+                <Text size="sm" color="secondary" style={{ marginTop: 8 }}>
+                  dB: {level.db === -Infinity ? '-∞' : level.db.toFixed(1)} dB
+                </Text>
+              </View>
+            )}
+
+            {/* Waveform */}
+            {waveformSamples.length > 0 && (
+              <View style={{ marginTop: 16 }}>
+                <Text size="sm" weight="medium" style={{ marginBottom: 8 }}>
+                  Waveform
+                </Text>
+                <Waveform samples={waveformSamples} />
+                <Text size="xs" color="secondary" style={{ marginTop: 4 }}>
+                  Processed {sampleCount.toLocaleString()} samples
+                </Text>
+              </View>
+            )}
+
+            {/* Status */}
+            <View style={{ marginTop: 16 }}>
+              <Text size="xs" color="secondary">
+                State: {status.state} | Duration: {formatDuration(status.duration)}
+              </Text>
+            </View>
+          </View>
+        </Card>
+
+        {/* File Recording Section */}
+        <Card>
+          <View spacing="md" style={{ padding: 16 }}>
+            <Text size="md" weight="semibold">
+              File Recording
+            </Text>
+            <Text size="sm" color="secondary">
+              Record audio to a WAV file
+            </Text>
+
+            {/* Controls */}
+            <View style={{ flexDirection: 'row', gap: 12, alignItems: 'center' }}>
+              <Button
+                variant={isFileRecording ? 'destructive' : 'solid'}
+                onPress={handleToggleRecording}
+                disabled={permission !== 'granted' && !isFileRecording}
+              >
+                {isFileRecording ? 'Stop Recording' : 'Start Recording'}
+              </Button>
+              {isFileRecording && (
+                <Text size="md" weight="semibold">
+                  {formatDuration(recordingDuration)}
+                </Text>
+              )}
+            </View>
+
+            {/* Last Recording */}
+            {lastRecording && (
+              <View style={{ marginTop: 16, padding: 12, backgroundColor: '#f3f4f6', borderRadius: 8 }}>
+                <Text size="sm" weight="medium">
+                  Recording Complete
+                </Text>
+                <Text size="sm" color="secondary">
+                  Duration: {formatDuration(lastRecording.duration)} | Size: {formatSize(lastRecording.size)}
+                </Text>
+                <View style={{ marginTop: 8 }}>
+                  <a
+                    href={lastRecording.uri}
+                    download="recording.wav"
+                    style={{
+                      display: 'inline-block',
+                      padding: '8px 16px',
+                      backgroundColor: '#3b82f6',
+                      color: 'white',
+                      borderRadius: 6,
+                      textDecoration: 'none',
+                      fontSize: 14,
+                    }}
+                  >
+                    Download WAV
+                  </a>
+                </View>
+              </View>
+            )}
+          </View>
+        </Card>
+
+        {/* Error Display */}
+        {error && (
+          <Card>
+            <View style={{ padding: 16, backgroundColor: '#fef2f2' }}>
+              <Text size="sm" weight="semibold" style={{ color: '#dc2626' }}>
+                Error: {error.code}
+              </Text>
+              <Text size="sm" style={{ color: '#dc2626' }}>
+                {error.message}
+              </Text>
+            </View>
+          </Card>
+        )}
+      </View>
+    </Screen>
+  );
+};
+
+export default MicrophoneExamples;
package/src/examples/index.ts
ADDED

@@ -0,0 +1 @@
+export { MicrophoneExamples } from './MicrophoneExamples';
package/src/index.native.ts
CHANGED
package/src/index.ts
CHANGED
package/src/index.web.ts
CHANGED
package/src/microphone.native.ts
CHANGED
@@ -95,11 +95,14 @@ export class NativeMicrophone implements IMicrophone {
     }

     // Initialize LiveAudioStream
+    // audioSource values (Android):
+    // 0 = DEFAULT, 1 = MIC, 6 = VOICE_RECOGNITION, 7 = VOICE_COMMUNICATION
+    // Using VOICE_COMMUNICATION (7) for speakerphone mode with better gain
     LiveAudioStream.init({
       sampleRate: this.config.sampleRate,
       channels: this.config.channels,
       bitsPerSample: this.config.bitDepth === 32 ? 16 : this.config.bitDepth, // Native doesn't support 32-bit
-      audioSource:
+      audioSource: 7, // VOICE_COMMUNICATION - speakerphone mode with better gain
       bufferSize: this.config.bufferSize,
       wavFile: '', // Empty string = streaming mode (no file output)
     });
@@ -244,6 +247,20 @@ export class NativeMicrophone implements IMicrophone {
       ...this.config,
       bitDepth: effectiveBitDepth, // Actual bit depth used
     },
+    async toBlob(mimeType = 'application/octet-stream'): Promise<Blob> {
+      // React Native can't create Blob from ArrayBuffer/Uint8Array directly
+      // The only reliable way is to use fetch with a data URI
+      const dataUri = `data:${mimeType};base64,${base64Data}`;
+      const response = await fetch(dataUri);
+      return response.blob();
+    },
+    toBase64(): string {
+      // Native already has the base64 data, so just return it
+      return base64Data;
+    },
+    toDataUri(mimeType = 'application/octet-stream'): string {
+      return `data:${mimeType};base64,${base64Data}`;
+    },
   };

   // Calculate levels from samples
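
Note: the toBlob() added above works around React Native's Blob, which cannot be constructed from an ArrayBuffer or Uint8Array; fetching a data: URI is the round trip that yields a real Blob. The same technique in isolation, as a sketch (base64ToBlob is a hypothetical helper name, not part of the package):

    // Decode a base64 payload into a Blob on React Native by round-tripping
    // through fetch, which can consume data: URIs.
    async function base64ToBlob(base64: string, mimeType = 'application/octet-stream'): Promise<Blob> {
      const response = await fetch(`data:${mimeType};base64,${base64}`);
      return response.blob();
    }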
package/src/microphone.web.ts
CHANGED
@@ -25,6 +25,7 @@ import {
   float32ToInt8,
   createMicrophoneError,
   mergeConfig,
+  arrayBufferToBase64,
 } from './utils';
 import { checkPermission, requestPermission } from './permissions/permissions.web';

@@ -111,26 +112,32 @@ export class WebMicrophone implements IMicrophone {
     this.config = mergeConfig(config, DEFAULT_AUDIO_CONFIG);

     try {
-      //
-
-
-
-
-
-
+      // Get microphone access directly - this will trigger permission prompt if needed
+      try {
+        this.mediaStream = await navigator.mediaDevices.getUserMedia({
+          audio: {
+            channelCount: this.config.channels,
+            sampleRate: this.config.sampleRate,
+            echoCancellation: false,
+            noiseSuppression: false,
+            autoGainControl: false,
+          },
+        });
+        this.updateStatus({ permission: 'granted' });
+      } catch (error) {
+        if (error instanceof Error) {
+          if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
+            this.updateStatus({ permission: 'denied' });
+            throw createMicrophoneError('PERMISSION_DENIED', 'Microphone permission not granted', error);
+          }
+          if (error.name === 'NotFoundError') {
+            this.updateStatus({ permission: 'unavailable' });
+            throw createMicrophoneError('DEVICE_NOT_FOUND', 'No microphone found', error);
+          }
+        }
+        throw error;
       }

-      // Get microphone access
-      this.mediaStream = await navigator.mediaDevices.getUserMedia({
-        audio: {
-          channelCount: this.config.channels,
-          sampleRate: this.config.sampleRate,
-          echoCancellation: false,
-          noiseSuppression: false,
-          autoGainControl: false,
-        },
-      });
-
       // Create audio context with specified sample rate
       this.audioContext = new AudioContext({
         sampleRate: this.config.sampleRate,
@@ -290,11 +297,27 @@ export class WebMicrophone implements IMicrophone {
       break;
     }

+    // Cache base64 for lazy conversion
+    let cachedBase64: string | null = null;
+
     const pcmData: PCMData = {
       buffer,
       samples,
       timestamp: Date.now(),
       config: this.config,
+      async toBlob(mimeType = 'application/octet-stream'): Promise<Blob> {
+        // Web can create Blob directly from ArrayBuffer
+        return new Blob([buffer], { type: mimeType });
+      },
+      toBase64(): string {
+        if (cachedBase64 === null) {
+          cachedBase64 = arrayBufferToBase64(buffer);
+        }
+        return cachedBase64;
+      },
+      toDataUri(mimeType = 'application/octet-stream'): string {
+        return `data:${mimeType};base64,${this.toBase64()}`;
+      },
     };

     // Notify all audio data listeners
package/src/types.ts
CHANGED
@@ -42,6 +42,27 @@ export interface PCMData {

   /** Audio configuration this data was captured with */
   config: AudioConfig;
+
+  /**
+   * Get the audio data as a Blob (cross-platform).
+   * On web, creates a Blob directly from ArrayBuffer.
+   * On native, uses fetch with data URI to create Blob.
+   * @param mimeType MIME type for the blob. Default: 'application/octet-stream'
+   */
+  toBlob(mimeType?: string): Promise<Blob>;
+
+  /**
+   * Get the audio data as a base64 string (cross-platform).
+   * Useful for sending audio data over APIs or storing.
+   */
+  toBase64(): string;
+
+  /**
+   * Get the audio data as a data URI (cross-platform).
+   * Useful for audio playback or display.
+   * @param mimeType MIME type for the data URI. Default: 'application/octet-stream'
+   */
+  toDataUri(mimeType?: string): string;
 }

 export interface AudioLevel {
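
Note: these three methods give each PCMData chunk a transport form without eager conversion. A consumer sketch, assuming subscribeToAudioData destructured from useMicrophone() as in MicrophoneExamples (the ingest URL is hypothetical):

    import type { PCMData } from '@idealyst/microphone';

    const unsubscribe = subscribeToAudioData(async (chunk: PCMData) => {
      // Raw bytes for an upload:
      await fetch('https://example.com/ingest', {
        method: 'POST',
        headers: { 'content-type': 'application/octet-stream' },
        body: await chunk.toBlob(),
      });
      // String forms for JSON payloads or quick inspection:
      const b64 = chunk.toBase64(); // cached after first call on web
      const uri = chunk.toDataUri(); // data:application/octet-stream;base64,...
    });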
package/src/utils.ts
CHANGED
@@ -34,6 +34,89 @@ export function arrayBufferToBase64(buffer: ArrayBuffer): string {
     : Buffer.from(binary, 'binary').toString('base64');
 }

+/**
+ * A Blob-like object for React Native that wraps an ArrayBuffer.
+ * Provides the essential Blob interface methods needed for our use case.
+ */
+class ArrayBufferBlob implements Blob {
+  private buffer: ArrayBuffer;
+  readonly size: number;
+  readonly type: string;
+
+  constructor(buffer: ArrayBuffer, mimeType: string) {
+    this.buffer = buffer;
+    this.size = buffer.byteLength;
+    this.type = mimeType;
+  }
+
+  async arrayBuffer(): Promise<ArrayBuffer> {
+    return this.buffer;
+  }
+
+  async text(): Promise<string> {
+    // TextDecoder may not be available in React Native
+    if (typeof TextDecoder !== 'undefined') {
+      const decoder = new TextDecoder();
+      return decoder.decode(this.buffer);
+    }
+    // Fallback: convert bytes to string manually
+    const bytes = new Uint8Array(this.buffer);
+    let result = '';
+    for (let i = 0; i < bytes.length; i++) {
+      result += String.fromCharCode(bytes[i]);
+    }
+    return result;
+  }
+
+  slice(start?: number, end?: number, contentType?: string): Blob {
+    const sliced = this.buffer.slice(start, end);
+    return new ArrayBufferBlob(sliced, contentType || this.type);
+  }
+
+  stream(): ReadableStream<Uint8Array> {
+    // ReadableStream may not be available in React Native
+    if (typeof ReadableStream === 'undefined') {
+      throw new Error('ReadableStream not supported in this environment');
+    }
+    const buffer = this.buffer;
+    return new ReadableStream({
+      start(controller) {
+        controller.enqueue(new Uint8Array(buffer));
+        controller.close();
+      },
+    });
+  }
+}
+
+/**
+ * Convert ArrayBuffer to Blob (cross-platform).
+ * On web, creates Blob directly.
+ * On React Native, creates a Blob-like wrapper since RN doesn't support Blob([ArrayBuffer]).
+ * @param buffer - The ArrayBuffer to convert
+ * @param mimeType - MIME type for the blob. Default: 'application/octet-stream'
+ */
+export async function arrayBufferToBlob(
+  buffer: ArrayBuffer,
+  mimeType = 'application/octet-stream'
+): Promise<Blob> {
+  // Check if we're in an environment where Blob([ArrayBuffer]) works
+  // This is a runtime check since React Native's Blob doesn't support ArrayBuffer
+  try {
+    // Try the direct approach first (works on web)
+    const blob = new Blob([buffer], { type: mimeType });
+    // Verify it actually worked by checking size
+    if (blob.size === buffer.byteLength) {
+      return blob;
+    }
+  } catch {
+    // Fall through to wrapper approach
+  }
+
+  // React Native path: use our ArrayBufferBlob wrapper
+  // This provides a Blob-like interface that works with arrayBuffer() calls
+  return new ArrayBufferBlob(buffer, mimeType) as Blob;
+}
+
 /**
  * Create appropriate TypedArray for PCM data based on bit depth.
  */