@waveform-playlist/recording 5.0.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +232 -0
- package/dist/index.d.mts +234 -0
- package/dist/index.d.ts +234 -0
- package/dist/index.js +756 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +709 -0
- package/dist/index.mjs.map +1 -0
- package/dist/worklet/recording-processor.worklet.js +64 -0
- package/dist/worklet/recording-processor.worklet.js.map +1 -0
- package/dist/worklet/recording-processor.worklet.mjs +62 -0
- package/dist/worklet/recording-processor.worklet.mjs.map +1 -0
- package/package.json +59 -0
package/dist/index.mjs
ADDED

@@ -0,0 +1,709 @@
// src/hooks/useRecording.ts
import { useState, useRef, useCallback, useEffect } from "react";

// src/utils/audioBufferUtils.ts
function concatenateAudioData(chunks) {
  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const result = new Float32Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  return result;
}
function createAudioBuffer(audioContext, samples, sampleRate, channelCount = 1) {
  const buffer = audioContext.createBuffer(
    channelCount,
    samples.length,
    sampleRate
  );
  const typedSamples = new Float32Array(samples);
  buffer.copyToChannel(typedSamples, 0);
  return buffer;
}

// src/utils/peaksGenerator.ts
function generatePeaks(samples, samplesPerPixel, bits = 16) {
  const numPeaks = Math.ceil(samples.length / samplesPerPixel);
  const peakArray = bits === 8 ? new Int8Array(numPeaks * 2) : new Int16Array(numPeaks * 2);
  const maxValue = 2 ** (bits - 1);
  for (let i = 0; i < numPeaks; i++) {
    const start = i * samplesPerPixel;
    const end = Math.min(start + samplesPerPixel, samples.length);
    let min = 0;
    let max = 0;
    for (let j = start; j < end; j++) {
      const value = samples[j];
      if (value < min) min = value;
      if (value > max) max = value;
    }
    peakArray[i * 2] = Math.floor(min * maxValue);
    peakArray[i * 2 + 1] = Math.floor(max * maxValue);
  }
  return peakArray;
}
function appendPeaks(existingPeaks, newSamples, samplesPerPixel, totalSamplesProcessed, bits = 16) {
  const maxValue = 2 ** (bits - 1);
  const remainder = totalSamplesProcessed % samplesPerPixel;
  let offset = 0;
  if (remainder > 0 && existingPeaks.length > 0) {
    const samplesToComplete = samplesPerPixel - remainder;
    const endIndex = Math.min(samplesToComplete, newSamples.length);
    let min = existingPeaks[existingPeaks.length - 2] / maxValue;
    let max = existingPeaks[existingPeaks.length - 1] / maxValue;
    for (let i = 0; i < endIndex; i++) {
      const value = newSamples[i];
      if (value < min) min = value;
      if (value > max) max = value;
    }
    const updated = new (bits === 8 ? Int8Array : Int16Array)(existingPeaks.length);
    updated.set(existingPeaks);
    updated[existingPeaks.length - 2] = Math.floor(min * maxValue);
    updated[existingPeaks.length - 1] = Math.floor(max * maxValue);
    offset = endIndex;
    const newPeaks2 = generatePeaks(newSamples.slice(offset), samplesPerPixel, bits);
    const result2 = new (bits === 8 ? Int8Array : Int16Array)(updated.length + newPeaks2.length);
    result2.set(updated);
    result2.set(newPeaks2, updated.length);
    return result2;
  }
  const newPeaks = generatePeaks(newSamples.slice(offset), samplesPerPixel, bits);
  const result = new (bits === 8 ? Int8Array : Int16Array)(existingPeaks.length + newPeaks.length);
  result.set(existingPeaks);
  result.set(newPeaks, existingPeaks.length);
  return result;
}

// src/hooks/useRecording.ts
import { getContext } from "tone";
function useRecording(stream, options = {}) {
  const {
    channelCount = 1,
    samplesPerPixel = 1024
  } = options;
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [duration, setDuration] = useState(0);
  const [peaks, setPeaks] = useState(new Int16Array(0));
  const [audioBuffer, setAudioBuffer] = useState(null);
  const [error, setError] = useState(null);
  const [level, setLevel] = useState(0);
  const [peakLevel, setPeakLevel] = useState(0);
  const bits = 16;
  const workletLoadedRef = useRef(false);
  const workletNodeRef = useRef(null);
  const mediaStreamSourceRef = useRef(null);
  const recordedChunksRef = useRef([]);
  const totalSamplesRef = useRef(0);
  const animationFrameRef = useRef(null);
  const startTimeRef = useRef(0);
  const isRecordingRef = useRef(false);
  const isPausedRef = useRef(false);
  const loadWorklet = useCallback(async () => {
    if (workletLoadedRef.current) {
      return;
    }
    try {
      const context = getContext();
      const workletUrl = new URL(
        "./worklet/recording-processor.worklet.js",
        import.meta.url
      ).href;
      await context.addAudioWorkletModule(workletUrl);
      workletLoadedRef.current = true;
    } catch (err) {
      console.error("Failed to load AudioWorklet module:", err);
      throw new Error("Failed to load recording processor");
    }
  }, []);
  const startRecording = useCallback(async () => {
    if (!stream) {
      setError(new Error("No microphone stream available"));
      return;
    }
    try {
      setError(null);
      const context = getContext();
      if (context.state === "suspended") {
        await context.resume();
      }
      await loadWorklet();
      const source = context.createMediaStreamSource(stream);
      mediaStreamSourceRef.current = source;
      const workletNode = context.createAudioWorkletNode("recording-processor");
      workletNodeRef.current = workletNode;
      source.connect(workletNode);
      workletNode.port.onmessage = (event) => {
        const { samples } = event.data;
        recordedChunksRef.current.push(samples);
        totalSamplesRef.current += samples.length;
        setPeaks(
          (prevPeaks) => appendPeaks(
            prevPeaks,
            samples,
            samplesPerPixel,
            totalSamplesRef.current - samples.length,
            bits
          )
        );
      };
      workletNode.port.postMessage({
        command: "start",
        sampleRate: context.sampleRate,
        channelCount
      });
      recordedChunksRef.current = [];
      totalSamplesRef.current = 0;
      setPeaks(new Int16Array(0));
      setAudioBuffer(null);
      setLevel(0);
      setPeakLevel(0);
      isRecordingRef.current = true;
      isPausedRef.current = false;
      setIsRecording(true);
      setIsPaused(false);
      startTimeRef.current = performance.now();
      const updateDuration = () => {
        if (isRecordingRef.current && !isPausedRef.current) {
          const elapsed = (performance.now() - startTimeRef.current) / 1e3;
          setDuration(elapsed);
          animationFrameRef.current = requestAnimationFrame(updateDuration);
        }
      };
      updateDuration();
    } catch (err) {
      console.error("Failed to start recording:", err);
      setError(err instanceof Error ? err : new Error("Failed to start recording"));
    }
  }, [stream, channelCount, samplesPerPixel, loadWorklet, isRecording, isPaused]);
  const stopRecording = useCallback(async () => {
    if (!isRecording) {
      return null;
    }
    try {
      if (workletNodeRef.current) {
        workletNodeRef.current.port.postMessage({ command: "stop" });
        if (mediaStreamSourceRef.current) {
          try {
            mediaStreamSourceRef.current.disconnect(workletNodeRef.current);
          } catch (e) {
          }
        }
        workletNodeRef.current.disconnect();
      }
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      const allSamples = concatenateAudioData(recordedChunksRef.current);
      const context = getContext();
      const rawContext = context.rawContext;
      const buffer = createAudioBuffer(
        rawContext,
        allSamples,
        rawContext.sampleRate,
        channelCount
      );
      setAudioBuffer(buffer);
      setDuration(buffer.duration);
      isRecordingRef.current = false;
      isPausedRef.current = false;
      setIsRecording(false);
      setIsPaused(false);
      setLevel(0);
      return buffer;
    } catch (err) {
      console.error("Failed to stop recording:", err);
      setError(err instanceof Error ? err : new Error("Failed to stop recording"));
      return null;
    }
  }, [isRecording, channelCount]);
  const pauseRecording = useCallback(() => {
    if (isRecording && !isPaused) {
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      isPausedRef.current = true;
      setIsPaused(true);
    }
  }, [isRecording, isPaused]);
  const resumeRecording = useCallback(() => {
    if (isRecording && isPaused) {
      isPausedRef.current = false;
      setIsPaused(false);
      startTimeRef.current = performance.now() - duration * 1e3;
      const updateDuration = () => {
        if (isRecordingRef.current && !isPausedRef.current) {
          const elapsed = (performance.now() - startTimeRef.current) / 1e3;
          setDuration(elapsed);
          animationFrameRef.current = requestAnimationFrame(updateDuration);
        }
      };
      updateDuration();
    }
  }, [isRecording, isPaused, duration]);
  useEffect(() => {
    return () => {
      if (workletNodeRef.current) {
        workletNodeRef.current.port.postMessage({ command: "stop" });
        if (mediaStreamSourceRef.current) {
          try {
            mediaStreamSourceRef.current.disconnect(workletNodeRef.current);
          } catch (e) {
          }
        }
        workletNodeRef.current.disconnect();
      }
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, []);
  return {
    isRecording,
    isPaused,
    duration,
    peaks,
    audioBuffer,
    level,
    peakLevel,
    startRecording,
    stopRecording,
    pauseRecording,
    resumeRecording,
    error
  };
}

// src/hooks/useMicrophoneAccess.ts
import { useState as useState2, useEffect as useEffect2, useCallback as useCallback2 } from "react";
function useMicrophoneAccess() {
  const [stream, setStream] = useState2(null);
  const [devices, setDevices] = useState2([]);
  const [hasPermission, setHasPermission] = useState2(false);
  const [isLoading, setIsLoading] = useState2(false);
  const [error, setError] = useState2(null);
  const enumerateDevices = useCallback2(async () => {
    try {
      const allDevices = await navigator.mediaDevices.enumerateDevices();
      const audioInputs = allDevices.filter((device) => device.kind === "audioinput").map((device) => ({
        deviceId: device.deviceId,
        label: device.label || `Microphone ${device.deviceId.slice(0, 8)}`,
        groupId: device.groupId
      }));
      setDevices(audioInputs);
    } catch (err) {
      console.error("Failed to enumerate devices:", err);
      setError(err instanceof Error ? err : new Error("Failed to enumerate devices"));
    }
  }, []);
  const requestAccess = useCallback2(async (deviceId, audioConstraints) => {
    setIsLoading(true);
    setError(null);
    try {
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
      const audio = {
        // Recording-optimized defaults: prioritize raw audio quality and low latency
        echoCancellation: false,
        noiseSuppression: false,
        autoGainControl: false,
        latency: 0,
        // Low latency mode (not in TS types yet, but supported in modern browsers)
        // User-provided constraints override defaults
        ...audioConstraints,
        // Device ID override (if specified)
        ...deviceId && { deviceId: { exact: deviceId } }
      };
      const constraints = {
        audio,
        video: false
      };
      const newStream = await navigator.mediaDevices.getUserMedia(constraints);
      setStream(newStream);
      setHasPermission(true);
      await enumerateDevices();
    } catch (err) {
      console.error("Failed to access microphone:", err);
      setError(
        err instanceof Error ? err : new Error("Failed to access microphone")
      );
      setHasPermission(false);
    } finally {
      setIsLoading(false);
    }
  }, [stream, enumerateDevices]);
  const stopStream = useCallback2(() => {
    if (stream) {
      stream.getTracks().forEach((track) => track.stop());
      setStream(null);
      setHasPermission(false);
    }
  }, [stream]);
  useEffect2(() => {
    enumerateDevices();
    return () => {
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
    };
  }, []);
  return {
    stream,
    devices,
    hasPermission,
    isLoading,
    requestAccess,
    stopStream,
    error
  };
}

// src/hooks/useMicrophoneLevel.ts
import { useEffect as useEffect3, useState as useState3, useRef as useRef2 } from "react";
import { Meter, getContext as getContext2, connect } from "tone";
function useMicrophoneLevel(stream, options = {}) {
  const {
    updateRate = 60,
    smoothingTimeConstant = 0.8
  } = options;
  const [level, setLevel] = useState3(0);
  const [peakLevel, setPeakLevel] = useState3(0);
  const meterRef = useRef2(null);
  const sourceRef = useRef2(null);
  const animationFrameRef = useRef2(null);
  const resetPeak = () => setPeakLevel(0);
  useEffect3(() => {
    if (!stream) {
      setLevel(0);
      setPeakLevel(0);
      return;
    }
    let isMounted = true;
    const setupMonitoring = async () => {
      if (!isMounted) return;
      const context = getContext2();
      if (context.state === "suspended") {
        await context.resume();
      }
      if (!isMounted) return;
      const meter = new Meter({ smoothing: smoothingTimeConstant, context });
      meterRef.current = meter;
      const source = context.createMediaStreamSource(stream);
      sourceRef.current = source;
      connect(source, meter);
      const updateInterval = 1e3 / updateRate;
      let lastUpdateTime = 0;
      const updateLevel = (timestamp) => {
        if (!isMounted || !meterRef.current) return;
        if (timestamp - lastUpdateTime >= updateInterval) {
          lastUpdateTime = timestamp;
          const db = meterRef.current.getValue();
          const dbValue = typeof db === "number" ? db : db[0];
          const normalized = Math.max(0, Math.min(1, (dbValue + 100) / 100));
          setLevel(normalized);
          setPeakLevel((prev) => Math.max(prev, normalized));
        }
        animationFrameRef.current = requestAnimationFrame(updateLevel);
      };
      animationFrameRef.current = requestAnimationFrame(updateLevel);
    };
    setupMonitoring();
    return () => {
      isMounted = false;
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      if (sourceRef.current) {
        try {
          sourceRef.current.disconnect();
        } catch (e) {
        }
        sourceRef.current = null;
      }
      if (meterRef.current) {
        meterRef.current.dispose();
        meterRef.current = null;
      }
    };
  }, [stream, smoothingTimeConstant, updateRate]);
  return {
    level,
    peakLevel,
    resetPeak
  };
}

// src/components/RecordButton.tsx
import styled from "styled-components";
import { jsx, jsxs } from "react/jsx-runtime";
var Button = styled.button`
  padding: 0.5rem 1rem;
  font-size: 0.875rem;
  font-weight: 500;
  border: none;
  border-radius: 0.25rem;
  cursor: pointer;
  transition: all 0.2s ease-in-out;
  background: ${(props) => props.$isRecording ? "#dc3545" : "#e74c3c"};
  color: white;

  &:hover:not(:disabled) {
    background: ${(props) => props.$isRecording ? "#c82333" : "#c0392b"};
    transform: translateY(-1px);
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
  }

  &:active:not(:disabled) {
    transform: translateY(0);
  }

  &:disabled {
    opacity: 0.5;
    cursor: not-allowed;
  }

  &:focus {
    outline: none;
    box-shadow: 0 0 0 3px rgba(231, 76, 60, 0.3);
  }
`;
var RecordingIndicator = styled.span`
  display: inline-block;
  width: 8px;
  height: 8px;
  border-radius: 50%;
  background: white;
  margin-right: 0.5rem;
  animation: pulse 1.5s ease-in-out infinite;

  @keyframes pulse {
    0%,
    100% {
      opacity: 1;
    }
    50% {
      opacity: 0.3;
    }
  }
`;
var RecordButton = ({
  isRecording,
  onClick,
  disabled = false,
  className
}) => {
  return /* @__PURE__ */ jsxs(
    Button,
    {
      $isRecording: isRecording,
      onClick,
      disabled,
      className,
      "aria-label": isRecording ? "Stop recording" : "Start recording",
      children: [
        isRecording && /* @__PURE__ */ jsx(RecordingIndicator, {}),
        isRecording ? "Stop Recording" : "Record"
      ]
    }
  );
};

// src/components/MicrophoneSelector.tsx
import styled2 from "styled-components";
import { BaseSelect, BaseLabel } from "@waveform-playlist/ui-components";
import { jsx as jsx2, jsxs as jsxs2 } from "react/jsx-runtime";
var Select = styled2(BaseSelect)`
  min-width: 200px;
`;
var Label = styled2(BaseLabel)`
  display: flex;
  flex-direction: column;
  gap: 0.25rem;
`;
var MicrophoneSelector = ({
  devices,
  selectedDeviceId,
  onDeviceChange,
  disabled = false,
  className
}) => {
  const handleChange = (event) => {
    onDeviceChange(event.target.value);
  };
  const currentValue = selectedDeviceId || (devices.length > 0 ? devices[0].deviceId : "");
  return /* @__PURE__ */ jsxs2(Label, { className, children: [
    "Microphone",
    /* @__PURE__ */ jsx2(
      Select,
      {
        value: currentValue,
        onChange: handleChange,
        disabled: disabled || devices.length === 0,
        children: devices.length === 0 ? /* @__PURE__ */ jsx2("option", { value: "", children: "No microphones found" }) : devices.map((device) => /* @__PURE__ */ jsx2("option", { value: device.deviceId, children: device.label }, device.deviceId))
      }
    )
  ] });
};

// src/components/RecordingIndicator.tsx
import styled3 from "styled-components";
import { jsx as jsx3, jsxs as jsxs3 } from "react/jsx-runtime";
var Container = styled3.div`
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.5rem 0.75rem;
  background: ${(props) => props.$isRecording ? "#fff3cd" : "transparent"};
  border-radius: 0.25rem;
  transition: background 0.2s ease-in-out;
`;
var Dot = styled3.div`
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: ${(props) => props.$isPaused ? "#ffc107" : "#dc3545"};
  opacity: ${(props) => props.$isRecording ? 1 : 0};
  transition: opacity 0.2s ease-in-out;

  ${(props) => props.$isRecording && !props.$isPaused && `
    animation: blink 1.5s ease-in-out infinite;

    @keyframes blink {
      0%, 100% {
        opacity: 1;
      }
      50% {
        opacity: 0.3;
      }
    }
  `}
`;
var Duration = styled3.span`
  font-family: 'Courier New', Monaco, monospace;
  font-size: 1rem;
  font-weight: 600;
  color: #495057;
  min-width: 70px;
`;
var Status = styled3.span`
  font-size: 0.75rem;
  font-weight: 500;
  color: ${(props) => props.$isPaused ? "#ffc107" : "#dc3545"};
  text-transform: uppercase;
`;
var defaultFormatTime = (seconds) => {
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
};
var RecordingIndicator2 = ({
  isRecording,
  isPaused = false,
  duration,
  formatTime = defaultFormatTime,
  className
}) => {
  return /* @__PURE__ */ jsxs3(Container, { $isRecording: isRecording, className, children: [
    /* @__PURE__ */ jsx3(Dot, { $isRecording: isRecording, $isPaused: isPaused }),
    /* @__PURE__ */ jsx3(Duration, { children: formatTime(duration) }),
    isRecording && /* @__PURE__ */ jsx3(Status, { $isPaused: isPaused, children: isPaused ? "Paused" : "Recording" })
  ] });
};

// src/components/VUMeter.tsx
import React from "react";
import styled4 from "styled-components";
import { jsx as jsx4, jsxs as jsxs4 } from "react/jsx-runtime";
var MeterContainer = styled4.div`
  position: relative;
  width: ${(props) => props.$width}px;
  height: ${(props) => props.$height}px;
  background: #2c3e50;
  border-radius: 4px;
  overflow: hidden;
  box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.3);
`;
var getLevelGradient = (level) => {
  if (level < 0.6) return "linear-gradient(90deg, #27ae60, #2ecc71)";
  if (level < 0.85) return "linear-gradient(90deg, #f39c12, #f1c40f)";
  return "linear-gradient(90deg, #c0392b, #e74c3c)";
};
var MeterFill = styled4.div.attrs((props) => ({
  style: {
    width: `${props.$level * 100}%`,
    height: `${props.$height}px`,
    background: getLevelGradient(props.$level),
    boxShadow: props.$level > 0.01 ? "0 0 8px rgba(255, 255, 255, 0.3)" : "none"
  }
}))`
  position: absolute;
  left: 0;
  top: 0;
  transition: width 0.05s ease-out, background 0.1s ease-out;
`;
var PeakIndicator = styled4.div.attrs((props) => ({
  style: {
    left: `${props.$peakLevel * 100}%`,
    height: `${props.$height}px`
  }
}))`
  position: absolute;
  top: 0;
  width: 2px;
  background: #ecf0f1;
  box-shadow: 0 0 4px rgba(236, 240, 241, 0.8);
  transition: left 0.1s ease-out;
`;
var ScaleMarkers = styled4.div`
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: ${(props) => props.$height}px;
  pointer-events: none;
`;
var ScaleMark = styled4.div`
  position: absolute;
  left: ${(props) => props.$position}%;
  top: 0;
  width: 1px;
  height: ${(props) => props.$height}px;
  background: rgba(255, 255, 255, 0.2);
`;
var VUMeterComponent = ({
  level,
  peakLevel,
  width = 200,
  height = 20,
  className
}) => {
  const clampedLevel = Math.max(0, Math.min(1, level));
  const clampedPeak = peakLevel !== void 0 ? Math.max(0, Math.min(1, peakLevel)) : 0;
  return /* @__PURE__ */ jsxs4(MeterContainer, { $width: width, $height: height, className, children: [
    /* @__PURE__ */ jsx4(MeterFill, { $level: clampedLevel, $height: height }),
    peakLevel !== void 0 && clampedPeak > 0 && /* @__PURE__ */ jsx4(PeakIndicator, { $peakLevel: clampedPeak, $height: height }),
    /* @__PURE__ */ jsxs4(ScaleMarkers, { $height: height, children: [
      /* @__PURE__ */ jsx4(ScaleMark, { $position: 60, $height: height }),
      /* @__PURE__ */ jsx4(ScaleMark, { $position: 85, $height: height })
    ] })
  ] });
};
var VUMeter = React.memo(VUMeterComponent);
export {
  MicrophoneSelector,
  RecordButton,
  RecordingIndicator2 as RecordingIndicator,
  VUMeter,
  concatenateAudioData,
  createAudioBuffer,
  generatePeaks,
  useMicrophoneAccess,
  useMicrophoneLevel,
  useRecording
};
//# sourceMappingURL=index.mjs.map
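
Note on the peak data layout: generatePeaks emits one interleaved [min, max] pair per bucket of samplesPerPixel samples, scaled by 2 ** (bits - 1), and appendPeaks re-opens the last, possibly partial bucket before appending new ones. A small worked example with the default 16-bit output (values computed by hand from the code above; the input values are made up for illustration):

import { generatePeaks } from "@waveform-playlist/recording";

// Two buckets of two samples each (samplesPerPixel = 2).
const samples = new Float32Array([0.5, -0.25, 0.125, -0.5]);
const peaks = generatePeaks(samples, 2);
// Bucket 0: min -0.25, max 0.5   -> [-8192, 16384]  (scaled by 2 ** 15 = 32768)
// Bucket 1: min -0.5,  max 0.125 -> [-16384, 4096]
// peaks is Int16Array [-8192, 16384, -16384, 4096]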
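
On metering, useMicrophoneLevel maps the Tone.js Meter reading (in dB) onto [0, 1] with a fixed -100 dB floor. Restated as a standalone helper (toMeterLevel is a hypothetical name for illustration, not a package export):

// Same mapping as in updateLevel above: clamp((dB + 100) / 100) into [0, 1].
const toMeterLevel = (db) => Math.max(0, Math.min(1, (db + 100) / 100));

toMeterLevel(-100); // 0   (at or below the floor reads as silence)
toMeterLevel(-50);  // 0.5
toMeterLevel(0);    // 1   (full scale)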
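
Taken together, the exports cover a full recording UI: device access, level metering, recording state, and matching components. A minimal wiring sketch, assuming a React host app; RecorderPanel and deviceId are hypothetical names, while everything imported is exported by this bundle:

import { useState } from "react";
import {
  useMicrophoneAccess,
  useMicrophoneLevel,
  useRecording,
  MicrophoneSelector,
  VUMeter,
  RecordingIndicator,
  RecordButton
} from "@waveform-playlist/recording";

function RecorderPanel() {
  const [deviceId, setDeviceId] = useState(void 0);
  const { stream, devices, requestAccess } = useMicrophoneAccess();
  const { level, peakLevel } = useMicrophoneLevel(stream);
  const {
    isRecording,
    isPaused,
    duration,
    startRecording,
    stopRecording
  } = useRecording(stream, { channelCount: 1, samplesPerPixel: 1024 });
  return (
    <div>
      <MicrophoneSelector
        devices={devices}
        selectedDeviceId={deviceId}
        onDeviceChange={(id) => {
          setDeviceId(id);
          requestAccess(id); // re-acquire the stream on the chosen device
        }}
      />
      <VUMeter level={level} peakLevel={peakLevel} />
      <RecordingIndicator isRecording={isRecording} isPaused={isPaused} duration={duration} />
      <RecordButton
        isRecording={isRecording}
        onClick={() => (isRecording ? stopRecording() : startRecording())}
        disabled={!stream}
      />
    </div>
  );
}

Note that requestAccess must run once (typically behind a user gesture) before stream is non-null; the sketch triggers it from the device selector.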