@waveform-playlist/recording 5.0.0-alpha.0
- package/LICENSE.md +21 -0
- package/README.md +232 -0
- package/dist/index.d.mts +234 -0
- package/dist/index.d.ts +234 -0
- package/dist/index.js +756 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +709 -0
- package/dist/index.mjs.map +1 -0
- package/dist/worklet/recording-processor.worklet.js +64 -0
- package/dist/worklet/recording-processor.worklet.js.map +1 -0
- package/dist/worklet/recording-processor.worklet.mjs +62 -0
- package/dist/worklet/recording-processor.worklet.mjs.map +1 -0
- package/package.json +59 -0
package/dist/index.js
ADDED
@@ -0,0 +1,756 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var src_exports = {};
__export(src_exports, {
  MicrophoneSelector: () => MicrophoneSelector,
  RecordButton: () => RecordButton,
  RecordingIndicator: () => RecordingIndicator2,
  VUMeter: () => VUMeter,
  concatenateAudioData: () => concatenateAudioData,
  createAudioBuffer: () => createAudioBuffer,
  generatePeaks: () => generatePeaks,
  useMicrophoneAccess: () => useMicrophoneAccess,
  useMicrophoneLevel: () => useMicrophoneLevel,
  useRecording: () => useRecording
});
module.exports = __toCommonJS(src_exports);

// src/hooks/useRecording.ts
var import_react = require("react");

// src/utils/audioBufferUtils.ts
function concatenateAudioData(chunks) {
  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const result = new Float32Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  return result;
}
function createAudioBuffer(audioContext, samples, sampleRate, channelCount = 1) {
  const buffer = audioContext.createBuffer(
    channelCount,
    samples.length,
    sampleRate
  );
  const typedSamples = new Float32Array(samples);
  buffer.copyToChannel(typedSamples, 0);
  return buffer;
}
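
As a quick illustration of how these two helpers fit together (a sketch of ours, not part of the published file; ctx and chunks are hypothetical stand-ins for a browser AudioContext and worklet output):

// Usage sketch (not part of the published file): flatten recorded chunks
// into one Float32Array, then wrap it in a playable AudioBuffer.
const ctx = new AudioContext();
const chunks = [new Float32Array([0, 0.5]), new Float32Array([0.5, 0])];
const samples = concatenateAudioData(chunks); // Float32Array of length 4
const buffer = createAudioBuffer(ctx, samples, ctx.sampleRate); // mono by default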

// src/utils/peaksGenerator.ts
function generatePeaks(samples, samplesPerPixel, bits = 16) {
  const numPeaks = Math.ceil(samples.length / samplesPerPixel);
  const peakArray = bits === 8 ? new Int8Array(numPeaks * 2) : new Int16Array(numPeaks * 2);
  const maxValue = 2 ** (bits - 1);
  for (let i = 0; i < numPeaks; i++) {
    const start = i * samplesPerPixel;
    const end = Math.min(start + samplesPerPixel, samples.length);
    let min = 0;
    let max = 0;
    for (let j = start; j < end; j++) {
      const value = samples[j];
      if (value < min) min = value;
      if (value > max) max = value;
    }
    peakArray[i * 2] = Math.floor(min * maxValue);
    peakArray[i * 2 + 1] = Math.floor(max * maxValue);
  }
  return peakArray;
}
function appendPeaks(existingPeaks, newSamples, samplesPerPixel, totalSamplesProcessed, bits = 16) {
  const maxValue = 2 ** (bits - 1);
  const remainder = totalSamplesProcessed % samplesPerPixel;
  let offset = 0;
  if (remainder > 0 && existingPeaks.length > 0) {
    const samplesToComplete = samplesPerPixel - remainder;
    const endIndex = Math.min(samplesToComplete, newSamples.length);
    let min = existingPeaks[existingPeaks.length - 2] / maxValue;
    let max = existingPeaks[existingPeaks.length - 1] / maxValue;
    for (let i = 0; i < endIndex; i++) {
      const value = newSamples[i];
      if (value < min) min = value;
      if (value > max) max = value;
    }
    const updated = new (bits === 8 ? Int8Array : Int16Array)(existingPeaks.length);
    updated.set(existingPeaks);
    updated[existingPeaks.length - 2] = Math.floor(min * maxValue);
    updated[existingPeaks.length - 1] = Math.floor(max * maxValue);
    offset = endIndex;
    const newPeaks2 = generatePeaks(newSamples.slice(offset), samplesPerPixel, bits);
    const result2 = new (bits === 8 ? Int8Array : Int16Array)(updated.length + newPeaks2.length);
    result2.set(updated);
    result2.set(newPeaks2, updated.length);
    return result2;
  }
  const newPeaks = generatePeaks(newSamples.slice(offset), samplesPerPixel, bits);
  const result = new (bits === 8 ? Int8Array : Int16Array)(existingPeaks.length + newPeaks.length);
  result.set(existingPeaks);
  result.set(newPeaks, existingPeaks.length);
  return result;
}
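
To make the peak layout concrete, a small sketch (ours, not from the package): each pixel column produces an interleaved [min, max] pair, scaled to the signed range of bits, i.e. 2 ** 15 for the 16-bit default.

// Sketch: peak layout for a 440 Hz sine at 44.1 kHz, 1024 samples per pixel.
const sine = new Float32Array(4096);
for (let i = 0; i < sine.length; i++) {
  sine[i] = Math.sin((2 * Math.PI * 440 * i) / 44100);
}
const peaks = generatePeaks(sine, 1024);
// Int16Array of length 2 * ceil(4096 / 1024) = 8;
// peaks[0] / peaks[1] are the min/max of samples 0..1023, scaled by 2 ** 15.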

// src/hooks/useRecording.ts
var import_tone = require("tone");
var import_meta = {};
function useRecording(stream, options = {}) {
  const {
    channelCount = 1,
    samplesPerPixel = 1024
  } = options;
  const [isRecording, setIsRecording] = (0, import_react.useState)(false);
  const [isPaused, setIsPaused] = (0, import_react.useState)(false);
  const [duration, setDuration] = (0, import_react.useState)(0);
  const [peaks, setPeaks] = (0, import_react.useState)(new Int16Array(0));
  const [audioBuffer, setAudioBuffer] = (0, import_react.useState)(null);
  const [error, setError] = (0, import_react.useState)(null);
  const [level, setLevel] = (0, import_react.useState)(0);
  const [peakLevel, setPeakLevel] = (0, import_react.useState)(0);
  const bits = 16;
  const workletLoadedRef = (0, import_react.useRef)(false);
  const workletNodeRef = (0, import_react.useRef)(null);
  const mediaStreamSourceRef = (0, import_react.useRef)(null);
  const recordedChunksRef = (0, import_react.useRef)([]);
  const totalSamplesRef = (0, import_react.useRef)(0);
  const animationFrameRef = (0, import_react.useRef)(null);
  const startTimeRef = (0, import_react.useRef)(0);
  const isRecordingRef = (0, import_react.useRef)(false);
  const isPausedRef = (0, import_react.useRef)(false);
  const loadWorklet = (0, import_react.useCallback)(async () => {
    if (workletLoadedRef.current) {
      return;
    }
    try {
      const context = (0, import_tone.getContext)();
      const workletUrl = new URL(
        "./worklet/recording-processor.worklet.js",
        import_meta.url
      ).href;
      await context.addAudioWorkletModule(workletUrl);
      workletLoadedRef.current = true;
    } catch (err) {
      console.error("Failed to load AudioWorklet module:", err);
      throw new Error("Failed to load recording processor");
    }
  }, []);
  const startRecording = (0, import_react.useCallback)(async () => {
    if (!stream) {
      setError(new Error("No microphone stream available"));
      return;
    }
    try {
      setError(null);
      const context = (0, import_tone.getContext)();
      if (context.state === "suspended") {
        await context.resume();
      }
      await loadWorklet();
      const source = context.createMediaStreamSource(stream);
      mediaStreamSourceRef.current = source;
      const workletNode = context.createAudioWorkletNode("recording-processor");
      workletNodeRef.current = workletNode;
      source.connect(workletNode);
      workletNode.port.onmessage = (event) => {
        const { samples } = event.data;
        recordedChunksRef.current.push(samples);
        totalSamplesRef.current += samples.length;
        setPeaks(
          (prevPeaks) => appendPeaks(
            prevPeaks,
            samples,
            samplesPerPixel,
            totalSamplesRef.current - samples.length,
            bits
          )
        );
      };
      workletNode.port.postMessage({
        command: "start",
        sampleRate: context.sampleRate,
        channelCount
      });
      recordedChunksRef.current = [];
      totalSamplesRef.current = 0;
      setPeaks(new Int16Array(0));
      setAudioBuffer(null);
      setLevel(0);
      setPeakLevel(0);
      isRecordingRef.current = true;
      isPausedRef.current = false;
      setIsRecording(true);
      setIsPaused(false);
      startTimeRef.current = performance.now();
      const updateDuration = () => {
        if (isRecordingRef.current && !isPausedRef.current) {
          const elapsed = (performance.now() - startTimeRef.current) / 1e3;
          setDuration(elapsed);
          animationFrameRef.current = requestAnimationFrame(updateDuration);
        }
      };
      updateDuration();
    } catch (err) {
      console.error("Failed to start recording:", err);
      setError(err instanceof Error ? err : new Error("Failed to start recording"));
    }
  }, [stream, channelCount, samplesPerPixel, loadWorklet, isRecording, isPaused]);
  const stopRecording = (0, import_react.useCallback)(async () => {
    if (!isRecording) {
      return null;
    }
    try {
      if (workletNodeRef.current) {
        workletNodeRef.current.port.postMessage({ command: "stop" });
        if (mediaStreamSourceRef.current) {
          try {
            mediaStreamSourceRef.current.disconnect(workletNodeRef.current);
          } catch (e) {
          }
        }
        workletNodeRef.current.disconnect();
      }
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      const allSamples = concatenateAudioData(recordedChunksRef.current);
      const context = (0, import_tone.getContext)();
      const rawContext = context.rawContext;
      const buffer = createAudioBuffer(
        rawContext,
        allSamples,
        rawContext.sampleRate,
        channelCount
      );
      setAudioBuffer(buffer);
      setDuration(buffer.duration);
      isRecordingRef.current = false;
      isPausedRef.current = false;
      setIsRecording(false);
      setIsPaused(false);
      setLevel(0);
      return buffer;
    } catch (err) {
      console.error("Failed to stop recording:", err);
      setError(err instanceof Error ? err : new Error("Failed to stop recording"));
      return null;
    }
  }, [isRecording, channelCount]);
  const pauseRecording = (0, import_react.useCallback)(() => {
    if (isRecording && !isPaused) {
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      isPausedRef.current = true;
      setIsPaused(true);
    }
  }, [isRecording, isPaused]);
  const resumeRecording = (0, import_react.useCallback)(() => {
    if (isRecording && isPaused) {
      isPausedRef.current = false;
      setIsPaused(false);
      startTimeRef.current = performance.now() - duration * 1e3;
      const updateDuration = () => {
        if (isRecordingRef.current && !isPausedRef.current) {
          const elapsed = (performance.now() - startTimeRef.current) / 1e3;
          setDuration(elapsed);
          animationFrameRef.current = requestAnimationFrame(updateDuration);
        }
      };
      updateDuration();
    }
  }, [isRecording, isPaused, duration]);
  (0, import_react.useEffect)(() => {
    return () => {
      if (workletNodeRef.current) {
        workletNodeRef.current.port.postMessage({ command: "stop" });
        if (mediaStreamSourceRef.current) {
          try {
            mediaStreamSourceRef.current.disconnect(workletNodeRef.current);
          } catch (e) {
          }
        }
        workletNodeRef.current.disconnect();
      }
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, []);
  return {
    isRecording,
    isPaused,
    duration,
    peaks,
    audioBuffer,
    level,
    peakLevel,
    startRecording,
    stopRecording,
    pauseRecording,
    resumeRecording,
    error
  };
}
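
A minimal consumer sketch for the hook above (RecorderControls is a hypothetical component name; plain React.createElement is used since this bundle is compiled CommonJS, not JSX):

// Usage sketch (not part of the published file).
const React = require("react");
const { useRecording } = require("@waveform-playlist/recording");

function RecorderControls({ stream }) {
  // stream is a MediaStream, e.g. from useMicrophoneAccess below.
  const { isRecording, duration, startRecording, stopRecording } =
    useRecording(stream, { samplesPerPixel: 1024 });
  return React.createElement(
    "button",
    { onClick: () => (isRecording ? stopRecording() : startRecording()) },
    isRecording ? `Stop (${duration.toFixed(1)}s)` : "Record"
  );
}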

// src/hooks/useMicrophoneAccess.ts
var import_react2 = require("react");
function useMicrophoneAccess() {
  const [stream, setStream] = (0, import_react2.useState)(null);
  const [devices, setDevices] = (0, import_react2.useState)([]);
  const [hasPermission, setHasPermission] = (0, import_react2.useState)(false);
  const [isLoading, setIsLoading] = (0, import_react2.useState)(false);
  const [error, setError] = (0, import_react2.useState)(null);
  const enumerateDevices = (0, import_react2.useCallback)(async () => {
    try {
      const allDevices = await navigator.mediaDevices.enumerateDevices();
      const audioInputs = allDevices.filter((device) => device.kind === "audioinput").map((device) => ({
        deviceId: device.deviceId,
        label: device.label || `Microphone ${device.deviceId.slice(0, 8)}`,
        groupId: device.groupId
      }));
      setDevices(audioInputs);
    } catch (err) {
      console.error("Failed to enumerate devices:", err);
      setError(err instanceof Error ? err : new Error("Failed to enumerate devices"));
    }
  }, []);
  const requestAccess = (0, import_react2.useCallback)(async (deviceId, audioConstraints) => {
    setIsLoading(true);
    setError(null);
    try {
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
      const audio = {
        // Recording-optimized defaults: prioritize raw audio quality and low latency
        echoCancellation: false,
        noiseSuppression: false,
        autoGainControl: false,
        latency: 0,
        // Low latency mode (not in TS types yet, but supported in modern browsers)
        // User-provided constraints override defaults
        ...audioConstraints,
        // Device ID override (if specified)
        ...deviceId && { deviceId: { exact: deviceId } }
      };
      const constraints = {
        audio,
        video: false
      };
      const newStream = await navigator.mediaDevices.getUserMedia(constraints);
      setStream(newStream);
      setHasPermission(true);
      await enumerateDevices();
    } catch (err) {
      console.error("Failed to access microphone:", err);
      setError(
        err instanceof Error ? err : new Error("Failed to access microphone")
      );
      setHasPermission(false);
    } finally {
      setIsLoading(false);
    }
  }, [stream, enumerateDevices]);
  const stopStream = (0, import_react2.useCallback)(() => {
    if (stream) {
      stream.getTracks().forEach((track) => track.stop());
      setStream(null);
      setHasPermission(false);
    }
  }, [stream]);
  (0, import_react2.useEffect)(() => {
    enumerateDevices();
    return () => {
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
    };
  }, []);
  return {
    stream,
    devices,
    hasPermission,
    isLoading,
    requestAccess,
    stopStream,
    error
  };
}
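
A hypothetical sketch for wiring this hook up (the helper useFirstMicrophone is ours, not part of the package): request a stream once on mount and let the recording-friendly constraint defaults apply.

// Usage sketch (not part of the published file).
const React2 = require("react");
const { useMicrophoneAccess } = require("@waveform-playlist/recording");

function useFirstMicrophone() {
  const { stream, devices, requestAccess, error } = useMicrophoneAccess();
  React2.useEffect(() => {
    // No deviceId argument: the hook's defaults apply, with
    // echoCancellation/noiseSuppression/autoGainControl all off.
    requestAccess();
  }, []);
  return { stream, devices, error };
}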

// src/hooks/useMicrophoneLevel.ts
var import_react3 = require("react");
var import_tone2 = require("tone");
function useMicrophoneLevel(stream, options = {}) {
  const {
    updateRate = 60,
    smoothingTimeConstant = 0.8
  } = options;
  const [level, setLevel] = (0, import_react3.useState)(0);
  const [peakLevel, setPeakLevel] = (0, import_react3.useState)(0);
  const meterRef = (0, import_react3.useRef)(null);
  const sourceRef = (0, import_react3.useRef)(null);
  const animationFrameRef = (0, import_react3.useRef)(null);
  const resetPeak = () => setPeakLevel(0);
  (0, import_react3.useEffect)(() => {
    if (!stream) {
      setLevel(0);
      setPeakLevel(0);
      return;
    }
    let isMounted = true;
    const setupMonitoring = async () => {
      if (!isMounted) return;
      const context = (0, import_tone2.getContext)();
      if (context.state === "suspended") {
        await context.resume();
      }
      if (!isMounted) return;
      const meter = new import_tone2.Meter({ smoothing: smoothingTimeConstant, context });
      meterRef.current = meter;
      const source = context.createMediaStreamSource(stream);
      sourceRef.current = source;
      (0, import_tone2.connect)(source, meter);
      const updateInterval = 1e3 / updateRate;
      let lastUpdateTime = 0;
      const updateLevel = (timestamp) => {
        if (!isMounted || !meterRef.current) return;
        if (timestamp - lastUpdateTime >= updateInterval) {
          lastUpdateTime = timestamp;
          const db = meterRef.current.getValue();
          const dbValue = typeof db === "number" ? db : db[0];
          const normalized = Math.max(0, Math.min(1, (dbValue + 100) / 100));
          setLevel(normalized);
          setPeakLevel((prev) => Math.max(prev, normalized));
        }
        animationFrameRef.current = requestAnimationFrame(updateLevel);
      };
      animationFrameRef.current = requestAnimationFrame(updateLevel);
    };
    setupMonitoring();
    return () => {
      isMounted = false;
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
      if (sourceRef.current) {
        try {
          sourceRef.current.disconnect();
        } catch (e) {
        }
        sourceRef.current = null;
      }
      if (meterRef.current) {
        meterRef.current.dispose();
        meterRef.current = null;
      }
    };
  }, [stream, smoothingTimeConstant, updateRate]);
  return {
    level,
    peakLevel,
    resetPeak
  };
}
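
The meter maps Tone.js dB readings onto 0..1 with (dB + 100) / 100, clamped. A few worked values (our illustration, not package code):

// Worked examples of the normalization used above:
//   -100 dB (treated as the silence floor) -> (-100 + 100) / 100 = 0
//    -12 dB                                -> ( -12 + 100) / 100 = 0.88
//      0 dB (full scale)                   -> (   0 + 100) / 100 = 1
const normalize = (db) => Math.max(0, Math.min(1, (db + 100) / 100));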

// src/components/RecordButton.tsx
var import_styled_components = __toESM(require("styled-components"));
var import_jsx_runtime = require("react/jsx-runtime");
var Button = import_styled_components.default.button`
  padding: 0.5rem 1rem;
  font-size: 0.875rem;
  font-weight: 500;
  border: none;
  border-radius: 0.25rem;
  cursor: pointer;
  transition: all 0.2s ease-in-out;
  background: ${(props) => props.$isRecording ? "#dc3545" : "#e74c3c"};
  color: white;

  &:hover:not(:disabled) {
    background: ${(props) => props.$isRecording ? "#c82333" : "#c0392b"};
    transform: translateY(-1px);
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
  }

  &:active:not(:disabled) {
    transform: translateY(0);
  }

  &:disabled {
    opacity: 0.5;
    cursor: not-allowed;
  }

  &:focus {
    outline: none;
    box-shadow: 0 0 0 3px rgba(231, 76, 60, 0.3);
  }
`;
var RecordingIndicator = import_styled_components.default.span`
  display: inline-block;
  width: 8px;
  height: 8px;
  border-radius: 50%;
  background: white;
  margin-right: 0.5rem;
  animation: pulse 1.5s ease-in-out infinite;

  @keyframes pulse {
    0%,
    100% {
      opacity: 1;
    }
    50% {
      opacity: 0.3;
    }
  }
`;
var RecordButton = ({
  isRecording,
  onClick,
  disabled = false,
  className
}) => {
  return /* @__PURE__ */ (0, import_jsx_runtime.jsxs)(
    Button,
    {
      $isRecording: isRecording,
      onClick,
      disabled,
      className,
      "aria-label": isRecording ? "Stop recording" : "Start recording",
      children: [
        isRecording && /* @__PURE__ */ (0, import_jsx_runtime.jsx)(RecordingIndicator, {}),
        isRecording ? "Stop Recording" : "Record"
      ]
    }
  );
};

// src/components/MicrophoneSelector.tsx
var import_styled_components2 = __toESM(require("styled-components"));
var import_ui_components = require("@waveform-playlist/ui-components");
var import_jsx_runtime2 = require("react/jsx-runtime");
var Select = (0, import_styled_components2.default)(import_ui_components.BaseSelect)`
  min-width: 200px;
`;
var Label = (0, import_styled_components2.default)(import_ui_components.BaseLabel)`
  display: flex;
  flex-direction: column;
  gap: 0.25rem;
`;
var MicrophoneSelector = ({
  devices,
  selectedDeviceId,
  onDeviceChange,
  disabled = false,
  className
}) => {
  const handleChange = (event) => {
    onDeviceChange(event.target.value);
  };
  const currentValue = selectedDeviceId || (devices.length > 0 ? devices[0].deviceId : "");
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(Label, { className, children: [
    "Microphone",
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
      Select,
      {
        value: currentValue,
        onChange: handleChange,
        disabled: disabled || devices.length === 0,
        children: devices.length === 0 ? /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("option", { value: "", children: "No microphones found" }) : devices.map((device) => /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("option", { value: device.deviceId, children: device.label }, device.deviceId))
      }
    )
  ] });
};

// src/components/RecordingIndicator.tsx
var import_styled_components3 = __toESM(require("styled-components"));
var import_jsx_runtime3 = require("react/jsx-runtime");
var Container = import_styled_components3.default.div`
  display: flex;
  align-items: center;
  gap: 0.75rem;
  padding: 0.5rem 0.75rem;
  background: ${(props) => props.$isRecording ? "#fff3cd" : "transparent"};
  border-radius: 0.25rem;
  transition: background 0.2s ease-in-out;
`;
var Dot = import_styled_components3.default.div`
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: ${(props) => props.$isPaused ? "#ffc107" : "#dc3545"};
  opacity: ${(props) => props.$isRecording ? 1 : 0};
  transition: opacity 0.2s ease-in-out;

  ${(props) => props.$isRecording && !props.$isPaused && `
    animation: blink 1.5s ease-in-out infinite;

    @keyframes blink {
      0%, 100% {
        opacity: 1;
      }
      50% {
        opacity: 0.3;
      }
    }
  `}
`;
var Duration = import_styled_components3.default.span`
  font-family: 'Courier New', Monaco, monospace;
  font-size: 1rem;
  font-weight: 600;
  color: #495057;
  min-width: 70px;
`;
var Status = import_styled_components3.default.span`
  font-size: 0.75rem;
  font-weight: 500;
  color: ${(props) => props.$isPaused ? "#ffc107" : "#dc3545"};
  text-transform: uppercase;
`;
var defaultFormatTime = (seconds) => {
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
};
var RecordingIndicator2 = ({
  isRecording,
  isPaused = false,
  duration,
  formatTime = defaultFormatTime,
  className
}) => {
  return /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)(Container, { $isRecording: isRecording, className, children: [
    /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(Dot, { $isRecording: isRecording, $isPaused: isPaused }),
    /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(Duration, { children: formatTime(duration) }),
    isRecording && /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(Status, { $isPaused: isPaused, children: isPaused ? "Paused" : "Recording" })
  ] });
};

// src/components/VUMeter.tsx
var import_react4 = __toESM(require("react"));
var import_styled_components4 = __toESM(require("styled-components"));
var import_jsx_runtime4 = require("react/jsx-runtime");
var MeterContainer = import_styled_components4.default.div`
  position: relative;
  width: ${(props) => props.$width}px;
  height: ${(props) => props.$height}px;
  background: #2c3e50;
  border-radius: 4px;
  overflow: hidden;
  box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.3);
`;
var getLevelGradient = (level) => {
  if (level < 0.6) return "linear-gradient(90deg, #27ae60, #2ecc71)";
  if (level < 0.85) return "linear-gradient(90deg, #f39c12, #f1c40f)";
  return "linear-gradient(90deg, #c0392b, #e74c3c)";
};
var MeterFill = import_styled_components4.default.div.attrs((props) => ({
  style: {
    width: `${props.$level * 100}%`,
    height: `${props.$height}px`,
    background: getLevelGradient(props.$level),
    boxShadow: props.$level > 0.01 ? "0 0 8px rgba(255, 255, 255, 0.3)" : "none"
  }
}))`
  position: absolute;
  left: 0;
  top: 0;
  transition: width 0.05s ease-out, background 0.1s ease-out;
`;
var PeakIndicator = import_styled_components4.default.div.attrs((props) => ({
  style: {
    left: `${props.$peakLevel * 100}%`,
    height: `${props.$height}px`
  }
}))`
  position: absolute;
  top: 0;
  width: 2px;
  background: #ecf0f1;
  box-shadow: 0 0 4px rgba(236, 240, 241, 0.8);
  transition: left 0.1s ease-out;
`;
var ScaleMarkers = import_styled_components4.default.div`
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: ${(props) => props.$height}px;
  pointer-events: none;
`;
var ScaleMark = import_styled_components4.default.div`
  position: absolute;
  left: ${(props) => props.$position}%;
  top: 0;
  width: 1px;
  height: ${(props) => props.$height}px;
  background: rgba(255, 255, 255, 0.2);
`;
var VUMeterComponent = ({
  level,
  peakLevel,
  width = 200,
  height = 20,
  className
}) => {
  const clampedLevel = Math.max(0, Math.min(1, level));
  const clampedPeak = peakLevel !== void 0 ? Math.max(0, Math.min(1, peakLevel)) : 0;
  return /* @__PURE__ */ (0, import_jsx_runtime4.jsxs)(MeterContainer, { $width: width, $height: height, className, children: [
    /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(MeterFill, { $level: clampedLevel, $height: height }),
    peakLevel !== void 0 && clampedPeak > 0 && /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(PeakIndicator, { $peakLevel: clampedPeak, $height: height }),
    /* @__PURE__ */ (0, import_jsx_runtime4.jsxs)(ScaleMarkers, { $height: height, children: [
      /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(ScaleMark, { $position: 60, $height: height }),
      /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(ScaleMark, { $position: 85, $height: height })
    ] })
  ] });
};
var VUMeter = import_react4.default.memo(VUMeterComponent);
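
Putting hook and component together, a hypothetical sketch (LiveMeter is ours, not part of the package): drive VUMeter from useMicrophoneLevel so the fill and peak indicator track live input.

// Usage sketch (not part of the published file).
const React3 = require("react");
const { VUMeter, useMicrophoneLevel } = require("@waveform-playlist/recording");

function LiveMeter({ stream }) {
  // Poll at 30 updates/s instead of the default 60.
  const { level, peakLevel } = useMicrophoneLevel(stream, { updateRate: 30 });
  return React3.createElement(VUMeter, { level, peakLevel, width: 240, height: 16 });
}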
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  MicrophoneSelector,
  RecordButton,
  RecordingIndicator,
  VUMeter,
  concatenateAudioData,
  createAudioBuffer,
  generatePeaks,
  useMicrophoneAccess,
  useMicrophoneLevel,
  useRecording
});
//# sourceMappingURL=index.js.map