l-min-components 1.7.1310 → 1.7.1312
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "l-min-components",
|
|
3
|
-
"version": "1.7.
|
|
3
|
+
"version": "1.7.1312",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"files": [
|
|
6
6
|
"src/assets",
|
|
@@ -31,6 +31,7 @@
|
|
|
31
31
|
"iso-639-1": "^3.1.2",
|
|
32
32
|
"js-cookie": "^3.0.5",
|
|
33
33
|
"lottie-web": "^5.12.2",
|
|
34
|
+
"lucide-react": "^0.510.0",
|
|
34
35
|
"moment": "^2.29.4",
|
|
35
36
|
"npm": "^10.2.5",
|
|
36
37
|
"papaparse": "^5.4.1",
|
|
@@ -133,7 +133,7 @@ const useHeader = (props = { default: false }) => {
|
|
|
133
133
|
url: "/iam/v1/users/default_account/",
|
|
134
134
|
params: {
|
|
135
135
|
_account: accountID,
|
|
136
|
-
exclude_account: "PERSONAL",
|
|
136
|
+
// exclude_account: "PERSONAL",
|
|
137
137
|
},
|
|
138
138
|
// headers: {
|
|
139
139
|
// "Content-Type": "application/json",
|
package/src/components/index.js
CHANGED
|
@@ -57,3 +57,4 @@ export { default as VideoPlayer } from "./video-player";
|
|
|
57
57
|
export { default as useAudioPlayer } from "./useAudioPlayer";
|
|
58
58
|
export { default as AudioWaveComponent } from "./useAudioPlayer/audioWave";
|
|
59
59
|
export { default as InputEmoji } from "./InputEmoji";
|
|
60
|
+
export { default as useAudioRecorder } from "../hooks/recorder-kit";
|
|
@@ -0,0 +1,805 @@
|
|
|
1
|
+
import React, { useState, useRef, useEffect, useCallback } from "react";
|
|
2
|
+
import {
|
|
3
|
+
Mic,
|
|
4
|
+
StopCircle,
|
|
5
|
+
Play,
|
|
6
|
+
Pause,
|
|
7
|
+
Download,
|
|
8
|
+
AlertTriangle,
|
|
9
|
+
CheckCircle,
|
|
10
|
+
RefreshCw,
|
|
11
|
+
} from "lucide-react";
|
|
12
|
+
|
|
13
|
+
// Helper function to convert AudioBuffer to WAV Blob
|
|
14
|
+
// buffer: AudioBuffer
|
|
15
|
+
// Convert a decoded AudioBuffer into a 16-bit PCM WAV Blob.
//
// Only mono and stereo are encoded. Previously a buffer with more than two
// channels produced a corrupt file: the header advertised N channels while
// only channel 0's samples were written. We now clamp the channel count so
// the header always matches the encoded data.
//
// buffer: AudioBuffer
// opt: accepted for backward compatibility; currently unused.
// Returns: Blob with MIME type "audio/wav".
function audioBufferToWav(buffer, opt = {}) {
  const format = 1; // PCM (uncompressed)
  const bitDepth = 16; // 16-bit signed samples
  const sampleRate = buffer.sampleRate;
  // Clamp to 2 so the channel count in the WAV header matches the sample
  // data actually produced below (extra channels are dropped).
  const numChannels = Math.min(buffer.numberOfChannels, 2);

  const samples =
    numChannels === 2
      ? interleave(buffer.getChannelData(0), buffer.getChannelData(1))
      : buffer.getChannelData(0);

  const dataView = encodeWAV(
    samples,
    format,
    sampleRate,
    numChannels,
    bitDepth
  );
  return new Blob([dataView], { type: "audio/wav" });
}
|
|
35
|
+
|
|
36
|
+
// samples: Float32Array, format: number, sampleRate: number, numChannels: number, bitDepth: number
|
|
37
|
+
// Returns: DataView
|
|
38
|
+
// Build a complete WAV file (44-byte RIFF header followed by PCM data)
// inside a DataView.
//
// samples: Float32Array of (interleaved) samples in [-1, 1]
// format: WAV format tag (1 = PCM)
// sampleRate, numChannels, bitDepth: values written into the header
// Returns: DataView over the finished file bytes.
function encodeWAV(samples, format, sampleRate, numChannels, bitDepth) {
  const bytesPerSample = bitDepth / 8;
  const blockAlign = numChannels * bytesPerSample;
  const dataByteLength = samples.length * bytesPerSample;
  const view = new DataView(new ArrayBuffer(44 + dataByteLength));

  // --- RIFF chunk descriptor ---
  writeString(view, 0, "RIFF");
  view.setUint32(4, 36 + dataByteLength, true); // file size minus first 8 bytes
  writeString(view, 8, "WAVE");

  // --- "fmt " sub-chunk (16 bytes for plain PCM) ---
  writeString(view, 12, "fmt ");
  view.setUint32(16, 16, true);
  view.setUint16(20, format, true);
  view.setUint16(22, numChannels, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * blockAlign, true); // byte rate
  view.setUint16(32, blockAlign, true);
  view.setUint16(34, bitDepth, true);

  // --- "data" sub-chunk ---
  writeString(view, 36, "data");
  view.setUint32(40, dataByteLength, true);
  floatTo16BitPCM(view, 44, samples);

  return view;
}
|
|
76
|
+
|
|
77
|
+
// output: DataView, offset: number, input: Float32Array
|
|
78
|
+
// Write Float32 samples into `output` as little-endian signed 16-bit PCM
// starting at byte `offset`. Samples are clamped to [-1, 1]; negative
// values scale by 0x8000 and non-negative by 0x7fff (the int16 range is
// asymmetric).
// output: DataView, offset: number, input: Float32Array
function floatTo16BitPCM(output, offset, input) {
  let cursor = offset;
  for (const sample of input) {
    const clamped = Math.min(1, Math.max(-1, sample));
    const pcm = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff;
    output.setInt16(cursor, pcm, true);
    cursor += 2;
  }
}
|
|
84
|
+
|
|
85
|
+
// inputL: Float32Array, inputR: Float32Array
|
|
86
|
+
// Returns: Float32Array
|
|
87
|
+
// Interleave two equal-length channel buffers into a single
// L R L R ... Float32Array (frame-major stereo layout).
// inputL: Float32Array, inputR: Float32Array
// Returns: Float32Array of combined length.
function interleave(inputL, inputR) {
  const frames = inputL.length;
  const result = new Float32Array(inputL.length + inputR.length);
  for (let frame = 0; frame < frames; frame++) {
    result[2 * frame] = inputL[frame];
    result[2 * frame + 1] = inputR[frame];
  }
  return result;
}
|
|
99
|
+
|
|
100
|
+
// view: DataView, offset: number, string: string
|
|
101
|
+
// Write an ASCII string into `view` one byte per UTF-16 code unit,
// starting at byte `offset` (used for the WAV header tags).
// view: DataView, offset: number, string: string
function writeString(view, offset, string) {
  let pos = offset;
  for (let i = 0; i < string.length; i += 1) {
    view.setUint8(pos, string.charCodeAt(i));
    pos += 1;
  }
}
|
|
106
|
+
|
|
107
|
+
// The Headless Audio Recorder Hook
|
|
108
|
+
// Headless audio recorder hook.
//
// Manages microphone permission, the MediaRecorder lifecycle, a 1-second
// elapsed-time timer and post-recording WAV conversion, exposing state plus
// imperative `actions` so callers can build any UI on top.
//
// Props (all optional):
//   onStateChange({ newState, oldState })   - fired on every state transition.
//   onTimerUpdate(seconds)                  - fired once per second while recording.
//   onRecordingComplete({ wavBlob, audioChunks, duration })
//       - wavBlob is null when WAV conversion was not possible.
//   onError({ message, details })           - fired for every reported error.
//   onStreamReady({ stream, audioContext }) - fired just before recording starts.
//   config: { mimeType, audioBitsPerSecond } - MediaRecorder options.
//
// recordingState values: "idle" | "permissionNeeded" | "requestingPermission"
//   | "permissionDenied" | "ready" | "recording" | "paused" | "stopping"
//   | "processing" | "finished"
//
// Fix vs. previous revision: MediaRecorder's `onstop` handler closed over the
// `duration` state value captured when start() was created, so
// onRecordingComplete reported a stale (usually 0) duration. A `durationRef`
// mirror is now read inside the async handler instead.
export default function useAudioRecorder(props) {
  const {
    onStateChange,
    onTimerUpdate,
    onRecordingComplete,
    onError,
    onStreamReady,
    config,
  } = props || {};

  const [recordingState, setRecordingState] = useState("idle");
  const [duration, setDuration] = useState(0);
  const [audioChunks, setAudioChunks] = useState([]);
  const [finalWavBlob, setFinalWavBlob] = useState(null);
  const [audioURL, setAudioURL] = useState("");
  const [errorMessage, setErrorMessage] = useState(null);
  const [isMicrophoneAvailable, setIsMicrophoneAvailable] = useState(false);

  const mediaRecorderRef = useRef(null);
  const accumulatedChunksRef = useRef([]);
  const timerIntervalRef = useRef(null);
  const audioContextRef = useRef(null);
  const mediaStreamRef = useRef(null);
  // Mirrors `duration` so async callbacks (MediaRecorder.onstop) read the
  // CURRENT elapsed time instead of the value captured when start() ran.
  const durationRef = useRef(0);

  // Transition state and notify the caller (only on real transitions).
  const updateState = useCallback(
    (newState) => {
      setRecordingState((oldState) => {
        if (oldState !== newState) {
          onStateChange?.({ newState, oldState });
        }
        return newState;
      });
    },
    [onStateChange]
  );

  // Record an error, notify the caller, and fall back to "ready" — unless we
  // are "idle" or "permissionDenied", which are kept as-is.
  const handleError = useCallback(
    (message, details) => {
      setErrorMessage(message);
      onError?.({ message, details });
      console.error("AudioRecorder Hook Error:", message, details);
      setRecordingState((currentState) => {
        if (currentState !== "permissionDenied" && currentState !== "idle") {
          onStateChange?.({ newState: "ready", oldState: currentState });
          return "ready";
        }
        return currentState;
      });
    },
    [onError, onStateChange]
  );

  // Ask the browser for microphone access. Resolves true when granted; also
  // lazily creates the shared AudioContext used later for WAV conversion.
  const requestPermission = useCallback(async () => {
    updateState("requestingPermission");
    setErrorMessage(null);
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
      handleError("MediaDevices API not supported.");
      updateState("permissionDenied");
      setIsMicrophoneAvailable(false);
      return false;
    }
    setIsMicrophoneAvailable(true);

    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaStreamRef.current = stream;
      if (!audioContextRef.current) {
        try {
          audioContextRef.current = new (window.AudioContext ||
            window.webkitAudioContext)();
        } catch (e) {
          handleError(
            "AudioContext could not be created during permission request.",
            e
          );
          updateState("permissionDenied");
          return false;
        }
      }
      updateState("ready");
      return true;
    } catch (err) {
      if (
        err instanceof Error &&
        (err.name === "NotAllowedError" || err.name === "PermissionDeniedError")
      ) {
        handleError(
          "Microphone permission denied. Please enable it in your browser settings."
        );
      } else {
        handleError("Error accessing microphone.", err);
      }
      updateState("permissionDenied");
      return false;
    }
  }, [handleError, updateState]);

  // Timer management — keeps both the `duration` state (for rendering) and
  // `durationRef` (for async callbacks) in sync.
  const startTimer = () => {
    if (timerIntervalRef.current) clearInterval(timerIntervalRef.current);
    timerIntervalRef.current = setInterval(() => {
      setDuration((prev) => {
        const newTime = prev + 1;
        durationRef.current = newTime;
        onTimerUpdate?.(newTime);
        return newTime;
      });
    }, 1000);
  };
  const stopTimer = () => {
    if (timerIntervalRef.current) clearInterval(timerIntervalRef.current);
    timerIntervalRef.current = null;
  };
  const resetTimer = () => {
    stopTimer();
    setDuration(0);
    durationRef.current = 0;
    onTimerUpdate?.(0);
  };

  // Centralized reset: stop any active recorder/stream, release the object
  // URL, clear all derived state, then re-derive permission status.
  const performReset = useCallback(() => {
    if (
      mediaRecorderRef.current &&
      mediaRecorderRef.current.state !== "inactive"
    ) {
      mediaRecorderRef.current.stop();
    }
    mediaStreamRef.current?.getTracks().forEach((track) => track.stop());
    mediaStreamRef.current = null;

    if (audioURL) URL.revokeObjectURL(audioURL);
    setAudioURL("");
    setFinalWavBlob(null);
    setAudioChunks([]);
    accumulatedChunksRef.current = [];
    resetTimer();
    setErrorMessage(null);

    updateState("permissionNeeded");

    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      setIsMicrophoneAvailable(true);
      if (navigator.permissions && navigator.permissions.query) {
        navigator.permissions
          .query({ name: "microphone" })
          .then((status) => {
            if (status.state === "granted") updateState("ready");
            else if (status.state === "denied") {
              updateState("permissionDenied");
              setErrorMessage(
                (prev) => prev || "Microphone permission is denied."
              );
            } else updateState("permissionNeeded");
          })
          .catch(() => updateState("permissionNeeded"));
      } else updateState("permissionNeeded");
    } else {
      setIsMicrophoneAvailable(false);
      updateState("permissionDenied"); // If no media devices, it's effectively denied.
      setErrorMessage("MediaDevices API not supported on this browser.");
    }
  }, [audioURL, updateState]);

  // Initialize on mount: detect API availability, reset to a clean state,
  // eagerly create the AudioContext, and subscribe to permission changes.
  useEffect(() => {
    setIsMicrophoneAvailable(
      !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
    );
    performReset();

    if (
      !audioContextRef.current &&
      navigator.mediaDevices &&
      navigator.mediaDevices.getUserMedia
    ) {
      try {
        audioContextRef.current = new (window.AudioContext ||
          window.webkitAudioContext)();
      } catch (e) {
        handleError(
          "AudioContext could not be created on mount. WAV conversion might fail.",
          e
        );
      }
    }

    let permStatus = null;
    const handlePermissionChange = () => {
      if (permStatus) {
        if (permStatus.state === "granted") {
          updateState("ready");
          if (!audioContextRef.current) {
            try {
              audioContextRef.current = new (window.AudioContext ||
                window.webkitAudioContext)();
            } catch (e) {
              handleError(
                "AudioContext could not be created after permission grant.",
                e
              );
            }
          }
        } else if (permStatus.state === "denied") {
          updateState("permissionDenied");
          handleError("Microphone permission status changed to denied.");
        } else {
          updateState("permissionNeeded");
        }
      }
    };

    if (navigator.permissions && navigator.permissions.query) {
      navigator.permissions
        .query({ name: "microphone" })
        .then((status) => {
          permStatus = status;
          handlePermissionChange();
          status.onchange = handlePermissionChange;
        })
        .catch(() => updateState("permissionNeeded"));
    }

    // Cleanup: stop the timer, revoke the object URL, stop all tracks and
    // unsubscribe from permission-change notifications.
    return () => {
      if (timerIntervalRef.current) clearInterval(timerIntervalRef.current);
      if (audioURL) URL.revokeObjectURL(audioURL);
      mediaStreamRef.current?.getTracks().forEach((track) => track.stop());
      mediaRecorderRef.current?.stream
        ?.getTracks()
        .forEach((track) => track.stop());
      if (permStatus) permStatus.onchange = null;
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []); // performReset, handleError, updateState are stable.

  // Begin a new recording: (re)acquire the stream if needed, wire up the
  // MediaRecorder handlers (including WAV conversion on stop), start timing.
  const start = useCallback(async () => {
    if (
      !["ready", "finished", "idle", "permissionNeeded"].includes(
        recordingState
      )
    ) {
      handleError(`Cannot start recording in state: ${recordingState}`);
      return;
    }

    // Discard any previous take before starting fresh.
    setErrorMessage(null);
    setFinalWavBlob(null);
    if (audioURL) {
      URL.revokeObjectURL(audioURL);
      setAudioURL("");
    }
    accumulatedChunksRef.current = [];
    setAudioChunks([]);
    resetTimer();

    let stream = mediaStreamRef.current;
    if (
      !stream ||
      stream.getAudioTracks().every((track) => track.readyState === "ended")
    ) {
      const permissionGranted = await requestPermission();
      if (!permissionGranted) return;
      stream = mediaStreamRef.current;
    }
    if (!stream) {
      handleError("Microphone stream not available.");
      updateState("permissionDenied");
      return;
    }
    if (!audioContextRef.current) {
      try {
        audioContextRef.current = new (window.AudioContext ||
          window.webkitAudioContext)();
      } catch (e) {
        handleError(
          "AudioContext could not be initialized before starting recording.",
          e
        );
        updateState("permissionDenied");
        return;
      }
    }

    if (stream && audioContextRef.current) {
      onStreamReady?.({ stream, audioContext: audioContextRef.current });
    } else {
      handleError(
        "Stream or AudioContext not available for onStreamReady callback."
      );
      return;
    }

    try {
      const options = {
        mimeType: config?.mimeType || "audio/webm",
        audioBitsPerSecond: config?.audioBitsPerSecond,
      };
      mediaRecorderRef.current = new MediaRecorder(stream, options);

      mediaRecorderRef.current.ondataavailable = (event) => {
        if (event.data.size > 0) {
          accumulatedChunksRef.current.push(event.data);
        }
      };

      mediaRecorderRef.current.onstop = async () => {
        updateState("processing");
        const allChunks = [...accumulatedChunksRef.current];
        setAudioChunks(allChunks);
        // Read the live elapsed time from the ref — the `duration` state
        // captured by this closure would be stale (bug fixed here).
        const elapsed = durationRef.current;

        const recordedBlob = new Blob(allChunks, {
          type: mediaRecorderRef.current?.mimeType || "audio/webm",
        });

        if (!audioContextRef.current) {
          handleError(
            "AudioContext not available for WAV conversion. Providing raw recording."
          );
          const rawUrl = URL.createObjectURL(recordedBlob);
          setAudioURL(rawUrl);
          onRecordingComplete?.({
            wavBlob: null,
            audioChunks: allChunks,
            duration: elapsed,
          });
          updateState("finished");
          return;
        }

        try {
          const arrayBuffer = await recordedBlob.arrayBuffer();
          const audioBuffer = await audioContextRef.current.decodeAudioData(
            arrayBuffer
          );
          const wavBlob = audioBufferToWav(audioBuffer);
          setFinalWavBlob(wavBlob);
          const newAudioURL = URL.createObjectURL(wavBlob);
          setAudioURL(newAudioURL);
          onRecordingComplete?.({
            wavBlob,
            audioChunks: allChunks,
            duration: elapsed,
          });
          updateState("finished");
        } catch (conversionError) {
          // WAV conversion failed — fall back to the raw recorded format.
          handleError(
            "Error converting audio to WAV. Providing raw recording.",
            conversionError
          );
          const originalUrl = URL.createObjectURL(recordedBlob);
          setAudioURL(originalUrl);
          setFinalWavBlob(null);
          onRecordingComplete?.({
            wavBlob: null,
            audioChunks: allChunks,
            duration: elapsed,
          });
          updateState("finished");
        }
      };

      mediaRecorderRef.current.onerror = (event) => {
        handleError("MediaRecorder error.", event);
        stopTimer();
      };

      mediaRecorderRef.current.start();
      updateState("recording");
      startTimer();
    } catch (err) {
      handleError("Failed to start recording.", err);
    }
  }, [
    recordingState,
    audioURL,
    requestPermission,
    onStreamReady,
    config,
    handleError,
    updateState,
    onRecordingComplete,
  ]);

  // Pause an in-progress recording (timer stops too).
  const pause = useCallback(() => {
    if (
      recordingState === "recording" &&
      mediaRecorderRef.current?.state === "recording"
    ) {
      mediaRecorderRef.current.pause();
      updateState("paused");
      stopTimer();
    }
  }, [recordingState, updateState]);

  // Resume a paused recording (timer continues from where it stopped).
  const resume = useCallback(() => {
    if (
      recordingState === "paused" &&
      mediaRecorderRef.current?.state === "paused"
    ) {
      mediaRecorderRef.current.resume();
      updateState("recording");
      startTimer();
    }
  }, [recordingState, updateState]);

  // Stop recording; the MediaRecorder's onstop handler finishes processing.
  const stop = useCallback(async () => {
    if (
      (recordingState === "recording" || recordingState === "paused") &&
      mediaRecorderRef.current?.state !== "inactive"
    ) {
      updateState("stopping");
      mediaRecorderRef.current?.stop();
      stopTimer();
    }
  }, [recordingState, updateState]);

  return {
    recordingState,
    duration,
    audioChunks,
    finalWavBlob,
    audioURL,
    errorMessage,
    isMicrophoneAvailable,
    actions: {
      start,
      pause,
      resume,
      stop,
      requestPermission,
      reset: performReset,
    },
  };
}
|
|
543
|
+
|
|
544
|
+
// Main App component - Now uses the useAudioRecorder hook and builds its own UI
|
|
545
|
+
// Demo component exercising useAudioRecorder: wires every hook callback to
// console logging and renders a full control surface (permission prompts,
// record/pause/resume/stop buttons, playback and download of the result).
export function RecorderTestster() {
  const {
    recordingState,
    duration,
    // audioChunks, // Available if needed for advanced use
    finalWavBlob,
    audioURL,
    errorMessage,
    isMicrophoneAvailable,
    actions,
  } = useAudioRecorder({
    onStateChange: (payload) =>
      console.log(`State Change: ${payload.oldState} -> ${payload.newState}`),
    onTimerUpdate: (time) => console.log(`Timer Update: ${time}s`),
    onRecordingComplete: (payload) => {
      console.log("App: Recording Complete", payload);
      if (payload.wavBlob)
        console.log("App: WAV Blob size:", payload.wavBlob.size);
    },
    onError: (error) => console.error("App: Recorder Error", error),
    onStreamReady: (payload) => {
      console.log("App: Stream Ready", payload);
      // Example: You could initialize WaveSurfer here with payload.stream and payload.audioContext
    },
    config: { mimeType: "audio/webm;codecs=opus" },
  });

  // Render elapsed seconds as zero-padded MM:SS.
  const formatDurationForDisplay = (seconds) => {
    const minutes = Math.floor(seconds / 60);
    const remainingSeconds = seconds % 60;
    return `${String(minutes).padStart(2, "0")}:${String(
      remainingSeconds
    ).padStart(2, "0")}`;
  };

  // Cleanup for audioURL when App unmounts or audioURL changes
  useEffect(() => {
    return () => {
      if (audioURL) {
        URL.revokeObjectURL(audioURL);
      }
    };
  }, [audioURL]);

  return (
    <div className="min-h-screen bg-gradient-to-br from-slate-900 to-slate-800 text-white flex flex-col items-center justify-center p-4 font-sans">
      <header className="mb-8 text-center">
        <h1 className="text-4xl sm:text-5xl font-bold mb-2 bg-clip-text text-transparent bg-gradient-to-r from-sky-400 to-blue-500">
          Headless Audio Recorder Demo
        </h1>
        <p className="text-slate-400 text-lg">
          UI built in App component using the{" "}
          <code className="bg-slate-700 px-1 rounded">useAudioRecorder</code>{" "}
          hook.
        </p>
      </header>

      {/* UI Section built using the hook's state and actions */}
      <div className="w-full max-w-lg bg-slate-800 shadow-2xl rounded-xl p-6 md:p-8 space-y-6">
        {/* Current hook state (color-coded) and elapsed-time readout */}
        <div className="text-center mb-4">
          <h2 className="text-3xl font-semibold text-sky-400 mb-2">
            Recorder Controls
          </h2>
          <p className="text-sm text-slate-400">
            Hook State:{" "}
            <span
              className={`font-semibold ${
                recordingState === "recording"
                  ? "text-rose-400 animate-pulse"
                  : recordingState === "paused"
                  ? "text-yellow-400"
                  : recordingState === "processing" ||
                    recordingState === "stopping"
                  ? "text-sky-400 animate-pulse"
                  : recordingState === "finished"
                  ? "text-green-400"
                  : recordingState === "permissionDenied"
                  ? "text-red-400"
                  : "text-slate-300"
              }`}
            >
              {recordingState}
            </span>
          </p>
          <p className="text-lg text-slate-200 mt-1">
            {formatDurationForDisplay(duration)}
          </p>
        </div>

        {/* Hard failure: browser lacks the MediaDevices API entirely */}
        {!isMicrophoneAvailable && (
          <div
            className="bg-red-700/50 border border-red-600 text-red-200 px-4 py-3 rounded-lg flex items-center"
            role="alert"
          >
            <AlertTriangle size={20} className="mr-3 flex-shrink-0" />
            <span>
              Microphone/MediaDevices API not available on this browser.
            </span>
          </div>
        )}

        {/* Generic error banner (suppressed while waiting for permission) */}
        {errorMessage && recordingState !== "permissionNeeded" && (
          <div
            className="bg-red-500/20 border border-red-500 text-red-300 px-4 py-3 rounded-lg flex items-center"
            role="alert"
          >
            <AlertTriangle size={20} className="mr-3 flex-shrink-0" />
            <span>{errorMessage}</span>
          </div>
        )}

        {/* Prompt the user to grant microphone access */}
        {isMicrophoneAvailable && recordingState === "permissionNeeded" && (
          <div className="text-center p-4 bg-yellow-800 bg-opacity-50 border border-yellow-700 rounded-lg space-y-3">
            <div className="flex items-center justify-center text-yellow-300">
              <AlertTriangle size={24} className="mr-2" />
              <h3 className="text-lg font-semibold">
                Microphone Access Required
              </h3>
            </div>
            <p className="text-yellow-200 text-sm">
              To record audio, this application needs access to your microphone.
              Please grant permission when prompted by your browser.
            </p>
            <button
              onClick={actions.requestPermission}
              className="w-full flex items-center justify-center px-4 py-3 bg-yellow-500 hover:bg-yellow-600 text-slate-900 font-semibold rounded-lg shadow-md transition-colors duration-150 focus:outline-none focus:ring-2 focus:ring-yellow-400"
            >
              <CheckCircle size={20} className="mr-2" /> Grant Microphone
              Permission
            </button>
          </div>
        )}

        {/* Permission was denied: explain and offer a reset/re-check */}
        {recordingState === "permissionDenied" && (
          <div className="text-center p-4 bg-red-800 bg-opacity-70 border border-red-700 rounded-lg">
            <AlertTriangle size={32} className="mx-auto mb-2 text-red-300" />
            <p className="text-red-200 mb-1 font-semibold">
              Microphone Access Denied
            </p>
            <p className="text-sm text-red-300 mb-3">
              {errorMessage &&
              errorMessage !== "Microphone permission is denied."
                ? errorMessage
                : "Please enable microphone permissions in your browser's site settings."}
            </p>
            <button
              onClick={actions.reset}
              className="mt-2 px-4 py-2 bg-slate-600 hover:bg-slate-500 text-white font-semibold rounded-lg shadow-md transition-colors text-sm"
            >
              <RefreshCw size={16} className="mr-2 inline" />
              Try Reset & Re-check Permission
            </button>
          </div>
        )}

        {/* Main transport controls, shown only in operable states */}
        {["ready", "recording", "paused", "finished"].includes(
          recordingState
        ) && (
          <div className="space-y-4">
            <div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
              {recordingState !== "recording" &&
                recordingState !== "paused" && (
                  <button
                    onClick={actions.start}
                    disabled={
                      recordingState === "processing" ||
                      recordingState === "stopping" ||
                      recordingState === "permissionDenied"
                    }
                    className="w-full flex items-center justify-center px-6 py-3 bg-sky-500 hover:bg-sky-600 disabled:bg-slate-600 text-white font-semibold rounded-lg shadow-md transition-all"
                  >
                    <Mic size={20} className="mr-2" /> Start Recording
                  </button>
                )}

              {recordingState === "recording" && (
                <button
                  onClick={actions.pause}
                  className="w-full flex items-center justify-center px-6 py-3 bg-yellow-500 hover:bg-yellow-600 text-slate-900 font-semibold rounded-lg shadow-md transition-all"
                >
                  <Pause size={20} className="mr-2" /> Pause
                </button>
              )}

              {recordingState === "paused" && (
                <button
                  onClick={actions.resume}
                  className="w-full flex items-center justify-center px-6 py-3 bg-green-500 hover:bg-green-600 text-white font-semibold rounded-lg shadow-md transition-all"
                >
                  <Play size={20} className="mr-2" /> Resume
                </button>
              )}

              {(recordingState === "recording" ||
                recordingState === "paused") && (
                <button
                  onClick={actions.stop}
                  className="w-full flex items-center justify-center px-6 py-3 bg-rose-500 hover:bg-rose-600 text-white font-semibold rounded-lg shadow-md transition-all"
                >
                  <StopCircle size={20} className="mr-2" /> Stop
                </button>
              )}
            </div>
            {/* Full reset is only offered while not actively recording */}
            {(recordingState === "ready" || recordingState === "finished") &&
              recordingState !== "recording" &&
              recordingState !== "paused" && (
                <button
                  onClick={actions.reset}
                  className="w-full flex items-center justify-center px-4 py-2 bg-slate-600 hover:bg-slate-500 text-white font-semibold rounded-lg shadow-md transition-colors text-sm"
                >
                  <RefreshCw size={16} className="mr-2" />
                  Reset Recorder
                </button>
              )}

            {/* Playback + download of the finished take (WAV, or raw when
                conversion failed and finalWavBlob is null) */}
            {audioURL && recordingState === "finished" && (
              <div className="mt-6 p-4 bg-slate-700 rounded-lg shadow">
                <h3 className="text-lg font-medium text-sky-300 mb-3">
                  Recording Complete {finalWavBlob ? "(WAV)" : "(Raw Format)"}
                </h3>
                {!finalWavBlob && (
                  <p className="text-sm text-slate-400 mb-2">
                    WAV conversion failed. Raw audio is available.
                  </p>
                )}
                <audio
                  controls
                  src={audioURL}
                  className="w-full rounded-md shadow-inner bg-slate-600"
                >
                  Your browser does not support the audio element.
                </audio>
                <a
                  href={audioURL}
                  download={`recording-${new Date()
                    .toISOString()
                    .slice(0, 19)
                    .replace(/:/g, "-")}.${finalWavBlob ? "wav" : "webm"}`}
                  className={`mt-4 w-full flex items-center justify-center px-6 py-3 text-white font-semibold rounded-lg shadow-md transition-colors ${
                    finalWavBlob
                      ? "bg-teal-500 hover:bg-teal-600"
                      : "bg-orange-500 hover:bg-orange-600"
                  }`}
                >
                  <Download size={20} className="mr-2" /> Download{" "}
                  {finalWavBlob ? "WAV" : "Raw Audio"}
                </a>
              </div>
            )}
          </div>
        )}
      </div>

      <footer className="mt-12 text-center text-slate-500 text-sm">
        <p>
          © {new Date().getFullYear()} Your Company. All rights reserved.
        </p>
      </footer>
    </div>
  );
}
|