sera-ai 1.0.7 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +145 -0
- package/dist/index.d.mts +15 -1
- package/dist/index.d.ts +15 -1
- package/dist/index.js +886 -0
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +887 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 import * as React3 from 'react';
 import React3__default, { useState, useRef, useCallback, useEffect } from 'react';
 import pRetry, { AbortError } from 'p-retry';
-import { AlertTriangle, Loader2, Square, Pause, Play, Mic, X, AlertCircle, Check } from 'lucide-react';
+import { AlertTriangle, Loader2, Square, Pause, Play, Mic, Download, X, AlertCircle, Check } from 'lucide-react';
 import { jsx, jsxs, Fragment } from 'react/jsx-runtime';

 // src/AudioRecorder.tsx
@@ -2828,6 +2828,7 @@ var useAudioRecorder = ({
   speciality,
   patientId,
   patientName,
+  patientHistory,
   selectedFormat = "json",
   skipDiarization = true,
   silenceRemoval = true,
@@ -3209,6 +3210,7 @@ var useAudioRecorder = ({
       formData.append("model", selectedModelRef.current);
       formData.append("doctorName", doctorName);
       formData.append("patientName", patientName || "");
+      if (patientHistory) formData.append("patientHistory", patientHistory);
       if (patientId) formData.append("patientId", patientId.toString());
       formData.append("removeSilence", removeSilenceRef.current.toString());
       formData.append("skipDiarization", skipDiarizationRef.current.toString());
@@ -3390,6 +3392,7 @@ var useAudioRecorder = ({
     skipDiarization,
     selectedFormat,
     patientName,
+    patientHistory,
     onTranscriptionComplete,
     speciality,
     removeSilence,
@@ -3448,6 +3451,7 @@ var useAudioRecorder = ({
       await createSession(localSessionId, {
         patientId,
         patientName: patientName || void 0,
+        patientHistory: patientHistory || void 0,
         speciality
       });
     }
@@ -3546,6 +3550,7 @@ var useAudioRecorder = ({
     createSession,
     patientId,
     patientName,
+    patientHistory,
     speciality,
     currentDeviceId
   ]);
@@ -4336,6 +4341,7 @@ var AudioRecorder = ({
   speciality,
   patientId,
   patientName,
+  patientHistory,
   onTranscriptionUpdate,
   onTranscriptionComplete,
   onSuccess,
@@ -4370,6 +4376,7 @@ var AudioRecorder = ({
     speciality,
     patientName,
     patientId,
+    patientHistory,
     onTranscriptionUpdate: (text, sessionId) => {
       console.log("onTranscriptionUpdate called with text:", text, "sessionId:", sessionId);
       if (text.length > 0) {
@@ -5071,7 +5078,885 @@ var AudioDictation = ({
 ) });
 };
 var AudioDictation_default = AudioDictation;
+var createAudioCaptureWorker = () => {
+  const workerCode = `
+    class AudioCaptureProcessor extends AudioWorkletProcessor {
+      constructor() {
+        super();
+        this._buffer = [];
+        this._isStopped = false;
+        this._isPaused = false;
+        this._chunkReady = false;
+        this._processingChunk = false;
+
+        this._audioLevelCheckInterval = 0;
+        this._audioLevelCheckFrequency = 128;
+        this._silentSampleCount = 0;
+        this._maxSilentSamples = 44100 * 30;
+        this._audioThreshold = 0.002;
+        this._hasDetectedAudio = false;
+        this._lastAudioTime = 0;
+        this._recordingStartTime = Date.now();
+        this._initialSilenceThreshold = 44100 * 10;
+        this._isInitialPhase = true;
+        this._bufferSize = 0;
+
+        this.port.onmessage = (event) => {
+          if (event.data.command === "stop") {
+            this._isStopped = true;
+            this._sendFinalChunk();
+          }
+
+          if (event.data.command === "getChunk") {
+            this._chunkReady = true;
+          }
+
+          if (event.data.command === "resetChunk") {
+            this._chunkReady = false;
+            this._processingChunk = false;
+            this._buffer = [];
+            this._bufferSize = 0;
+          }
+
+          if (event.data.command === "pause") {
+            this._isPaused = true;
+          }
+
+          if (event.data.command === "resume") {
+            this._isPaused = false;
+          }
+        };
+      }
+
+      _sendFinalChunk() {
+        if (this._buffer.length > 0) {
+          const flat = this._flattenBuffer();
+          this.port.postMessage({
+            command: "finalChunk",
+            audioBuffer: flat.buffer,
+            duration: this._bufferSize / 44100
+          }, [flat.buffer]);
+        } else {
+          // Send empty final chunk
+          const emptyBuffer = new Float32Array(1000);
+          this.port.postMessage({
+            command: "finalChunk",
+            audioBuffer: emptyBuffer.buffer,
+            duration: 0
+          }, [emptyBuffer.buffer]);
+        }
+        this._buffer = [];
+        this._bufferSize = 0;
+      }
+
+      _flattenBuffer() {
+        let totalLength = 0;
+        for (let i = 0; i < this._buffer.length; i++) {
+          totalLength += this._buffer[i].length;
+        }
+
+        const flat = new Float32Array(totalLength);
+        let offset = 0;
+        for (let i = 0; i < this._buffer.length; i++) {
+          flat.set(this._buffer[i], offset);
+          offset += this._buffer[i].length;
+        }
+        return flat;
+      }
+
+      process(inputs, outputs) {
+        if (this._isStopped || this._isPaused) {
+          return true;
+        }
+
+        const input = inputs[0];
+        if (input && input.length > 0) {
+          const samples = input[0];
+
+          // Calculate audio level
+          let audioLevel = 0;
+          for (let i = 0; i < samples.length; i++) {
+            audioLevel += Math.abs(samples[i]);
+          }
+          audioLevel /= samples.length;
+
+          // Send audio level updates
+          this._audioLevelCheckInterval++;
+          if (this._audioLevelCheckInterval >= this._audioLevelCheckFrequency) {
+            this.port.postMessage({
+              command: "audioLevel",
+              level: audioLevel,
+            });
+            this._audioLevelCheckInterval = 0;
+          }
+
+          // Check for audio activity
+          if (audioLevel > this._audioThreshold) {
+            this._hasDetectedAudio = true;
+            this._isInitialPhase = false;
+            this._silentSampleCount = 0;
+            this._lastAudioTime = Date.now();
+          } else {
+            this._silentSampleCount += samples.length;
+
+            if (this._isInitialPhase && this._silentSampleCount > this._initialSilenceThreshold) {
+              this.port.postMessage({
+                command: "noAudioDetected",
+                message: "No audio input detected after 10 seconds. Please check your microphone."
+              });
+              return true;
+            }
+          }
+
+          // Buffer the audio
+          this._buffer.push(new Float32Array(samples));
+          this._bufferSize += samples.length;
+
+          // Send chunk if ready
+          if (this._chunkReady && !this._processingChunk) {
+            this._processingChunk = true;
+
+            const flat = this._flattenBuffer();
+            this.port.postMessage({
+              command: "chunk",
+              audioBuffer: flat.buffer,
+              duration: this._bufferSize / 44100,
+              hasActivity: audioLevel > this._audioThreshold
+            }, [flat.buffer]);
+
+            this._buffer = [];
+            this._bufferSize = 0;
+            this._chunkReady = false;
+            this._processingChunk = false;
+          }
+        }
+
+        return true;
+      }
+    }
+
+    registerProcessor("audio-capture-processor", AudioCaptureProcessor);
+  `;
+  const blob = new Blob([workerCode], { type: "application/javascript" });
+  return URL.createObjectURL(blob);
+};
+var useAudioCapture = ({
+  onAudioChunk,
+  onAudioComplete,
+  onAudioFile,
+  silenceRemoval = false,
+  chunkDuration = 30,
+  // Default 30 seconds per chunk
+  format = "raw"
+}) => {
+  const [isRecording, setIsRecording] = useState(false);
+  const [isPaused, setIsPaused] = useState(false);
+  const [isProcessing, setIsProcessing] = useState(false);
+  const [error, setError] = useState(null);
+  const [currentDeviceId, setCurrentDeviceId] = useState(null);
+  const [availableDevices, setAvailableDevices] = useState([]);
+  const [audioLevel, setAudioLevel] = useState(0);
+  const [noAudioDetected, setNoAudioDetected] = useState(false);
+  const [recordingDuration, setRecordingDuration] = useState(0);
+  const [totalChunks, setTotalChunks] = useState(0);
+  const mediaStreamRef = useRef(null);
+  const processorRef = useRef(null);
+  const audioContextRef = useRef(null);
+  const chunkIntervalRef = useRef(null);
+  const recordingStartTimeRef = useRef(0);
+  const durationIntervalRef = useRef(null);
+  const sequenceCounterRef = useRef(0);
+  const allAudioChunksRef = useRef([]);
+  const {
+    removeSilence,
+    isLoaded,
+    isConverting,
+    progress,
+    statusMessage,
+    convertToWav
+  } = useFFmpegConverter_default();
+  const validateMicrophoneAccess = useCallback(async () => {
+    try {
+      const devices = await navigator.mediaDevices.enumerateDevices();
+      const audioInputDevices = devices.filter((device) => device.kind === "audioinput");
+      setAvailableDevices(audioInputDevices);
+      if (audioInputDevices.length === 0) {
+        throw new Error("No microphone devices detected. Please connect a microphone.");
+      }
+      const testStream = await navigator.mediaDevices.getUserMedia({
+        audio: {
+          deviceId: currentDeviceId ? { exact: currentDeviceId } : void 0
+        }
+      });
+      const audioTracks = testStream.getAudioTracks();
+      if (audioTracks.length > 0) {
+        const track = audioTracks[0];
+        const settings = track.getSettings();
+        setCurrentDeviceId(settings.deviceId || null);
+      }
+      testStream.getTracks().forEach((track) => track.stop());
+      return true;
+    } catch (error2) {
+      console.error("Microphone validation failed:", error2);
+      if (error2 instanceof Error) {
+        let errorMessage = "";
+        if (error2.name === "NotFoundError" || error2.name === "DevicesNotFoundError") {
+          errorMessage = "No microphone devices found. Please connect a microphone.";
+        } else if (error2.name === "NotAllowedError" || error2.name === "PermissionDeniedError") {
+          errorMessage = "Microphone permission denied. Please allow access and try again.";
+        } else if (error2.name === "NotReadableError" || error2.name === "TrackStartError") {
+          errorMessage = "Microphone is busy or unavailable. Please close other applications using the microphone.";
+        } else if (error2.name === "OverconstrainedError") {
+          errorMessage = "Selected microphone device is not available. Please select a different device.";
+        } else {
+          errorMessage = `Microphone error: ${error2.message}`;
+        }
+        setError(errorMessage);
+      } else {
+        setError("Microphone access error occurred.");
+      }
+      return false;
+    }
+  }, [currentDeviceId]);
+  const processAudioChunk = useCallback(async (audioData, sequence, isFinal) => {
+    try {
+      setIsProcessing(true);
+      let processedAudio = audioData;
+      if (silenceRemoval && isLoaded && typeof removeSilence === "function") {
+        console.log("Applying silence removal to audio chunk...");
+        const tempFile = float32ToWavFile(audioData);
+        const processedFile = await removeSilence(tempFile);
+        if (processedFile) {
+          const arrayBuffer = await processedFile.arrayBuffer();
+          const dataView = new DataView(arrayBuffer);
+          const samples = new Float32Array((arrayBuffer.byteLength - 44) / 2);
+          for (let i = 0; i < samples.length; i++) {
+            const int16 = dataView.getInt16(44 + i * 2, true);
+            samples[i] = int16 / (int16 < 0 ? 32768 : 32767);
+          }
+          processedAudio = samples;
+        }
+      }
+      if (!isFinal) {
+        allAudioChunksRef.current.push(processedAudio);
+      }
+      if (onAudioChunk) {
+        onAudioChunk(processedAudio, sequence, isFinal);
+      }
+      if (isFinal) {
+        const totalLength = allAudioChunksRef.current.reduce((sum, chunk) => sum + chunk.length, 0);
+        const finalAudio = new Float32Array(totalLength);
+        let offset = 0;
+        for (const chunk of allAudioChunksRef.current) {
+          finalAudio.set(chunk, offset);
+          offset += chunk.length;
+        }
+        if (onAudioComplete) {
+          onAudioComplete(finalAudio);
+        }
+        if (onAudioFile && format === "wav") {
+          const wavFile = await createWavFile(finalAudio);
+          onAudioFile(wavFile);
+        } else if (onAudioFile && format === "raw") {
+          const rawFile = createRawFile(finalAudio);
+          onAudioFile(rawFile);
+        }
+        allAudioChunksRef.current = [];
+      }
+    } catch (error2) {
+      console.error("Error processing audio chunk:", error2);
+      setError(`Audio processing failed: ${error2 instanceof Error ? error2.message : "Unknown error"}`);
+    } finally {
+      setIsProcessing(false);
+    }
+  }, [silenceRemoval, isLoaded, removeSilence, onAudioChunk, onAudioComplete, onAudioFile, format]);
+  const createWavFile = useCallback(async (samples) => {
+    if (isLoaded && typeof convertToWav === "function") {
+      const result = await convertToWav(samples);
+      return result || float32ToWavFile(samples);
+    } else {
+      return float32ToWavFile(samples);
+    }
+  }, [isLoaded, convertToWav]);
+  const createRawFile = useCallback((samples) => {
+    const timestamp = Date.now();
+    const filename = `audio-recording-${timestamp}.raw`;
+    const buffer = new ArrayBuffer(samples.byteLength);
+    const uint8Array = new Uint8Array(buffer);
+    const sourceArray = new Uint8Array(samples.buffer, samples.byteOffset, samples.byteLength);
+    uint8Array.set(sourceArray);
+    return new File([buffer], filename, {
+      type: "application/octet-stream",
+      lastModified: timestamp
+    });
+  }, []);
+  const float32ToWavFile = (samples) => {
+    const sampleRate = audioContextRef.current?.sampleRate || 44100;
+    const buffer = new ArrayBuffer(44 + samples.length * 2);
+    const view = new DataView(buffer);
+    const writeString = (offset2, str) => {
+      for (let i = 0; i < str.length; i++) {
+        view.setUint8(offset2 + i, str.charCodeAt(i));
+      }
+    };
+    writeString(0, "RIFF");
+    view.setUint32(4, 36 + samples.length * 2, true);
+    writeString(8, "WAVE");
+    writeString(12, "fmt ");
+    view.setUint32(16, 16, true);
+    view.setUint16(20, 1, true);
+    view.setUint16(22, 1, true);
+    view.setUint32(24, sampleRate, true);
+    view.setUint32(28, sampleRate * 2, true);
+    view.setUint16(32, 2, true);
+    view.setUint16(34, 16, true);
+    writeString(36, "data");
+    view.setUint32(40, samples.length * 2, true);
+    let offset = 44;
+    for (let i = 0; i < samples.length; i++) {
+      const sample = Math.max(-1, Math.min(1, samples[i]));
+      view.setInt16(offset, sample < 0 ? sample * 32768 : sample * 32767, true);
+      offset += 2;
+    }
+    const timestamp = Date.now();
+    const filename = `audio-recording-${timestamp}.wav`;
+    return new File([view], filename, {
+      type: "audio/wav",
+      lastModified: timestamp
+    });
+  };
+  const startRecording = useCallback(async () => {
+    try {
+      setError(null);
+      setNoAudioDetected(false);
+      const micValid = await validateMicrophoneAccess();
+      if (!micValid) return;
+      sequenceCounterRef.current = 0;
+      allAudioChunksRef.current = [];
+      setTotalChunks(0);
+      setRecordingDuration(0);
+      recordingStartTimeRef.current = Date.now();
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: {
+          deviceId: currentDeviceId ? { exact: currentDeviceId } : void 0,
+          echoCancellation: true,
+          noiseSuppression: true,
+          autoGainControl: true
+        }
+      });
+      mediaStreamRef.current = stream;
+      const audioContext = new (window.AudioContext || window.webkitAudioContext)();
+      const processorUrl = createAudioCaptureWorker();
+      await audioContext.audioWorklet.addModule(processorUrl);
+      URL.revokeObjectURL(processorUrl);
+      const processor = new AudioWorkletNode(audioContext, "audio-capture-processor");
+      processor.port.onmessage = (event) => {
+        if (event.data.command === "chunk") {
+          const audioData = new Float32Array(event.data.audioBuffer);
+          const sequence = sequenceCounterRef.current++;
+          setTotalChunks(sequence + 1);
+          processAudioChunk(audioData, sequence, false);
+        } else if (event.data.command === "finalChunk") {
+          const audioData = new Float32Array(event.data.audioBuffer);
+          const sequence = sequenceCounterRef.current++;
+          processAudioChunk(audioData, sequence, true);
+        } else if (event.data.command === "audioLevel") {
+          setAudioLevel(event.data.level);
+        } else if (event.data.command === "noAudioDetected") {
+          setNoAudioDetected(true);
+          setError(event.data.message);
+        }
+      };
+      const source = audioContext.createMediaStreamSource(stream);
+      source.connect(processor);
+      audioContextRef.current = audioContext;
+      processorRef.current = processor;
+      setIsRecording(true);
+      const chunkIntervalId = window.setInterval(() => {
+        processor.port.postMessage({ command: "getChunk" });
+      }, chunkDuration * 1e3);
+      chunkIntervalRef.current = chunkIntervalId;
+      const durationIntervalId = window.setInterval(() => {
+        const elapsed = (Date.now() - recordingStartTimeRef.current) / 1e3;
+        setRecordingDuration(elapsed);
+      }, 100);
+      durationIntervalRef.current = durationIntervalId;
+    } catch (err) {
+      console.error("Recording start failed:", err);
+      setError(err instanceof Error ? err.message : "Failed to start recording");
+    }
+  }, [validateMicrophoneAccess, currentDeviceId, chunkDuration, processAudioChunk]);
+  const stopRecording = useCallback(() => {
+    console.log("Stopping recording...");
+    if (chunkIntervalRef.current) {
+      clearInterval(chunkIntervalRef.current);
+      chunkIntervalRef.current = null;
+    }
+    if (durationIntervalRef.current) {
+      clearInterval(durationIntervalRef.current);
+      durationIntervalRef.current = null;
+    }
+    if (processorRef.current) {
+      processorRef.current.port.postMessage({ command: "stop" });
+    }
+    if (mediaStreamRef.current) {
+      mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+      mediaStreamRef.current = null;
+    }
+    if (audioContextRef.current) {
+      audioContextRef.current.close();
+      audioContextRef.current = null;
+    }
+    processorRef.current = null;
+    setIsRecording(false);
+  }, []);
+  const pauseRecording = useCallback(() => {
+    if (!isRecording || isPaused) return;
+    if (chunkIntervalRef.current) {
+      clearInterval(chunkIntervalRef.current);
+      chunkIntervalRef.current = null;
+    }
+    if (durationIntervalRef.current) {
+      clearInterval(durationIntervalRef.current);
+      durationIntervalRef.current = null;
+    }
+    setIsPaused(true);
+    if (processorRef.current) {
+      processorRef.current.port.postMessage({ command: "pause" });
+    }
+    setIsRecording(false);
+  }, [isRecording, isPaused]);
+  const resumeRecording = useCallback(() => {
+    if (!isPaused) return;
+    setIsPaused(false);
+    setIsRecording(true);
+    setNoAudioDetected(false);
+    if (processorRef.current) {
+      processorRef.current.port.postMessage({ command: "resume" });
+    }
+    const chunkIntervalId = window.setInterval(() => {
+      processorRef.current?.port.postMessage({ command: "getChunk" });
+    }, chunkDuration * 1e3);
+    chunkIntervalRef.current = chunkIntervalId;
+    const durationIntervalId = window.setInterval(() => {
+      const elapsed = (Date.now() - recordingStartTimeRef.current) / 1e3;
+      setRecordingDuration(elapsed);
+    }, 100);
+    durationIntervalRef.current = durationIntervalId;
+  }, [isPaused, chunkDuration]);
+  const selectMicrophone = useCallback(async (deviceId) => {
+    try {
+      setCurrentDeviceId(deviceId);
+      if (isRecording) {
+        await stopRecording();
+        await startRecording();
+      }
+    } catch (error2) {
+      console.error("Device selection failed:", error2);
+      setError("Failed to switch to selected microphone.");
+    }
+  }, [isRecording, stopRecording, startRecording]);
+  return {
+    mediaStreamRef,
+    startRecording,
+    stopRecording,
+    pauseRecording,
+    resumeRecording,
+    isRecording,
+    isPaused,
+    isProcessing,
+    error,
+    availableDevices,
+    currentDeviceId,
+    selectMicrophone,
+    validateMicrophoneAccess,
+    audioLevel,
+    noAudioDetected,
+    isConverting,
+    progress,
+    statusMessage,
+    recordingDuration,
+    totalChunks
+  };
+};
+var useAudioCapture_default = useAudioCapture;
+var tailwindStyles2 = `
+  .space-y-4 > :not([hidden]) ~ :not([hidden]) { margin-top: 1rem; }
+  .bg-orange-50 { background-color: rgb(255 247 237); }
+  .bg-blue-900 { background-color: rgb(30 58 138); }
+  .text-yellow-200 { color: rgb(254 240 138); }
+  .hover\\:bg-blue-700:hover { background-color: rgb(29 78 216); }
+  .dark .dark\\:bg-orange-900\\/20 { background-color: rgb(194 65 12 / 0.2); }
+  .border { border-width: 1px; }
+  .border-orange-200 { border-color: rgb(254 215 170); }
+  .dark .dark\\:border-orange-800 { border-color: rgb(154 52 18); }
+  .rounded-lg { border-radius: 0.5rem; }
+  .p-4 { padding: 1rem; }
+  .flex { display: flex; }
+  .items-start { align-items: flex-start; }
+  .items-center { align-items: center; }
+  .justify-center { justify-content: center; }
+  .justify-between { justify-content: space-between; }
+  .flex-shrink-0 { flex-shrink: 0; }
+  .h-5 { height: 1.25rem; }
+  .w-5 { width: 1.25rem; }
+  .text-orange-400 { color: rgb(251 146 60); }
+  .ml-3 { margin-left: 0.75rem; }
+  .mr-2 { margin-right: 0.5rem; }
+  .flex-1 { flex: 1 1 0%; }
+  .text-sm { font-size: 0.875rem; line-height: 1.25rem; }
+  .font-medium { font-weight: 500; }
+  .text-orange-800 { color: rgb(154 52 18); }
+  .dark .dark\\:text-orange-200 { color: rgb(254 215 170); }
+  .mt-2 { margin-top: 0.5rem; }
+  .text-orange-700 { color: rgb(194 65 12); }
+  .dark .dark\\:text-orange-300 { color: rgb(253 186 116); }
+  .list-disc { list-style-type: disc; }
+  .list-inside { list-style-position: inside; }
+  .bg-red-50 { background-color: rgb(254 242 242); }
+  .dark .dark\\:bg-red-900\\/20 { background-color: rgb(127 29 29 / 0.2); }
+  .border-red-200 { border-color: rgb(254 202 202); }
+  .dark .dark\\:border-red-800 { border-color: rgb(153 27 27); }
+  .text-red-400 { color: rgb(248 113 113); }
+  .text-red-800 { color: rgb(153 27 27); }
+  .dark .dark\\:text-red-200 { color: rgb(254 202 202); }
+  .text-red-700 { color: rgb(185 28 28); }
+  .dark .dark\\:text-red-300 { color: rgb(252 165 165); }
+  .mt-4 { margin-top: 1rem; }
+  .bg-red-600 { background-color: rgb(220 38 38); }
+  .hover\\:bg-red-700:hover { background-color: rgb(185 28 28); }
+  .text-white { color: rgb(255 255 255); }
+  .px-3 { padding-left: 0.75rem; padding-right: 0.75rem; }
+  .py-1 { padding-top: 0.25rem; padding-bottom: 0.25rem; }
+  .py-2 { padding-top: 0.5rem; padding-bottom: 0.5rem; }
+  .px-4 { padding-left: 1rem; padding-right: 1rem; }
+  .rounded { border-radius: 0.25rem; }
+  .rounded-full { border-radius: 9999px; }
+  .transition-colors { transition-property: color, background-color, border-color, text-decoration-color, fill, stroke; transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); transition-duration: 150ms; }
+  .bg-yellow-50 { background-color: rgb(254 252 232); }
+  .dark .dark\\:bg-yellow-900\\/20 { background-color: rgb(133 77 14 / 0.2); }
+  .border-yellow-200 { border-color: rgb(254 240 138); }
+  .dark .dark\\:border-yellow-800 { border-color: rgb(133 77 14); }
+  .text-yellow-400 { color: rgb(250 204 21); }
+  .text-yellow-800 { color: rgb(133 77 14); }
+  .dark .dark\\:text-yellow-200 { color: rgb(254 240 138); }
+  .text-yellow-700 { color: rgb(161 98 7); }
+  .dark .dark\\:text-yellow-300 { color: rgb(253 224 71); }
+  .space-x-3 > :not([hidden]) ~ :not([hidden]) { margin-left: 0.75rem; }
+  .space-x-1 > :not([hidden]) ~ :not([hidden]) { margin-left: 0.25rem; }
+  .space-x-2 > :not([hidden]) ~ :not([hidden]) { margin-left: 0.5rem; }
+  .bg-yellow-600 { background-color: rgb(202 138 4); }
+  .hover\\:bg-yellow-700:hover { background-color: rgb(161 98 7); }
+  .disabled\\:opacity-50:disabled { opacity: 0.5; }
+  .h-3 { height: 0.75rem; }
+  .w-3 { width: 0.75rem; }
+  .animate-spin { animation: spin 1s linear infinite; }
+  @keyframes spin { from { transform: rotate(0deg); } to { transform: rotate(360deg); } }
+  .bg-gray-600 { background-color: rgb(75 85 99); }
+  .hover\\:bg-gray-700:hover { background-color: rgb(55 65 81); }
+  .bg-blue-50 { background-color: rgb(239 246 255); }
+  .dark .dark\\:bg-blue-900\\/20 { background-color: rgb(30 58 138 / 0.2); }
+  .border-blue-200 { border-color: rgb(191 219 254); }
+  .dark .dark\\:border-blue-800 { border-color: rgb(30 64 175); }
+  .text-blue-600 { color: rgb(37 99 235); }
+  .block { display: block; }
+  .text-blue-700 { color: rgb(29 78 216); }
+  .dark .dark\\:text-blue-300 { color: rgb(147 197 253); }
+  .text-xs { font-size: 0.75rem; line-height: 1rem; }
+  .dark .dark\\:text-blue-400 { color: rgb(96 165 250); }
+  .mt-1 { margin-top: 0.25rem; }
+  .bg-teal-600 { background-color: rgb(13 148 136); }
+  .hover\\:bg-teal-700:hover { background-color: rgb(15 118 110); }
+  .bg-green-600 { background-color: rgb(22 163 74); }
+  .hover\\:bg-green-700:hover { background-color: rgb(21 128 61); }
+  .bg-yellow-500 { background-color: rgb(234 179 8); }
+  .hover\\:bg-yellow-600:hover { background-color: rgb(202 138 4); }
+  .bg-gradient-to-r { background-image: linear-gradient(to right, var(--tw-gradient-stops)); }
+  .from-purple-400 { --tw-gradient-from: #c084fc; --tw-gradient-to: rgb(192 132 252 / 0); --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); }
+  .to-blue-500 { --tw-gradient-to: #3b82f6; }
+  .hover\\:from-purple-500:hover { --tw-gradient-from: #a855f7; --tw-gradient-to: rgb(168 85 247 / 0); --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); }
+  .hover\\:to-blue-600:hover { --tw-gradient-to: #2563eb; }
+  .opacity-50 { opacity: 0.5; }
+  .cursor-not-allowed { cursor: not-allowed; }
+  .text-gray-600 { color: rgb(75 85 99); }
+  .dark .dark\\:text-gray-300 { color: rgb(209 213 219); }
+  .bg-gray-100 { background-color: rgb(243 244 246); }
+  .dark .dark\\:bg-gray-800 { background-color: rgb(31 41 55); }
+  .border-gray-200 { border-color: rgb(229 231 235); }
+  .dark .dark\\:border-gray-700 { border-color: rgb(55 65 81); }
+  .cursor-pointer { cursor: pointer; }
+  .hover\\:bg-gray-200:hover { background-color: rgb(229 231 235); }
+  .dark .dark\\:hover\\:bg-gray-700:hover { background-color: rgb(55 65 81); }
+`;
+var stylesInjected2 = false;
+var injectTailwindStyles2 = () => {
+  if (!stylesInjected2 && typeof document !== "undefined") {
+    const styleElement = document.createElement("style");
+    styleElement.textContent = tailwindStyles2;
+    document.head.appendChild(styleElement);
+    stylesInjected2 = true;
+  }
+};
+var AudioCapture = ({
+  onAudioChunk,
+  onAudioComplete,
+  onAudioFile,
+  silenceRemoval = false,
+  chunkDuration = 30,
+  format = "raw",
+  showDownload = false,
+  className = "",
+  style
+}) => {
+  React3.useEffect(() => {
+    injectTailwindStyles2();
+  }, []);
+  const {
+    mediaStreamRef,
+    startRecording,
+    stopRecording,
+    pauseRecording,
+    resumeRecording,
+    isRecording,
+    isPaused,
+    isProcessing,
+    error,
+    availableDevices,
+    currentDeviceId,
+    selectMicrophone,
+    validateMicrophoneAccess,
+    noAudioDetected,
+    isConverting,
+    progress,
+    statusMessage,
+    recordingDuration,
+    totalChunks
+  } = useAudioCapture_default({
+    onAudioChunk,
+    onAudioComplete,
+    onAudioFile,
+    silenceRemoval,
+    chunkDuration,
+    format
+  });
+  const [isDisabled, setIsDisabled] = React3.useState(false);
+  const [lastAudioFile, setLastAudioFile] = React3.useState(null);
+  const [toast, setToast] = React3.useState({
+    show: false,
+    message: "",
+    type: "success"
+  });
+  React3.useEffect(() => {
+    if (onAudioFile) {
+      const originalCallback = onAudioFile;
+      onAudioFile = (file) => {
+        setLastAudioFile(file);
+        originalCallback(file);
+      };
+    }
+  }, [onAudioFile]);
+  const handleStopClick = () => {
+    setIsDisabled(true);
+    stopRecording();
+  };
+  const handleStartRecording = () => {
+    setIsDisabled(false);
+    setLastAudioFile(null);
+    startRecording();
+  };
+  const handleDownload = () => {
+    if (lastAudioFile) {
+      const url = URL.createObjectURL(lastAudioFile);
+      const a = document.createElement("a");
+      a.href = url;
+      a.download = lastAudioFile.name;
+      document.body.appendChild(a);
+      a.click();
+      document.body.removeChild(a);
+      URL.revokeObjectURL(url);
+      setToast({
+        show: true,
+        message: `Downloaded ${lastAudioFile.name}`,
+        type: "success"
+      });
+    }
+  };
+  React3.useEffect(() => {
+    if (error) {
+      setToast({ show: true, message: error, type: "error" });
+    }
+  }, [error]);
+  const closeToast = () => {
+    setToast({ ...toast, show: false });
+  };
+  const formatDuration = (seconds) => {
+    const minutes = Math.floor(seconds / 60);
+    const secs = Math.floor(seconds % 60);
+    return `${minutes}:${secs.toString().padStart(2, "0")}`;
+  };
+  const isMicrophoneError = error && (error.toLowerCase().includes("microphone") || error.toLowerCase().includes("not found") || error.toLowerCase().includes("no audio") || error.toLowerCase().includes("devices not found") || error.toLowerCase().includes("access denied") || error.toLowerCase().includes("permission") || error.toLowerCase().includes("not allowed") || error.toLowerCase().includes("busy") || error.toLowerCase().includes("media devices not supported") || availableDevices.length === 0 || noAudioDetected);
+  if (noAudioDetected || error && error.includes("No audio input detected")) {
+    return /* @__PURE__ */ jsx("div", { className: "bg-orange-50 dark:bg-orange-900/20 border border-orange-200 dark:border-orange-800 rounded-lg p-4", children: /* @__PURE__ */ jsxs("div", { className: "flex items-start", children: [
+      /* @__PURE__ */ jsx("div", { className: "flex-shrink-0", children: /* @__PURE__ */ jsx(AlertTriangle, { className: "h-5 w-5 text-orange-400" }) }),
+      /* @__PURE__ */ jsxs("div", { className: "ml-3 flex-1", children: [
+        /* @__PURE__ */ jsx("h3", { className: "text-sm font-medium text-orange-800 dark:text-orange-200", children: "No Audio Input Detected" }),
+        /* @__PURE__ */ jsxs("div", { className: "mt-2 text-sm text-orange-700 dark:text-orange-300", children: [
+          /* @__PURE__ */ jsx("p", { children: error }),
+          /* @__PURE__ */ jsxs("ul", { className: "mt-2 list-disc list-inside", children: [
+            /* @__PURE__ */ jsx("li", { children: "Check if your microphone is properly connected" }),
+            /* @__PURE__ */ jsx("li", { children: "Ensure you're speaking close enough to the microphone" }),
+            /* @__PURE__ */ jsx("li", { children: "Try adjusting your microphone volume settings" }),
+            /* @__PURE__ */ jsx("li", { children: "Test your microphone in other applications" }),
+            /* @__PURE__ */ jsx("li", { children: "Make sure your browser has microphone permissions" }),
+            /* @__PURE__ */ jsx("li", { children: "Please reload the page and try again" })
+          ] })
+        ] })
+      ] })
+    ] }) });
+  }
+  if (isMicrophoneError) {
+    return /* @__PURE__ */ jsx("div", { className: "bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4", children: /* @__PURE__ */ jsxs("div", { className: "flex items-start", children: [
+      /* @__PURE__ */ jsx("div", { className: "flex-shrink-0", children: /* @__PURE__ */ jsx(AlertTriangle, { className: "h-5 w-5 text-red-400" }) }),
+      /* @__PURE__ */ jsxs("div", { className: "ml-3 flex-1", children: [
+        /* @__PURE__ */ jsx("h3", { className: "text-sm font-medium text-red-800 dark:text-red-200", children: "Audio Input Issue" }),
+        /* @__PURE__ */ jsx("div", { className: "mt-2 text-sm text-red-700 dark:text-red-300", children: /* @__PURE__ */ jsx("p", { children: error }) }),
+        /* @__PURE__ */ jsx("div", { className: "mt-4", children: /* @__PURE__ */ jsx(
+          "button",
+          {
+            onClick: () => {
+              validateMicrophoneAccess();
+            },
+            className: "bg-red-600 hover:bg-red-700 text-white px-3 py-1 rounded text-sm transition-colors",
+            children: "Check Again"
+          }
+        ) })
+      ] })
+    ] }) });
+  }
+  return /* @__PURE__ */ jsxs("div", { className: "space-y-4", children: [
+    toast.show && /* @__PURE__ */ jsx(Toast, { message: toast.message, type: toast.type, onClose: closeToast, duration: 3 }),
+    (isRecording || isPaused || recordingDuration > 0) && /* @__PURE__ */ jsx("div", { className: "bg-gray-100 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4", children: /* @__PURE__ */ jsxs("div", { className: "flex justify-between items-center", children: [
+      /* @__PURE__ */ jsx("div", { className: "text-sm text-gray-600 dark:text-gray-300", children: /* @__PURE__ */ jsxs("div", { className: "flex space-x-4", children: [
+        /* @__PURE__ */ jsxs("span", { children: [
+          "Duration: ",
+          formatDuration(recordingDuration)
+        ] }),
+        /* @__PURE__ */ jsxs("span", { children: [
+          "Chunks: ",
+          totalChunks
+        ] }),
+        /* @__PURE__ */ jsxs("span", { children: [
+          "Format: ",
+          format.toUpperCase()
+        ] }),
+        silenceRemoval && /* @__PURE__ */ jsx("span", { children: "Silence Removal: ON" })
+      ] }) }),
+      showDownload && lastAudioFile && /* @__PURE__ */ jsxs(
+        "button",
+        {
+          onClick: handleDownload,
+          className: "flex items-center space-x-1 bg-blue-600 hover:bg-blue-700 text-white px-3 py-1 rounded text-sm transition-colors",
+          children: [
+            /* @__PURE__ */ jsx(Download, { className: "h-3 w-3" }),
+            /* @__PURE__ */ jsx("span", { children: "Download" })
+          ]
+        }
+      )
+    ] }) }),
+    (isProcessing || isConverting) && /* @__PURE__ */ jsxs("div", { className: "flex items-center justify-center space-x-2 bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-4", children: [
+      /* @__PURE__ */ jsx(Loader2, { className: "animate-spin h-5 w-5 text-blue-600" }),
+      /* @__PURE__ */ jsxs("div", { className: "text-center", children: [
+        /* @__PURE__ */ jsx("span", { className: "block text-sm font-medium text-blue-700 dark:text-blue-300", children: isConverting ? `Processing Audio... ${Math.round(progress)}%` : "Processing audio chunk..." }),
+        statusMessage && /* @__PURE__ */ jsx("span", { className: "block text-xs text-blue-600 dark:text-blue-400 mt-1", children: statusMessage })
+      ] })
+    ] }),
+    isRecording && !isPaused && mediaStreamRef.current && /* @__PURE__ */ jsxs(
+      "div",
+      {
+        className: `audio-recorder-container ${isRecording && !isPaused ? "glow-active" : ""}`,
+        children: [
+          /* @__PURE__ */ jsxs("div", { className: "edge-container", children: [
+            /* @__PURE__ */ jsx("div", { className: "edge edge-top" }),
+            /* @__PURE__ */ jsx("div", { className: "edge edge-right" }),
+            /* @__PURE__ */ jsx("div", { className: "edge edge-bottom" }),
+            /* @__PURE__ */ jsx("div", { className: "edge edge-left" })
+          ] }),
+          /* @__PURE__ */ jsx("div", { className: "flex justify-center items-center", children: /* @__PURE__ */ jsx(
+            AudioVisualizerImproved,
+            {
+              mediaStream: mediaStreamRef.current,
+              isRecording: isRecording && !isPaused,
+              forceLight: false
+            }
+          ) })
+        ]
+      }
+    ),
+    /* @__PURE__ */ jsx("div", { className: "flex justify-center", children: isProcessing ? /* @__PURE__ */ jsxs("div", { className: "flex items-center justify-center space-x-2 bg-teal-600 hover:bg-teal-700 text-white py-2 px-4 rounded-full", children: [
+      /* @__PURE__ */ jsx(Loader2, { className: "animate-spin h-5 w-5" }),
+      /* @__PURE__ */ jsx("span", { children: "Processing..." })
+    ] }) : isRecording || isPaused ? /* @__PURE__ */ jsxs("div", { className: "flex space-x-2", children: [
+      /* @__PURE__ */ jsxs(
+        "button",
+        {
+          className: `flex items-center justify-center space-x-2 bg-red-600 hover:bg-red-700 text-white py-2 px-4 rounded-full transition-colors ${isDisabled ? "opacity-50 cursor-not-allowed" : ""}`,
+          onClick: handleStopClick,
+          disabled: isDisabled,
+          children: [
+            /* @__PURE__ */ jsx(Square, { className: "h-5 w-5" }),
+            /* @__PURE__ */ jsx("span", { children: "Stop" })
+          ]
+        }
+      ),
+      !isPaused ? /* @__PURE__ */ jsxs(
+        "button",
+        {
+          className: "flex items-center justify-center space-x-2 bg-yellow-500 hover:bg-yellow-600 text-white py-2 px-4 rounded-full transition-colors",
+          onClick: pauseRecording,
+          children: [
+            /* @__PURE__ */ jsx(Pause, { className: "h-5 w-5" }),
+            /* @__PURE__ */ jsx("span", { children: "Pause" })
+          ]
+        }
+      ) : /* @__PURE__ */ jsxs(
+        "button",
+        {
+          className: "flex items-center justify-center space-x-2 bg-green-600 hover:bg-green-700 text-white py-2 px-4 rounded-full transition-colors",
+          onClick: resumeRecording,
+          children: [
+            /* @__PURE__ */ jsx(Play, { className: "h-5 w-5" }),
+            /* @__PURE__ */ jsx("span", { children: "Resume" })
+          ]
+        }
+      )
+    ] }) : /* @__PURE__ */ jsxs(
+      "button",
+      {
+        className: className ? `flex items-center justify-center space-x-2 py-2 px-4 rounded-full transition-colors ${className}` : "flex items-center justify-center space-x-2 bg-gradient-to-r from-purple-400 to-blue-500 hover:bg-gradient-to-r hover:from-purple-500 hover:to-blue-600 text-white py-2 px-4 rounded-full transition-colors",
+        onClick: handleStartRecording,
+        children: [
+          /* @__PURE__ */ jsx(Mic, { className: "h-5 w-5" }),
+          /* @__PURE__ */ jsx("span", { children: "Start Recording" })
+        ]
+      }
+    ) }),
+    availableDevices.length > 1 && /* @__PURE__ */ jsxs("div", { className: "mt-4", children: [
+      /* @__PURE__ */ jsx("label", { className: "block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2", children: "Select Microphone:" }),
+      /* @__PURE__ */ jsx(
+        "select",
+        {
+          value: currentDeviceId || "",
+          onChange: (e) => selectMicrophone(e.target.value),
+          className: "w-full p-2 border border-gray-200 dark:border-gray-700 rounded bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100",
+          disabled: isRecording,
+          children: availableDevices.map((device) => /* @__PURE__ */ jsx("option", { value: device.deviceId, children: device.label || `Microphone ${device.deviceId.slice(0, 8)}` }, device.deviceId))
+        }
+      )
+    ] })
+  ] });
+};
+var AudioCapture_default = AudioCapture;

-export { AudioDictation_default as AudioDictation, AudioRecorder_default as AudioRecorder };
+export { AudioCapture_default as AudioCapture, AudioDictation_default as AudioDictation, AudioRecorder_default as AudioRecorder };
 //# sourceMappingURL=index.mjs.map
 //# sourceMappingURL=index.mjs.map