vibesurf 0.1.20__py3-none-any.whl → 0.1.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of vibesurf might be problematic.
- vibe_surf/_version.py +2 -2
- vibe_surf/backend/api/task.py +1 -1
- vibe_surf/backend/api/voices.py +481 -0
- vibe_surf/backend/database/migrations/v004_add_voice_profiles.sql +35 -0
- vibe_surf/backend/database/models.py +38 -1
- vibe_surf/backend/database/queries.py +189 -1
- vibe_surf/backend/main.py +2 -0
- vibe_surf/backend/shared_state.py +1 -1
- vibe_surf/backend/voice_model_config.py +25 -0
- vibe_surf/browser/agen_browser_profile.py +2 -0
- vibe_surf/browser/agent_browser_session.py +3 -3
- vibe_surf/chrome_extension/background.js +224 -9
- vibe_surf/chrome_extension/content.js +147 -0
- vibe_surf/chrome_extension/manifest.json +11 -2
- vibe_surf/chrome_extension/permission-iframe.html +38 -0
- vibe_surf/chrome_extension/permission-request.html +104 -0
- vibe_surf/chrome_extension/scripts/api-client.js +61 -0
- vibe_surf/chrome_extension/scripts/main.js +8 -2
- vibe_surf/chrome_extension/scripts/permission-iframe-request.js +188 -0
- vibe_surf/chrome_extension/scripts/permission-request.js +118 -0
- vibe_surf/chrome_extension/scripts/settings-manager.js +690 -3
- vibe_surf/chrome_extension/scripts/ui-manager.js +730 -119
- vibe_surf/chrome_extension/scripts/user-settings-storage.js +422 -0
- vibe_surf/chrome_extension/scripts/voice-recorder.js +514 -0
- vibe_surf/chrome_extension/sidepanel.html +106 -29
- vibe_surf/chrome_extension/styles/components.css +35 -0
- vibe_surf/chrome_extension/styles/input.css +164 -1
- vibe_surf/chrome_extension/styles/layout.css +1 -1
- vibe_surf/chrome_extension/styles/settings-environment.css +138 -0
- vibe_surf/chrome_extension/styles/settings-forms.css +7 -7
- vibe_surf/chrome_extension/styles/variables.css +51 -0
- vibe_surf/tools/voice_asr.py +79 -8
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/METADATA +8 -12
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/RECORD +38 -31
- vibe_surf/chrome_extension/icons/convert-svg.js +0 -33
- vibe_surf/chrome_extension/icons/logo-preview.html +0 -187
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/WHEEL +0 -0
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/entry_points.txt +0 -0
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/licenses/LICENSE +0 -0
- {vibesurf-0.1.20.dist-info → vibesurf-0.1.21.dist-info}/top_level.txt +0 -0
vibe_surf/chrome_extension/scripts/voice-recorder.js
@@ -0,0 +1,514 @@
// Voice Recording Manager - Handles voice input functionality
// Provides recording capabilities and integration with ASR API

class VibeSurfVoiceRecorder {
  constructor(apiClient) {
    this.apiClient = apiClient;
    this.mediaRecorder = null;
    this.audioChunks = [];
    this.isRecording = false;
    this.recordingStartTime = null;
    this.maxRecordingDuration = 60000; // 60 seconds max
    this.recordingTimeout = null;
    this.durationInterval = null;
    this.onDurationUpdate = null;

    // Recording state callbacks
    this.onRecordingStart = null;
    this.onRecordingStop = null;
    this.onTranscriptionComplete = null;
    this.onTranscriptionError = null;

    console.log('[VoiceRecorder] Voice recorder initialized');
  }

  // Check if browser supports media recording
  isSupported() {
    return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia && window.MediaRecorder);
  }

  // Simplified permission request for Chrome extension
  async requestMicrophonePermission() {
    try {
      console.log('[VoiceRecorder] Requesting microphone permission...');

      // For Chrome extensions, try direct permission first
      if (typeof chrome !== 'undefined' && chrome.runtime && chrome.runtime.id) {
        try {
          // Try direct getUserMedia first (works if permission already granted)
          const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
          stream.getTracks().forEach(track => track.stop());
          console.log('[VoiceRecorder] Direct permission granted');
          return true;
        } catch (directError) {
          console.log('[VoiceRecorder] Direct permission failed, using iframe method');
          return new Promise((resolve) => {
            this.requestMicrophonePermissionViaIframe(resolve);
          });
        }
      }

      // Fallback: Direct permission request for non-extension contexts
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      stream.getTracks().forEach(track => track.stop());
      console.log('[VoiceRecorder] Permission granted');
      return true;

    } catch (error) {
      console.error('[VoiceRecorder] Microphone permission denied:', error);

      let errorMessage = 'Microphone permission denied';
      if (error.name === 'NotAllowedError') {
        errorMessage = 'Microphone access was denied. Please allow access and try again.';
      } else if (error.name === 'NotFoundError') {
        errorMessage = 'No microphone found. Please connect a microphone.';
      } else if (error.name === 'NotReadableError') {
        errorMessage = 'Microphone is in use by another application.';
      }

      const permissionError = new Error(errorMessage);
      permissionError.name = 'MicrophonePermissionError';
      permissionError.originalError = error;
      throw permissionError;
    }
  }

  // Simplified iframe permission request
  async requestMicrophonePermissionViaIframe(resolve) {
    console.log('[VoiceRecorder] Using iframe injection method');

    try {
      const tabs = await chrome.tabs.query({ active: true, currentWindow: true });
      if (!tabs || tabs.length === 0) {
        throw new Error('No active tab found');
      }

      const activeTab = tabs[0];

      // Check if we can inject into this tab
      if (activeTab.url.startsWith('chrome://') ||
          activeTab.url.startsWith('chrome-extension://')) {
        console.log('[VoiceRecorder] Cannot inject into restricted tab, using tab method');
        return this.showMicrophonePermissionTab(resolve);
      }

      // Inject iframe
      const response = await chrome.tabs.sendMessage(activeTab.id, {
        type: 'INJECT_MICROPHONE_PERMISSION_IFRAME'
      });

      if (response && response.success) {
        // Listen for permission result
        const messageHandler = (message) => {
          if (message.type === 'MICROPHONE_PERMISSION_RESULT' && message.source === 'iframe') {
            chrome.runtime.onMessage.removeListener(messageHandler);
            resolve(message.granted || message.success || false);
          }
        };

        chrome.runtime.onMessage.addListener(messageHandler);

        // Timeout cleanup
        setTimeout(() => {
          chrome.runtime.onMessage.removeListener(messageHandler);
          resolve(false);
        }, 30000);
      } else {
        throw new Error('Failed to inject iframe');
      }

    } catch (error) {
      console.error('[VoiceRecorder] Iframe method failed:', error);
      this.showMicrophonePermissionTab(resolve);
    }
  }

  // Simplified tab permission request
  showMicrophonePermissionTab(resolve) {
    console.log('[VoiceRecorder] Using tab method for permission');

    try {
      const permissionUrl = chrome.runtime.getURL('permission-request.html');

      chrome.tabs.create({ url: permissionUrl, active: true }, (tab) => {
        if (chrome.runtime.lastError) {
          console.error('[VoiceRecorder] Failed to create tab:', chrome.runtime.lastError);
          resolve(false);
          return;
        }

        // Listen for permission result
        const messageHandler = (message) => {
          if (message.type === 'MICROPHONE_PERMISSION_RESULT') {
            chrome.runtime.onMessage.removeListener(messageHandler);
            chrome.tabs.remove(tab.id).catch(() => {});
            resolve(message.granted || false);
          }
        };

        chrome.runtime.onMessage.addListener(messageHandler);

        // Timeout cleanup
        setTimeout(() => {
          chrome.runtime.onMessage.removeListener(messageHandler);
          chrome.tabs.remove(tab.id).catch(() => {});
          resolve(false);
        }, 30000);
      });

    } catch (error) {
      console.error('[VoiceRecorder] Tab method failed:', error);
      resolve(false);
    }
  }


  // Start voice recording
  async startRecording() {
    if (this.isRecording) {
      console.warn('[VoiceRecorder] Already recording');
      return false;
    }

    if (!this.isSupported()) {
      console.error('[VoiceRecorder] Voice recording not supported in this browser');
      throw new Error('Voice recording is not supported in your browser');
    }

    try {
      console.log('[VoiceRecorder] Starting voice recording...');

      // Check for ASR profiles BEFORE starting recording
      const asrProfiles = await this.apiClient.getASRProfiles(true);
      if (!asrProfiles.profiles || asrProfiles.profiles.length === 0) {
        console.log('[VoiceRecorder] No ASR profiles found, showing configuration modal');
        this.handleNoVoiceProfileError();
        return false;
      }

      console.log(`[VoiceRecorder] Found ${asrProfiles.profiles.length} ASR profile(s)`);

      // Get microphone stream
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          sampleRate: 44100
        }
      });

      // Create MediaRecorder
      const options = {
        mimeType: 'audio/webm;codecs=opus'
      };

      // Fallback for browsers that don't support webm
      if (!MediaRecorder.isTypeSupported(options.mimeType)) {
        options.mimeType = 'audio/ogg;codecs=opus';
        if (!MediaRecorder.isTypeSupported(options.mimeType)) {
          options.mimeType = 'audio/wav';
        }
      }

      this.mediaRecorder = new MediaRecorder(stream, options);
      this.audioChunks = [];
      this.recordingStartTime = Date.now();

      // Set up event handlers
      this.mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          this.audioChunks.push(event.data);
        }
      };

      this.mediaRecorder.onstop = () => {
        this.handleRecordingStop();
      };

      this.mediaRecorder.onerror = (event) => {
        console.error('[VoiceRecorder] MediaRecorder error:', event.error);
        this.stopRecording();
        this.handleRecordingError(event.error);
      };

      // Start recording
      this.mediaRecorder.start();
      this.isRecording = true;

      // Set up duration updates
      this.startDurationUpdates();

      // Set up maximum recording duration
      this.recordingTimeout = setTimeout(() => {
        if (this.isRecording) {
          console.log('[VoiceRecorder] Maximum recording duration reached, stopping automatically');
          this.stopRecording();
        }
      }, this.maxRecordingDuration);

      // Notify callback
      if (this.onRecordingStart) {
        this.onRecordingStart();
      }

      console.log('[VoiceRecorder] Voice recording started');
      return true;

    } catch (error) {
      console.error('[VoiceRecorder] Failed to start recording:', error);
      this.handleRecordingError(error);
      throw error;
    }
  }

  // Stop voice recording
  stopRecording() {
    if (!this.isRecording || !this.mediaRecorder) {
      console.warn('[VoiceRecorder] Not currently recording');
      return false;
    }

    try {
      console.log('[VoiceRecorder] Stopping voice recording...');

      // Clear the timeout
      if (this.recordingTimeout) {
        clearTimeout(this.recordingTimeout);
        this.recordingTimeout = null;
      }

      // Stop the MediaRecorder
      this.mediaRecorder.stop();

      // Stop all tracks in the stream
      const stream = this.mediaRecorder.stream;
      if (stream) {
        stream.getTracks().forEach(track => track.stop());
      }

      this.isRecording = false;
      console.log('[VoiceRecorder] Voice recording stopped');

      return true;

    } catch (error) {
      console.error('[VoiceRecorder] Error stopping recording:', error);
      this.handleRecordingError(error);
      return false;
    }
  }

  // Handle recording stop event
  async handleRecordingStop() {
    try {
      if (this.audioChunks.length === 0) {
        console.warn('[VoiceRecorder] No audio data recorded');
        this.handleRecordingError(new Error('No audio data recorded'));
        return;
      }

      // Create audio blob
      const audioBlob = new Blob(this.audioChunks, {
        type: this.mediaRecorder.mimeType
      });

      const recordingDuration = Date.now() - this.recordingStartTime;
      console.log(`[VoiceRecorder] Recorded ${audioBlob.size} bytes in ${recordingDuration}ms`);

      // Notify callback
      if (this.onRecordingStop) {
        this.onRecordingStop(audioBlob, recordingDuration);
      }

      // Transcribe the audio
      await this.transcribeAudio(audioBlob);

    } catch (error) {
      console.error('[VoiceRecorder] Error handling recording stop:', error);
      this.handleRecordingError(error);
    }
  }

  // Start duration updates
  startDurationUpdates() {
    this.stopDurationUpdates(); // Clear any existing interval

    this.durationInterval = setInterval(() => {
      const duration = this.getRecordingDuration();
      const formattedDuration = this.formatDuration(duration);

      if (this.onDurationUpdate) {
        this.onDurationUpdate(formattedDuration, duration);
      }
    }, 1000); // Update every second
  }

  // Stop duration updates
  stopDurationUpdates() {
    if (this.durationInterval) {
      clearInterval(this.durationInterval);
      this.durationInterval = null;
    }
  }

  // Format duration in MM:SS format
  formatDuration(milliseconds) {
    const seconds = Math.floor(milliseconds / 1000);
    const minutes = Math.floor(seconds / 60);
    const remainingSeconds = seconds % 60;
    return `${minutes}:${remainingSeconds.toString().padStart(2, '0')}`;
  }

  // Transcribe audio using ASR API
  async transcribeAudio(audioBlob) {
    try {
      console.log('[VoiceRecorder] Transcribing audio...');

      // Get available ASR profiles
      const asrProfiles = await this.apiClient.getASRProfiles(true);

      if (!asrProfiles.profiles || asrProfiles.profiles.length === 0) {
        // Show voice profile required modal instead of generic error
        this.handleNoVoiceProfileError();
        return;
      }

      // Use the first available ASR profile
      const voiceProfileName = asrProfiles.profiles[0].voice_profile_name;
      console.log(`[VoiceRecorder] Using ASR profile: ${voiceProfileName}`);

      // Call the ASR API
      const result = await this.apiClient.transcribeAudio(audioBlob, voiceProfileName);

      if (result.success && result.recognized_text) {
        console.log(`[VoiceRecorder] Transcription successful: "${result.recognized_text}"`);

        // Notify callback with transcription result
        if (this.onTranscriptionComplete) {
          this.onTranscriptionComplete(result.recognized_text, result);
        }
      } else {
        throw new Error(result.message || 'Transcription failed');
      }

    } catch (error) {
      console.error('[VoiceRecorder] Transcription error:', error);
      this.handleTranscriptionError(error);
    }
  }

  // Handle recording errors
  handleRecordingError(error) {
    this.isRecording = false;
    this.cleanup();

    const errorMessage = error.message || 'Voice recording failed';
    console.error('[VoiceRecorder] Recording error:', errorMessage);

    if (this.onTranscriptionError) {
      this.onTranscriptionError(errorMessage, 'recording');
    }
  }

  // Handle transcription errors
  handleTranscriptionError(error) {
    const errorMessage = error.message || 'Audio transcription failed';
    console.error('[VoiceRecorder] Transcription error:', errorMessage);

    if (this.onTranscriptionError) {
      this.onTranscriptionError(errorMessage, 'transcription');
    }
  }

  // Handle no voice profile error with modal
  handleNoVoiceProfileError() {
    console.log('[VoiceRecorder] No voice profiles configured');

    // Send message to UI manager to show voice profile required modal
    if (typeof window !== 'undefined' && window.vibeSurfUIManager) {
      try {
        window.vibeSurfUIManager.showVoiceProfileRequiredModal('configure');
      } catch (error) {
        console.error('[VoiceRecorder] Failed to show voice profile modal:', error);
        // Fallback to generic error handling
        this.handleTranscriptionError(new Error('No active ASR profiles found. Please configure an ASR profile in Settings > Voice.'));
      }
    } else {
      // Fallback to generic error handling
      this.handleTranscriptionError(new Error('No active ASR profiles found. Please configure an ASR profile in Settings > Voice.'));
    }
  }

  // Cleanup resources
  cleanup() {
    if (this.recordingTimeout) {
      clearTimeout(this.recordingTimeout);
      this.recordingTimeout = null;
    }

    if (this.mediaRecorder) {
      if (this.mediaRecorder.state !== 'inactive') {
        try {
          this.mediaRecorder.stop();
        } catch (error) {
          console.warn('[VoiceRecorder] Error stopping MediaRecorder during cleanup:', error);
        }
      }

      // Stop all tracks in the stream
      const stream = this.mediaRecorder.stream;
      if (stream) {
        stream.getTracks().forEach(track => {
          try {
            track.stop();
          } catch (error) {
            console.warn('[VoiceRecorder] Error stopping track during cleanup:', error);
          }
        });
      }

      this.mediaRecorder = null;
    }

    this.audioChunks = [];
    this.isRecording = false;
    this.recordingStartTime = null;
  }

  // Get recording duration
  getRecordingDuration() {
    if (!this.isRecording || !this.recordingStartTime) {
      return 0;
    }
    return Date.now() - this.recordingStartTime;
  }

  // Check if currently recording
  isCurrentlyRecording() {
    return this.isRecording;
  }

  // Check if voice recording should be disabled due to missing ASR profiles
  async isVoiceRecordingAvailable() {
    try {
      const asrProfiles = await this.apiClient.getASRProfiles(true);
      return asrProfiles.profiles && asrProfiles.profiles.length > 0;
    } catch (error) {
      console.error('[VoiceRecorder] Error checking ASR profiles availability:', error);
      return false;
    }
  }

  // Set callbacks
  setCallbacks(callbacks) {
    if (callbacks.onRecordingStart) this.onRecordingStart = callbacks.onRecordingStart;
    if (callbacks.onRecordingStop) this.onRecordingStop = callbacks.onRecordingStop;
    if (callbacks.onTranscriptionComplete) this.onTranscriptionComplete = callbacks.onTranscriptionComplete;
    if (callbacks.onTranscriptionError) this.onTranscriptionError = callbacks.onTranscriptionError;
  }
}

// Export for use in other modules
if (typeof window !== 'undefined') {
  window.VibeSurfVoiceRecorder = VibeSurfVoiceRecorder;
}