react-restyle-components 0.4.13 → 0.4.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (20)
  1. package/lib/src/core-components/src/components/Accordion/AccordionSection/elements.d.ts.map +1 -1
  2. package/lib/src/core-components/src/components/Accordion/AccordionSection/elements.js +71 -20
  3. package/lib/src/core-components/src/components/Modal/ModalDocxContent/gemini.service.d.ts +4 -0
  4. package/lib/src/core-components/src/components/Modal/ModalDocxContent/gemini.service.d.ts.map +1 -0
  5. package/lib/src/core-components/src/components/Modal/ModalDocxContent/gemini.service.js +70 -0
  6. package/lib/src/core-components/src/components/Modal/ModalDocxContent/modal-docx-content.component.d.ts +46 -0
  7. package/lib/src/core-components/src/components/Modal/ModalDocxContent/modal-docx-content.component.d.ts.map +1 -0
  8. package/lib/src/core-components/src/components/Modal/ModalDocxContent/modal-docx-content.component.js +844 -0
  9. package/lib/src/core-components/src/components/Modal/index.d.ts +1 -0
  10. package/lib/src/core-components/src/components/Modal/index.d.ts.map +1 -1
  11. package/lib/src/core-components/src/components/Modal/index.js +1 -0
  12. package/lib/src/core-components/src/tc.global.css +2 -1
  13. package/lib/src/core-components/src/tc.module.css +1 -1
  14. package/lib/src/core-components/src/utils/index.d.ts +1 -0
  15. package/lib/src/core-components/src/utils/index.d.ts.map +1 -1
  16. package/lib/src/core-components/src/utils/index.js +1 -0
  17. package/lib/src/core-components/src/utils/jodit-editor-config.util.d.ts +189 -0
  18. package/lib/src/core-components/src/utils/jodit-editor-config.util.d.ts.map +1 -0
  19. package/lib/src/core-components/src/utils/jodit-editor-config.util.js +335 -0
  20. package/package.json +3 -1
package/lib/src/core-components/src/components/Modal/ModalDocxContent/modal-docx-content.component.js
@@ -0,0 +1,844 @@
1
+ import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
2
+ import { useEffect, useState, useRef, useCallback } from 'react';
3
+ import { createJoditConfig, JODIT_PASTE_DIALOG_HANDLER, JODIT_TOOLBAR_BUTTONS, cn, } from '../../../utils';
4
+ import JoditEditor from 'jodit-react';
5
+ import 'jodit/esm/plugins/resizer/resizer';
6
+ import { Icon } from '../..';
7
+ import SpeechRecognition, { useSpeechRecognition, } from 'react-speech-recognition';
8
+ import s from '../../../tc.module.css';
9
+ import { geminiSendMessageService } from './gemini.service';
10
+ // CSS-in-JS styles for enhanced visual design
11
+ const modalStyles = {
12
+ // Glass-morphism overlay
13
+ overlay: {
14
+ background: 'linear-gradient(135deg, rgba(15, 23, 42, 0.85) 0%, rgba(30, 41, 59, 0.9) 100%)',
15
+ backdropFilter: 'blur(8px)',
16
+ },
17
+ // Modal container with subtle gradient border
18
+ container: {
19
+ background: 'linear-gradient(180deg, #ffffff 0%, #f8fafc 100%)',
20
+ boxShadow: '0 25px 50px -12px rgba(0, 0, 0, 0.25), 0 0 0 1px rgba(148, 163, 184, 0.1)',
21
+ },
22
+ // Header gradient
23
+ header: {
24
+ background: 'linear-gradient(135deg, #1e293b 0%, #334155 100%)',
25
+ borderBottom: '1px solid rgba(148, 163, 184, 0.2)',
26
+ },
27
+ // Action button base styles
28
+ actionButton: {
29
+ transition: 'all 0.2s cubic-bezier(0.4, 0, 0.2, 1)',
30
+ },
31
+ // Footer with subtle top border
32
+ footer: {
33
+ background: 'linear-gradient(180deg, #f8fafc 0%, #f1f5f9 100%)',
34
+ borderTop: '1px solid rgba(148, 163, 184, 0.3)',
35
+ },
36
+ };
37
+ // Keyframe animations
38
+ const keyframes = `
39
+ @keyframes modalSlideIn {
40
+ from {
41
+ opacity: 0;
42
+ transform: scale(0.95) translateY(-20px);
43
+ }
44
+ to {
45
+ opacity: 1;
46
+ transform: scale(1) translateY(0);
47
+ }
48
+ }
49
+
50
+ @keyframes overlayFadeIn {
51
+ from { opacity: 0; }
52
+ to { opacity: 1; }
53
+ }
54
+
55
+ @keyframes pulseGlow {
56
+ 0%, 100% {
57
+ box-shadow: 0 0 0 0 rgba(99, 102, 241, 0.4);
58
+ }
59
+ 50% {
60
+ box-shadow: 0 0 20px 8px rgba(99, 102, 241, 0.2);
61
+ }
62
+ }
63
+
64
+ @keyframes recordingPulse {
65
+ 0%, 100% {
66
+ transform: scale(1);
67
+ box-shadow: 0 0 0 0 rgba(220, 38, 38, 0.4);
68
+ }
69
+ 50% {
70
+ transform: scale(1.05);
71
+ box-shadow: 0 0 20px 8px rgba(220, 38, 38, 0.2);
72
+ }
73
+ }
74
+
75
+ @keyframes ai-pulse {
76
+ 0%, 100% {
77
+ transform: scale(1);
78
+ box-shadow: 0 0 20px rgba(99, 102, 241, 0.6), 0 0 40px rgba(168, 85, 247, 0.4);
79
+ }
80
+ 50% {
81
+ transform: scale(1.1);
82
+ box-shadow: 0 0 30px rgba(99, 102, 241, 0.8), 0 0 60px rgba(168, 85, 247, 0.6);
83
+ }
84
+ }
85
+
86
+ @keyframes ai-text-pulse {
87
+ 0%, 100% { opacity: 1; }
88
+ 50% { opacity: 0.7; }
89
+ }
90
+
91
+ @keyframes shimmer {
92
+ 0% { background-position: -200% 0; }
93
+ 100% { background-position: 200% 0; }
94
+ }
95
+
96
+ @keyframes spin {
97
+ from { transform: rotate(0deg); }
98
+ to { transform: rotate(360deg); }
99
+ }
100
+ `;
101
+ export const ModalDocxContent = ({ visible, details = '', onSave, onClose, title = 'Document Editor', aiService = (message, gemini_key) => geminiSendMessageService(message, gemini_key), showAIButton = true, showVoiceButtons = true, uploaderUrl = 'https://www.tech-abl.com/api/assets/uploadFile', folder = 'library', gemini_key = '', classNames = {}, styles = {}, }) => {
102
+ // AI button is only visible when showAIButton is true AND a valid gemini_key is provided
103
+ const isValidGeminiKey = gemini_key && gemini_key.trim() !== '' && gemini_key !== 'xxx';
104
+ const shouldShowAIButton = showAIButton && isValidGeminiKey;
105
+ const joditEditorRef = useRef(null);
106
+ const content = useRef('');
107
+ const [showModal, setShowModal] = useState(visible);
108
+ const parentDivRef = useRef(null);
109
+ const [parentDivWidth, setParentDivWidth] = useState(0);
110
+ const [parentDivHeight, setParentDivHeight] = useState(0);
111
+ // AI processing state
112
+ const [isAIProcessing, setIsAIProcessing] = useState(false);
113
+ // Store selection info (text + occurrence index)
114
+ const selectionInfoRef = useRef(null);
115
+ // Helper: Get selected text and find which occurrence it is
116
+ const getSelectedText = useCallback(() => {
117
+ const editor = joditEditorRef.current;
118
+ if (!editor)
119
+ return '';
120
+ selectionInfoRef.current = null; // Reset
121
+ try {
122
+ const selection = window.getSelection();
123
+ if (selection && selection.rangeCount > 0 && !selection.isCollapsed) {
124
+ const selectedText = selection.toString().trim();
125
+ if (selectedText) {
126
+ // Get the full text content of the editor
127
+ const editorElement = editor.editor || editor.container;
128
+ const fullText = editorElement?.textContent || '';
129
+ // Find the position of selection in the text
130
+ const range = selection.getRangeAt(0);
131
+ // Calculate text offset from start of editor
132
+ let textOffset = 0;
133
+ const treeWalker = document.createTreeWalker(editorElement, NodeFilter.SHOW_TEXT, null);
134
+ let node;
135
+ while ((node = treeWalker.nextNode())) {
136
+ if (node === range.startContainer) {
137
+ textOffset += range.startOffset;
138
+ break;
139
+ }
140
+ textOffset += node.textContent?.length || 0;
141
+ }
142
+ // Count which occurrence this is
143
+ let occurrenceIndex = 0;
144
+ let searchPos = 0;
145
+ const lowerText = fullText.toLowerCase();
146
+ const lowerSelected = selectedText.toLowerCase();
147
+ while (searchPos < textOffset) {
148
+ const foundPos = lowerText.indexOf(lowerSelected, searchPos);
149
+ if (foundPos === -1 || foundPos >= textOffset)
150
+ break;
151
+ occurrenceIndex++;
152
+ searchPos = foundPos + 1;
153
+ }
154
+ selectionInfoRef.current = {
155
+ text: selectedText,
156
+ occurrenceIndex: occurrenceIndex,
157
+ };
158
+ console.log('[getSelectedText] Found selection:', {
159
+ text: selectedText,
160
+ occurrenceIndex,
161
+ textOffset,
162
+ });
163
+ return selectedText;
164
+ }
165
+ }
166
+ }
167
+ catch (error) {
168
+ console.error('[getSelectedText] Error:', error);
169
+ }
170
+ console.log('[getSelectedText] No selection found');
171
+ return '';
172
+ }, []);
173
+ // Helper: Replace the specific occurrence of text in editor content
174
+ const replaceTextInContent = useCallback((originalText, newHtml, occurrenceIndex) => {
175
+ const editor = joditEditorRef.current;
176
+ if (!editor || !originalText)
177
+ return false;
178
+ try {
179
+ const currentHtml = editor.value;
180
+ // Escape special regex characters in the original text
181
+ const escapedText = originalText.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
182
+ // Find all occurrences and replace the specific one
183
+ const regex = new RegExp(escapedText, 'gi');
184
+ let matchIndex = 0;
185
+ const newContent = currentHtml.replace(regex, (match) => {
186
+ if (matchIndex === occurrenceIndex) {
187
+ matchIndex++;
188
+ // console.log(
189
+ // `[replaceTextInContent] Replacing occurrence ${occurrenceIndex}`,
190
+ // );
191
+ return newHtml;
192
+ }
193
+ matchIndex++;
194
+ return match; // Keep other occurrences unchanged
195
+ });
196
+ if (matchIndex > occurrenceIndex) {
197
+ editor.value = newContent;
198
+ // console.log('[replaceTextInContent] Replaced successfully');
199
+ return true;
200
+ }
201
+ // console.log('[replaceTextInContent] Occurrence not found');
202
+ return false;
203
+ }
204
+ catch (error) {
205
+ // console.error('[replaceTextInContent] Error:', error);
206
+ return false;
207
+ }
208
+ }, []);
209
+ // Handle AI button click
210
+ const handleAIButtonClick = useCallback(async () => {
211
+ if (isAIProcessing || !content.current.trim())
212
+ return;
213
+ // Get selected text BEFORE async call (also saves occurrence index)
214
+ const selectedText = getSelectedText();
215
+ const hasSelection = selectedText.length > 0;
216
+ const selectionInfo = selectionInfoRef.current;
217
+ const textToProcess = hasSelection ? selectedText : content.current.trim();
218
+ setIsAIProcessing(true);
219
+ try {
220
+ const response = await aiService(textToProcess, gemini_key);
221
+ if (response?.text && joditEditorRef.current) {
222
+ const aiResponse = `<span style="color: #6366f1; font-style: italic;">${response.text}</span>`;
223
+ if (hasSelection && selectionInfo) {
224
+ // Replace the specific occurrence using saved index
225
+ replaceTextInContent(selectedText, aiResponse, selectionInfo.occurrenceIndex);
226
+ content.current = joditEditorRef.current.value;
227
+ }
228
+ else {
229
+ // Replace all content with AI response (original behavior)
230
+ joditEditorRef.current.value = aiResponse;
231
+ content.current = aiResponse;
232
+ }
233
+ joditEditorRef.current.focus();
234
+ }
235
+ }
236
+ catch (error) {
237
+ console.error('AI Error:', error);
238
+ }
239
+ finally {
240
+ setIsAIProcessing(false);
241
+ }
242
+ // eslint-disable-next-line react-hooks/exhaustive-deps
243
+ }, [isAIProcessing, aiService]);
244
+ // Speech recognition
245
+ const { transcript, listening, resetTranscript, browserSupportsSpeechRecognition, } = useSpeechRecognition();
246
+ const [lastTranscript, setLastTranscript] = useState('');
247
+ // Local state to track if THIS modal started recording (not global listening state)
248
+ const [isRecording, setIsRecording] = useState(false);
249
+ // Ref to track recording state for event handlers (avoids stale closure issues)
250
+ const isRecordingRef = useRef(false);
251
+ // Track if manual mode is active (continuous listening without auto-stop)
252
+ const isManualModeRef = useRef(false);
253
+ // State for UI rendering of manual mode indicator
254
+ const [isManualMode, setIsManualMode] = useState(false);
255
+ // Auto-stop silence timer ref
256
+ const silenceTimerRef = useRef(null);
257
+ // Track last transcript change time for auto-stop
258
+ const lastTranscriptTimeRef = useRef(Date.now());
259
+ // Watchdog interval ref for manual mode (checks every 1 sec if recording is still active)
260
+ const watchdogIntervalRef = useRef(null);
261
+ // Ref to track listening state (avoids stale closure in interval)
262
+ const listeningRef = useRef(listening);
263
+ // Cleanup: Stop listening when component unmounts
264
+ useEffect(() => {
265
+ setTimeout(() => {
266
+ SpeechRecognition.stopListening();
267
+ resetTranscript();
268
+ isRecordingRef.current = false;
269
+ setIsRecording(false);
270
+ isManualModeRef.current = false;
271
+ setIsManualMode(false);
272
+ if (silenceTimerRef.current) {
273
+ clearTimeout(silenceTimerRef.current);
274
+ silenceTimerRef.current = null;
275
+ }
276
+ if (watchdogIntervalRef.current) {
277
+ clearInterval(watchdogIntervalRef.current);
278
+ watchdogIntervalRef.current = null;
279
+ }
280
+ }, 1000);
281
+ // eslint-disable-next-line react-hooks/exhaustive-deps
282
+ }, []);
283
+ // Keep listeningRef in sync with listening state
284
+ useEffect(() => {
285
+ listeningRef.current = listening;
286
+ }, [listening]);
287
+ // Restart counter for debugging
288
+ const restartCountRef = useRef(0);
289
+ // Last forced restart time
290
+ const lastForceRestartRef = useRef(Date.now());
291
+ // Soft restart speech recognition - only called when browser stopped listening
292
+ // Does NOT abort - just starts listening again to avoid losing words
293
+ const softRestartRecognition = useCallback(() => {
294
+ if (!isManualModeRef.current || !isRecordingRef.current)
295
+ return;
296
+ restartCountRef.current += 1;
297
+ console.log(`[Watchdog ${new Date().toLocaleTimeString()}] Soft restart #${restartCountRef.current} (browser had stopped)`);
298
+ // Don't abort - just start listening directly
299
+ // This avoids losing any words that might be in the buffer
300
+ SpeechRecognition.startListening({
301
+ continuous: true,
302
+ interimResults: true,
303
+ language: 'en-US',
304
+ });
305
+ lastForceRestartRef.current = Date.now();
306
+ }, []);
307
+ // Start watchdog function - Only restart when browser stops listening
308
+ const startWatchdog = useCallback(() => {
309
+ // Clear any existing watchdog first
310
+ if (watchdogIntervalRef.current) {
311
+ clearInterval(watchdogIntervalRef.current);
312
+ watchdogIntervalRef.current = null;
313
+ }
314
+ restartCountRef.current = 0;
315
+ lastForceRestartRef.current = Date.now();
316
+ // Create persistent watchdog that runs every 1 second
317
+ watchdogIntervalRef.current = setInterval(() => {
318
+ // Only restart if: manual mode active, recording on, AND browser stopped listening
319
+ // Don't force restart while listening - this would lose words
320
+ if (isManualModeRef.current &&
321
+ isRecordingRef.current &&
322
+ !listeningRef.current) {
323
+ console.log(`[Watchdog ${new Date().toLocaleTimeString()}] Browser stopped - restarting`);
324
+ softRestartRecognition();
325
+ }
326
+ }, 1000);
327
+ console.log('[Watchdog] Started - will restart only when browser stops');
328
+ }, [softRestartRecognition]);
329
+ // Stop watchdog function
330
+ const stopWatchdog = useCallback(() => {
331
+ if (watchdogIntervalRef.current) {
332
+ clearInterval(watchdogIntervalRef.current);
333
+ watchdogIntervalRef.current = null;
334
+ console.log('[Watchdog] Stopped');
335
+ }
336
+ }, []);
337
+ // Listen for speech recognition end/error events when recording starts
338
+ useEffect(() => {
339
+ if (!isRecording)
340
+ return;
341
+ const recognition = SpeechRecognition.getRecognition();
342
+ if (!recognition)
343
+ return;
344
+ const handleEnd = () => {
345
+ // Use ref to get current recording state (avoids stale closure)
346
+ // If recording was stopped (by silence timer or user), don't restart
347
+ if (!isRecordingRef.current) {
348
+ console.log('[handleEnd] Recording stopped - not restarting');
349
+ return;
350
+ }
351
+ // In manual mode, always restart listening
352
+ if (isManualModeRef.current) {
353
+ console.log('[handleEnd] Manual mode - restarting');
354
+ setTimeout(() => {
355
+ if (isManualModeRef.current && isRecordingRef.current) {
356
+ SpeechRecognition.startListening({
357
+ continuous: true,
358
+ interimResults: true,
359
+ language: 'en-US',
360
+ });
361
+ }
362
+ }, 100);
363
+ }
364
+ else if (isRecordingRef.current) {
365
+ // In auto-stop mode, only restart if still recording (browser stopped unexpectedly)
366
+ console.log('[handleEnd] Auto-stop mode - browser stopped, restarting');
367
+ setTimeout(() => {
368
+ if (!isManualModeRef.current && isRecordingRef.current) {
369
+ SpeechRecognition.startListening({
370
+ continuous: true,
371
+ interimResults: true,
372
+ language: 'en-US',
373
+ });
374
+ }
375
+ }, 100);
376
+ }
377
+ };
378
+ const handleError = () => {
379
+ // Use ref to get current recording state (avoids stale closure)
380
+ if (!isRecordingRef.current)
381
+ return;
382
+ // In manual mode, try to restart on error
383
+ if (isManualModeRef.current) {
384
+ setTimeout(() => {
385
+ if (isManualModeRef.current && isRecordingRef.current) {
386
+ SpeechRecognition.startListening({
387
+ continuous: true,
388
+ interimResults: true,
389
+ language: 'en-US',
390
+ });
391
+ }
392
+ }, 100);
393
+ }
394
+ else {
395
+ // In auto-stop mode, restart on error as well
396
+ setTimeout(() => {
397
+ if (!isManualModeRef.current && isRecordingRef.current) {
398
+ SpeechRecognition.startListening({
399
+ continuous: true,
400
+ interimResults: true,
401
+ language: 'en-US',
402
+ });
403
+ }
404
+ }, 100);
405
+ }
406
+ };
407
+ recognition.addEventListener('end', handleEnd);
408
+ recognition.addEventListener('error', handleError);
409
+ return () => {
410
+ recognition.removeEventListener('end', handleEnd);
411
+ recognition.removeEventListener('error', handleError);
412
+ };
413
+ }, [isRecording]);
414
+ // Handle transcript changes - exactly like reference implementation
415
+ useEffect(() => {
416
+ if (!joditEditorRef.current)
417
+ return;
418
+ // Track changes like reference: https://github.com/RahulSM2002/SpeechRecognition/blob/main/src/components/SpeechToTextField.tsx
419
+ if (transcript !== lastTranscript) {
420
+ try {
421
+ const editor = joditEditorRef.current;
422
+ editor.focus();
423
+ // Get only the new text that was added (incremental update)
424
+ const newText = transcript.slice(lastTranscript.length);
425
+ if (newText) {
426
+ // Remove newlines to ensure single line
427
+ const cleanText = newText.replace(/\n/g, ' ').replace(/\r/g, '');
428
+ if (cleanText) {
429
+ // Move cursor to end of editor content
430
+ editor.selection.setCursorIn(editor.editor, false);
431
+ // Check if there's existing text and add space before first speech-to-text input
432
+ const currentValue = editor.value || '';
433
+ const hasExistingText = currentValue.trim().length > 0;
434
+ const isFirstInsert = lastTranscript.length === 0;
435
+ // Add space if there's existing text and this is the first insert after starting recording
436
+ // This ensures proper spacing when speech-to-text starts with existing content
437
+ const textToInsert = hasExistingText && isFirstInsert && !currentValue.endsWith(' ')
438
+ ? ' ' + cleanText
439
+ : cleanText;
440
+ // Use JoditEditor's insertHTML to insert plain text inline
441
+ // Escape HTML to ensure it's treated as plain text
442
+ const escapedText = textToInsert
443
+ .replace(/&/g, '&amp;')
444
+ .replace(/</g, '&lt;')
445
+ .replace(/>/g, '&gt;');
446
+ editor.selection.insertHTML(escapedText);
447
+ // Move cursor to end after insertion
448
+ editor.selection.setCursorIn(editor.editor, false);
449
+ // Update content ref
450
+ content.current = editor.value;
451
+ // Reset silence timer in auto-stop mode when new words are detected
452
+ if (!isManualModeRef.current && isRecordingRef.current) {
453
+ lastTranscriptTimeRef.current = Date.now();
454
+ // Restart the silence timer
455
+ if (silenceTimerRef.current) {
456
+ clearTimeout(silenceTimerRef.current);
457
+ }
458
+ silenceTimerRef.current = setTimeout(() => {
459
+ if (!isManualModeRef.current && isRecordingRef.current) {
460
+ console.log('[Auto-Stop] 5 seconds of silence - stopping');
461
+ isRecordingRef.current = false; // Set ref BEFORE stopping
462
+ setIsRecording(false);
463
+ SpeechRecognition.stopListening();
464
+ }
465
+ }, 5000); // 5 seconds of silence before auto-stop
466
+ }
467
+ }
468
+ }
469
+ setLastTranscript(transcript);
470
+ }
471
+ catch (error) {
472
+ console.error('Error updating transcript:', error);
473
+ }
474
+ }
475
+ }, [transcript, lastTranscript, isRecording]);
476
+ // Auto-stop silence detection constant (5 seconds of silence - increased for better experience)
477
+ const AUTO_STOP_SILENCE_DURATION = 5000;
478
+ // Function to start silence detection timer for auto-stop mode
479
+ const startSilenceTimer = useCallback(() => {
480
+ // Clear any existing timer
481
+ if (silenceTimerRef.current) {
482
+ clearTimeout(silenceTimerRef.current);
483
+ }
484
+ // Start new timer - will stop recording after silence duration
485
+ silenceTimerRef.current = setTimeout(() => {
486
+ if (!isManualModeRef.current && isRecordingRef.current) {
487
+ console.log('[Auto-Stop] Silence timer expired - stopping');
488
+ isRecordingRef.current = false; // Set ref BEFORE stopping to prevent handleEnd restart
489
+ setIsRecording(false);
490
+ SpeechRecognition.stopListening();
491
+ }
492
+ }, AUTO_STOP_SILENCE_DURATION);
493
+ }, []);
494
+ // Handle auto-stop recording (stops automatically after 5 seconds of silence)
495
+ const handleAutoStopRecordClick = useCallback(() => {
496
+ if (!browserSupportsSpeechRecognition) {
497
+ return;
498
+ }
499
+ if (isRecording) {
500
+ isManualModeRef.current = false;
501
+ setIsManualMode(false);
502
+ if (silenceTimerRef.current) {
503
+ clearTimeout(silenceTimerRef.current);
504
+ silenceTimerRef.current = null;
505
+ }
506
+ SpeechRecognition.stopListening();
507
+ isRecordingRef.current = false;
508
+ setIsRecording(false);
509
+ }
510
+ else {
511
+ isManualModeRef.current = false; // Auto-stop mode
512
+ setIsManualMode(false);
513
+ lastTranscriptTimeRef.current = Date.now();
514
+ resetTranscript();
515
+ setLastTranscript('');
516
+ SpeechRecognition.startListening({
517
+ continuous: true,
518
+ interimResults: true,
519
+ language: 'en-US',
520
+ });
521
+ isRecordingRef.current = true;
522
+ setIsRecording(true);
523
+ // Start silence detection timer
524
+ startSilenceTimer();
525
+ }
526
+ }, [
527
+ isRecording,
528
+ resetTranscript,
529
+ browserSupportsSpeechRecognition,
530
+ startSilenceTimer,
531
+ ]);
532
+ // Handle manual-stop recording (requires manual stop - continuous listening)
533
+ const handleManualStopRecordClick = useCallback(() => {
534
+ if (!browserSupportsSpeechRecognition) {
535
+ return;
536
+ }
537
+ if (isRecording) {
538
+ // STOP recording
539
+ isManualModeRef.current = false;
540
+ setIsManualMode(false);
541
+ // Stop watchdog
542
+ stopWatchdog();
543
+ // Clear any silence timer
544
+ if (silenceTimerRef.current) {
545
+ clearTimeout(silenceTimerRef.current);
546
+ silenceTimerRef.current = null;
547
+ }
548
+ SpeechRecognition.stopListening();
549
+ isRecordingRef.current = false;
550
+ setIsRecording(false);
551
+ console.log('[Manual Mode] Stopped by user');
552
+ }
553
+ else {
554
+ // START recording
555
+ isManualModeRef.current = true;
556
+ setIsManualMode(true);
557
+ isRecordingRef.current = true;
558
+ setIsRecording(true);
559
+ // Clear any silence timer (not used in manual mode)
560
+ if (silenceTimerRef.current) {
561
+ clearTimeout(silenceTimerRef.current);
562
+ silenceTimerRef.current = null;
563
+ }
564
+ resetTranscript();
565
+ setLastTranscript('');
566
+ // Start listening
567
+ SpeechRecognition.startListening({
568
+ continuous: true,
569
+ interimResults: true,
570
+ language: 'en-US',
571
+ });
572
+ // Start watchdog to keep it running forever
573
+ startWatchdog();
574
+ console.log('[Manual Mode] Started - will run until manually stopped');
575
+ }
576
+ }, [
577
+ isRecording,
578
+ resetTranscript,
579
+ browserSupportsSpeechRecognition,
580
+ startWatchdog,
581
+ stopWatchdog,
582
+ ]);
583
+ useEffect(() => {
584
+ if (parentDivRef.current) {
585
+ const { width, height } = parentDivRef.current.getBoundingClientRect();
586
+ setParentDivWidth(width - 16);
587
+ setParentDivHeight(height - 120);
588
+ }
589
+ }, [showModal]);
590
+ useEffect(() => {
591
+ content.current = details || '';
592
+ setShowModal(visible);
593
+ // Stop any ongoing speech recognition and reset state when modal opens/closes
594
+ // Always reset local recording state
595
+ isManualModeRef.current = false;
596
+ setIsManualMode(false);
597
+ if (silenceTimerRef.current) {
598
+ clearTimeout(silenceTimerRef.current);
599
+ silenceTimerRef.current = null;
600
+ }
601
+ SpeechRecognition.stopListening();
602
+ resetTranscript();
603
+ setLastTranscript('');
604
+ isRecordingRef.current = false;
605
+ setIsRecording(false);
606
+ // eslint-disable-next-line react-hooks/exhaustive-deps
607
+ }, [visible, details]);
608
+ const config = createJoditConfig({
609
+ height: parentDivHeight,
610
+ width: window.innerWidth / 1.24,
611
+ disabled: false,
612
+ buttons: JODIT_TOOLBAR_BUTTONS,
613
+ events: {
614
+ afterInit: (instance) => {
615
+ joditEditorRef.current = instance;
616
+ },
617
+ afterOpenPasteDialog: JODIT_PASTE_DIALOG_HANDLER.afterOpenPasteDialog,
618
+ },
619
+ uploader: {
620
+ url: uploaderUrl,
621
+ prepareData: function (data) {
622
+ data.append('folder', folder);
623
+ data.delete('path');
624
+ data.delete('source');
625
+ },
626
+ isSuccess: function (resp) {
627
+ console.log({ resp });
628
+ if (joditEditorRef.current) {
629
+ joditEditorRef.current.selection.insertHTML(`<img src=${resp?.url} alt="logo" style="width:100%;height:auto"/>`);
630
+ }
631
+ },
632
+ },
633
+ }, false // Disable speech recognition and remove speech button
634
+ );
635
+ return (_jsxs(_Fragment, { children: [_jsx("style", { children: keyframes }), showModal && (_jsx(_Fragment, { children: _jsx("div", { className: cn(s['fixed'], s['inset-0'], s['z-50'], s['flex'], s['justify-center'], s['items-center'], s['overflow-hidden'], classNames.overlay), style: {
636
+ ...modalStyles.overlay,
637
+ animation: 'overlayFadeIn 0.3s ease-out forwards',
638
+ ...styles.overlay,
639
+ }, children: _jsx("div", { ref: parentDivRef, className: cn(s['relative'], s['w-auto'], s['mx-auto'], classNames.container), style: {
640
+ width: `${config.width + 40}px`,
641
+ animation: 'modalSlideIn 0.4s cubic-bezier(0.16, 1, 0.3, 1) forwards',
642
+ ...styles.container,
643
+ }, children: _jsxs("div", { className: cn(s['relative'], s['flex'], s['flex-col'], s['w-full'], s['overflow-hidden']), style: {
644
+ ...modalStyles.container,
645
+ borderRadius: '24px',
646
+ height: window.outerHeight / 1.3,
647
+ }, children: [_jsxs("div", { className: cn(s['flex'], s['items-center'], s['justify-between'], s['p-4'], classNames.header), style: {
648
+ ...modalStyles.header,
649
+ borderRadius: '24px 24px 0 0',
650
+ padding: '16px 24px',
651
+ ...styles.header,
652
+ }, children: [_jsxs("div", { className: cn(s['flex'], s['items-center']), style: { gap: '12px' }, children: [_jsx("div", { className: cn(s['flex'], s['items-center'], s['justify-center']), style: {
653
+ width: '40px',
654
+ height: '40px',
655
+ borderRadius: '12px',
656
+ background: 'linear-gradient(135deg, #6366f1 0%, #8b5cf6 100%)',
657
+ boxShadow: '0 4px 12px rgba(99, 102, 241, 0.3)',
658
+ }, children: _jsx(Icon, { nameIcon: "IoDocumentTextOutline", propsIcon: { color: '#ffffff', size: 22 } }) }), _jsx("h3", { className: cn(s['text-xl'], s['font-semibold']), style: {
659
+ color: '#f8fafc',
660
+ fontWeight: 700,
661
+ letterSpacing: '-0.025em',
662
+ }, children: title })] }), _jsxs("div", { className: cn(s['flex'], s['items-center']), style: { gap: '12px' }, children: [isRecording && (_jsxs("div", { className: cn(s['flex'], s['items-center']), style: {
663
+ gap: '8px',
664
+ padding: '8px 16px',
665
+ borderRadius: '9999px',
666
+ background: isManualMode
667
+ ? 'linear-gradient(135deg, rgba(220, 38, 38, 0.2) 0%, rgba(185, 28, 28, 0.2) 100%)'
668
+ : 'linear-gradient(135deg, rgba(34, 197, 94, 0.2) 0%, rgba(22, 163, 74, 0.2) 100%)',
669
+ border: `1px solid ${isManualMode ? 'rgba(220, 38, 38, 0.4)' : 'rgba(34, 197, 94, 0.4)'}`,
670
+ }, children: [_jsx("span", { style: {
671
+ width: '10px',
672
+ height: '10px',
673
+ borderRadius: '50%',
674
+ background: isManualMode ? '#dc2626' : '#22c55e',
675
+ animation: 'pulseGlow 1.5s ease-in-out infinite',
676
+ } }), _jsx("span", { className: cn(s['text-sm']), style: {
677
+ fontWeight: 500,
678
+ color: isManualMode ? '#fca5a5' : '#86efac',
679
+ }, children: isManualMode ? 'Continuous Mode' : 'Auto-Stop Mode' })] })), shouldShowAIButton && (_jsx("button", { className: cn(s['relative'], s['flex'], s['items-center'], s['justify-center'], s['border-0'], s['outline-none'], s['focus:outline-none']), style: {
680
+ width: '44px',
681
+ height: '44px',
682
+ borderRadius: '12px',
683
+ cursor: 'pointer',
684
+ background: 'linear-gradient(135deg, #6366f1 0%, #a855f7 100%)',
685
+ boxShadow: isAIProcessing
686
+ ? '0 0 20px rgba(99, 102, 241, 0.6), 0 0 40px rgba(168, 85, 247, 0.4)'
687
+ : '0 4px 15px rgba(99, 102, 241, 0.4)',
688
+ animation: isAIProcessing
689
+ ? 'ai-pulse 1.5s ease-in-out infinite'
690
+ : 'none',
691
+ ...modalStyles.actionButton,
692
+ }, onClick: handleAIButtonClick, disabled: isAIProcessing, title: isAIProcessing
693
+ ? 'AI is processing...'
694
+ : 'Ask AI to enhance content', children: _jsx("span", { className: cn(s['font-bold']), style: {
695
+ fontSize: '14px',
696
+ color: '#ffffff',
697
+ animation: isAIProcessing
698
+ ? 'ai-text-pulse 1.5s ease-in-out infinite'
699
+ : 'none',
700
+ }, children: "AI" }) })), showVoiceButtons && browserSupportsSpeechRecognition && (_jsxs(_Fragment, { children: [_jsx("button", { className: cn(s['flex'], s['items-center'], s['justify-center'], s['border-0'], s['outline-none'], s['focus:outline-none']), style: {
701
+ width: '44px',
702
+ height: '44px',
703
+ borderRadius: '12px',
704
+ cursor: 'pointer',
705
+ background: isRecording && !isManualMode
706
+ ? 'linear-gradient(135deg, #22c55e 0%, #16a34a 100%)'
707
+ : 'linear-gradient(135deg, #4ade80 0%, #22c55e 100%)',
708
+ boxShadow: isRecording && !isManualMode
709
+ ? '0 0 20px rgba(34, 197, 94, 0.5), 0 0 40px rgba(34, 197, 94, 0.2)'
710
+ : '0 4px 12px rgba(34, 197, 94, 0.3)',
711
+ animation: isRecording && !isManualMode
712
+ ? 'pulseGlow 1.5s ease-in-out infinite'
713
+ : 'none',
714
+ ...modalStyles.actionButton,
715
+ }, onClick: handleAutoStopRecordClick, title: "Auto Stop - Stops automatically after 5 seconds of silence", children: _jsx(Icon, { nameIcon: isRecording && !isManualMode
716
+ ? 'IoMic'
717
+ : 'IoMicOutline', propsIcon: { color: '#ffffff', size: 22 } }) }), _jsx("button", { className: cn(s['flex'], s['items-center'], s['justify-center'], s['border-0'], s['outline-none'], s['focus:outline-none']), style: {
718
+ width: '44px',
719
+ height: '44px',
720
+ borderRadius: '12px',
721
+ cursor: 'pointer',
722
+ background: isRecording && isManualMode
723
+ ? 'linear-gradient(135deg, #dc2626 0%, #b91c1c 100%)'
724
+ : 'linear-gradient(135deg, #8b5cf6 0%, #7c3aed 100%)',
725
+ boxShadow: isRecording && isManualMode
726
+ ? '0 0 20px rgba(220, 38, 38, 0.5), 0 0 40px rgba(220, 38, 38, 0.2)'
727
+ : '0 4px 12px rgba(139, 92, 246, 0.3)',
728
+ animation: isRecording && isManualMode
729
+ ? 'recordingPulse 1.5s ease-in-out infinite'
730
+ : 'none',
731
+ ...modalStyles.actionButton,
732
+ }, onClick: handleManualStopRecordClick, title: "Continuous Mode - Records until manually stopped", children: _jsx(Icon, { nameIcon: isRecording && isManualMode
733
+ ? 'IoMic'
734
+ : 'IoMicOutline', propsIcon: { color: '#ffffff', size: 22 } }) })] })), _jsx("div", { style: {
735
+ width: '1px',
736
+ height: '32px',
737
+ margin: '0 4px',
738
+ background: 'rgba(148, 163, 184, 0.3)',
739
+ } }), _jsx("button", { className: cn(s['flex'], s['items-center'], s['justify-center'], s['outline-none'], s['focus:outline-none']), style: {
740
+ width: '44px',
741
+ height: '44px',
742
+ borderRadius: '12px',
743
+ cursor: 'pointer',
744
+ background: 'rgba(239, 68, 68, 0.1)',
745
+ border: '1px solid rgba(239, 68, 68, 0.3)',
746
+ ...modalStyles.actionButton,
747
+ }, onClick: () => {
748
+ isManualModeRef.current = false;
749
+ setIsManualMode(false);
750
+ stopWatchdog();
751
+ if (silenceTimerRef.current) {
752
+ clearTimeout(silenceTimerRef.current);
753
+ silenceTimerRef.current = null;
754
+ }
755
+ SpeechRecognition.stopListening();
756
+ resetTranscript();
757
+ setLastTranscript('');
758
+ isRecordingRef.current = false;
759
+ setIsRecording(false);
760
+ setShowModal(false);
761
+ onClose && onClose();
762
+ }, onMouseEnter: (e) => {
763
+ e.currentTarget.style.background =
764
+ 'rgba(239, 68, 68, 0.2)';
765
+ }, onMouseLeave: (e) => {
766
+ e.currentTarget.style.background =
767
+ 'rgba(239, 68, 68, 0.1)';
768
+ }, title: "Close", children: _jsx(Icon, { nameIcon: "IoCloseOutline", propsIcon: { color: '#f87171', size: 22 } }) })] })] }), _jsx("div", { id: "editorContent", className: cn(s['relative'], s['flex-1'], classNames.body), style: {
769
+ width: '100%',
770
+ overflow: 'hidden',
771
+ padding: '16px',
772
+ background: '#ffffff',
773
+ ...styles.body,
774
+ }, children: _jsx(JoditEditor, { ref: (editor) => {
775
+ joditEditorRef.current = editor;
776
+ }, value: content.current || '', config: {
777
+ ...config,
778
+ width: '100%',
779
+ height: window.outerHeight / 1.5 - 130,
780
+ }, onBlur: (newContent) => {
781
+ content.current = newContent;
782
+ } }) }), _jsxs("div", { className: cn(s['flex'], s['items-center'], s['justify-between'], s['p-4'], classNames.footer), style: {
783
+ ...modalStyles.footer,
784
+ borderRadius: '0 0 24px 24px',
785
+ padding: '16px 24px',
786
+ ...styles.footer,
787
+ }, children: [_jsx("div", { className: cn(s['flex'], s['items-center']), style: { gap: '8px' }, children: _jsxs("span", { style: { fontSize: '12px', color: '#94a3b8' }, children: ["Press", ' ', _jsx("kbd", { style: {
788
+ padding: '2px 6px',
789
+ borderRadius: '4px',
790
+ fontSize: '12px',
791
+ fontFamily: 'monospace',
792
+ background: 'rgba(148, 163, 184, 0.2)',
793
+ border: '1px solid rgba(148, 163, 184, 0.3)',
794
+ color: '#64748b',
795
+ }, children: "Esc" }), ' ', "to close"] }) }), _jsxs("div", { className: cn(s['flex'], s['items-center']), style: { gap: '12px' }, children: [_jsx("button", { className: cn(s['font-bold'], s['uppercase'], s['outline-none'], s['focus:outline-none']), style: {
796
+ padding: '10px 20px',
797
+ borderRadius: '12px',
798
+ fontSize: '14px',
799
+ fontWeight: 600,
800
+ background: 'transparent',
801
+ border: '1px solid rgba(148, 163, 184, 0.4)',
802
+ color: '#64748b',
803
+ cursor: 'pointer',
804
+ transition: 'all 0.2s ease',
805
+ }, type: "button", onClick: () => {
806
+ isManualModeRef.current = false;
807
+ setIsManualMode(false);
808
+ if (silenceTimerRef.current) {
809
+ clearTimeout(silenceTimerRef.current);
810
+ silenceTimerRef.current = null;
811
+ }
812
+ SpeechRecognition.stopListening();
813
+ resetTranscript();
814
+ setLastTranscript('');
815
+ isRecordingRef.current = false;
816
+ setIsRecording(false);
817
+ setShowModal(false);
818
+ onClose && onClose();
819
+ }, onMouseEnter: (e) => {
820
+ e.currentTarget.style.background =
821
+ 'rgba(148, 163, 184, 0.1)';
822
+ }, onMouseLeave: (e) => {
823
+ e.currentTarget.style.background = 'transparent';
824
+ }, children: "Cancel" }), _jsx("button", { className: cn(s['text-white'], s['font-bold'], s['uppercase'], s['shadow'], s['outline-none'], s['focus:outline-none']), style: {
825
+ padding: '10px 24px',
826
+ borderRadius: '12px',
827
+ fontSize: '14px',
828
+ fontWeight: 600,
829
+ background: 'linear-gradient(135deg, #22c55e 0%, #16a34a 100%)',
830
+ border: 'none',
831
+ boxShadow: '0 4px 12px rgba(34, 197, 94, 0.3)',
832
+ cursor: 'pointer',
833
+ transition: 'all 0.2s ease',
834
+ }, type: "button", onClick: () => {
835
+ setShowModal(false);
836
+ onSave(content.current);
837
+ }, onMouseEnter: (e) => {
838
+ e.currentTarget.style.boxShadow =
839
+ '0 6px 20px rgba(34, 197, 94, 0.4)';
840
+ }, onMouseLeave: (e) => {
841
+ e.currentTarget.style.boxShadow =
842
+ '0 4px 12px rgba(34, 197, 94, 0.3)';
843
+ }, children: "Save Changes" })] })] })] }) }) }) }))] }));
844
+ };
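
For reference, a minimal usage sketch (TSX) of the new ModalDocxContent component added in this version. It is based only on the props visible in the diff above; the root import path, the example prop values, and the custom aiService shown here are illustrative assumptions, not taken from the package's documentation.

    import { useState } from 'react';
    // Assumption: the component is re-exported from the package root (the Modal index files change in this diff).
    import { ModalDocxContent } from 'react-restyle-components';

    export function DocumentEditorExample() {
      const [open, setOpen] = useState(true);

      return (
        <ModalDocxContent
          visible={open}
          details="<p>Initial document HTML</p>"
          title="Document Editor"
          showAIButton={true}       // the AI button also requires a non-empty gemini_key (and not the 'xxx' placeholder)
          showVoiceButtons={true}   // voice buttons render only when the browser supports speech recognition
          gemini_key=""             // supply a real Gemini key here to enable the AI button
          folder="library"
          // Optional override of the default Gemini-backed service; the component reads a `text` field from the result.
          aiService={async (message, _geminiKey) => ({ text: `Rewritten: ${message}` })}
          onSave={(html: string) => {
            console.log('Saved HTML:', html); // onSave receives the editor content as an HTML string
            setOpen(false);
          }}
          onClose={() => setOpen(false)}
        />
      );
    }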