@contentgrowth/llm-service 0.8.4 → 0.8.6

This diff reflects the content of publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
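Summary of the changes: useSpeechRecognition gains two guard refs (isSimulatingRef, simulationTimeoutRef) so its development-only simulated input can be cancelled cleanly and a stray native onend event can no longer reset state mid-simulation, with the simulated transcript now arriving after 3s instead of 1s; useAudioRecorder is behaviorally unchanged but gains diagnostic logging; ChatInputArea replaces the boolean isVoiceActive with a voiceTrigger state recording how recording started ("click" or "space"), adds a defaultInputMode prop, reads the current message through a ref to avoid stale closures in voice callbacks, and adds Space-bar push-to-talk on a newly focusable voice container. Verbose console tracing is added throughout.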
@@ -148,7 +148,7 @@ function ChatHeader({
  }

  // src/ui/react/components/ChatInputArea.tsx
- import { useState as useState3, useRef as useRef3, useImperativeHandle, forwardRef } from "react";
+ import { useState as useState3, useRef as useRef3, useImperativeHandle, forwardRef, useEffect as useEffect3, useCallback as useCallback3 } from "react";
  import { StopIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline";

  // src/ui/react/hooks/useSpeechRecognition.ts
@@ -159,6 +159,8 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  const [error, setError] = useState(null);
  const [isSupported, setIsSupported] = useState(false);
  const recognitionRef = useRef(null);
+ const isSimulatingRef = useRef(false);
+ const simulationTimeoutRef = useRef(null);
  useEffect(() => {
  if (typeof window !== "undefined") {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
@@ -169,10 +171,16 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  recognition.interimResults = true;
  recognition.lang = language;
  recognition.onstart = () => {
+ console.log("[useSpeechRecognition] Native onstart event fired");
  setIsListening(true);
  setError(null);
  };
  recognition.onend = () => {
+ console.log("[useSpeechRecognition] Native onend event fired");
+ if (isSimulatingRef.current) {
+ console.log("[useSpeechRecognition] Ignoring onend due to simulation");
+ return;
+ }
  setIsListening(false);
  if (onEnd) onEnd();
  };
@@ -192,17 +200,21 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  setTranscript((prev) => prev + finalTranscript);
  };
  recognition.onerror = (event) => {
+ console.error("[useSpeechRecognition] Native onerror event:", event.error);
  if (event.error === "not-allowed" && process.env.NODE_ENV === "development") {
  console.warn("Speech recognition blocked. Simulating input for development...");
+ isSimulatingRef.current = true;
  setError(null);
  setIsListening(true);
- setTimeout(() => {
+ simulationTimeoutRef.current = setTimeout(() => {
  const mockText = "This is a simulated voice input for testing.";
  setTranscript((prev) => prev + (prev ? " " : "") + mockText);
  if (onResult) onResult(mockText, true);
+ isSimulatingRef.current = false;
  setIsListening(false);
  if (onEnd) onEnd();
- }, 1e3);
+ simulationTimeoutRef.current = null;
+ }, 3e3);
  return;
  }
  console.error("Speech recognition error", event.error);
@@ -213,26 +225,50 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  }
  }
  return () => {
- if (recognitionRef.current) {
- recognitionRef.current.stop();
+ if (isSimulatingRef.current && simulationTimeoutRef.current) {
+ clearTimeout(simulationTimeoutRef.current);
+ simulationTimeoutRef.current = null;
  }
+ recognitionRef.current.stop();
  };
- }, [onResult, onEnd]);
+ }, [onResult, onEnd, language]);
  const start = useCallback(() => {
+ console.log("[useSpeechRecognition] start() called");
  if (recognitionRef.current && !isListening) {
  try {
  setTranscript("");
  recognitionRef.current.start();
+ console.log("[useSpeechRecognition] recognitionRef.current.start() executed");
  } catch (e) {
- console.error("Failed to start speech recognition:", e);
+ console.error("[useSpeechRecognition] Failed to start speech recognition:", e);
  }
+ } else {
+ console.log("[useSpeechRecognition] start() ignored: already listening or no recognition instance", { hasInstance: !!recognitionRef.current, isListening });
  }
  }, [isListening]);
  const stop = useCallback(() => {
+ console.log("[useSpeechRecognition] stop() called");
+ if (isSimulatingRef.current) {
+ console.log("[useSpeechRecognition] Stopping simulation");
+ if (simulationTimeoutRef.current) {
+ clearTimeout(simulationTimeoutRef.current);
+ simulationTimeoutRef.current = null;
+ }
+ const mockText = "This is a simulated voice input for testing.";
+ setTranscript((prev) => prev + (prev ? " " : "") + mockText);
+ if (onResult) onResult(mockText, true);
+ isSimulatingRef.current = false;
+ setIsListening(false);
+ if (onEnd) onEnd();
+ return;
+ }
  if (recognitionRef.current && isListening) {
+ console.log("[useSpeechRecognition] recognitionRef.current.stop() executed");
  recognitionRef.current.stop();
+ } else {
+ console.log("[useSpeechRecognition] stop() ignored: not listening", { isListening });
  }
- }, [isListening]);
+ }, [isListening, onResult, onEnd]);
  const resetTranscript = useCallback(() => {
  setTranscript("");
  }, []);
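Note: the practical effect of the useSpeechRecognition changes is that stop() now resolves an in-flight dev simulation immediately (flushing the mock transcript) instead of leaving the timeout running, and the effect cleanup cancels a pending simulation. A minimal consumer sketch follows; the import path is an assumption (the diff only shows the hook's source location), and the onResult signature is inferred from the onResult(mockText, true) call above:

import { useSpeechRecognition } from "@contentgrowth/llm-service"; // assumed export

function DictationButton({ onText }: { onText: (text: string) => void }) {
  const speech = useSpeechRecognition(
    // In dev with mic permission denied, a mock transcript now arrives after
    // 3s (was 1s in 0.8.4), or immediately when stop() is called.
    (text: string, isFinal: boolean) => { if (isFinal) onText(text); },
    // As of 0.8.6 onEnd no longer also fires from the native onend event
    // while the simulation is active, so it runs once per session.
    () => console.log("dictation session ended"),
    "en-US"
  );
  if (!speech.isSupported) return null; // SpeechRecognition unavailable
  return <button onClick={speech.start}>Dictate</button>;
}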
@@ -256,23 +292,32 @@ var useAudioRecorder = (onStop) => {
  const mediaRecorderRef = useRef2(null);
  const chunksRef = useRef2([]);
  const start = useCallback2(async () => {
+ console.log("[useAudioRecorder] start() called");
  try {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+ console.log("[useAudioRecorder] Stream acquired", stream.id);
  const mediaRecorder = new MediaRecorder(stream);
  mediaRecorderRef.current = mediaRecorder;
  chunksRef.current = [];
  mediaRecorder.ondataavailable = (e) => {
+ console.log(`[useAudioRecorder] Data available, size: ${e.data.size}`);
  if (e.data.size > 0) {
  chunksRef.current.push(e.data);
  }
  };
  mediaRecorder.onstop = () => {
+ console.log(`[useAudioRecorder] Recorder stopped. Chunks: ${chunksRef.current.length}`);
  const audioBlob = new Blob(chunksRef.current, { type: "audio/webm" });
+ console.log(`[useAudioRecorder] Blob created. Size: ${audioBlob.size}, Type: ${audioBlob.type}`);
  setBlob(audioBlob);
  setIsRecording(false);
  if (onStop) onStop(audioBlob);
- stream.getTracks().forEach((track) => track.stop());
+ stream.getTracks().forEach((track) => {
+ console.log(`[useAudioRecorder] Stopping track: ${track.label} (${track.kind})`);
+ track.stop();
+ });
  };
+ console.log("[useAudioRecorder] Starting MediaRecorder...");
  mediaRecorder.start();
  setIsRecording(true);
  setError(null);
@@ -282,8 +327,12 @@ var useAudioRecorder = (onStop) => {
  }
  }, [onStop]);
  const stop = useCallback2(() => {
+ console.log("[useAudioRecorder] stop() called");
  if (mediaRecorderRef.current && mediaRecorderRef.current.state !== "inactive") {
+ console.log(`[useAudioRecorder] Stopping MediaRecorder. State was: ${mediaRecorderRef.current.state}`);
  mediaRecorderRef.current.stop();
+ } else {
+ console.log("[useAudioRecorder] stop() ignored. Recorder is inactive or missing.");
  }
  }, []);
  return {
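Note: both useAudioRecorder hunks only add logging; the contract is unchanged. onStop receives a single audio/webm Blob assembled from the recorded chunks once MediaRecorder fires its stop event, and every track of the underlying stream is released. A contract sketch; isRecording being part of the returned object is an assumption (the return { above is truncated in this hunk), and uploadForTranscription is a hypothetical helper:

function RecorderExample() {
  const recorder = useAudioRecorder(async (blob: Blob) => {
    // blob.type is "audio/webm"; chunks are flushed when the recorder stops.
    await uploadForTranscription(blob); // hypothetical STT upload
  });
  return (
    <button onClick={() => (recorder.isRecording ? recorder.stop() : recorder.start())}>
      {recorder.isRecording ? "Stop" : "Record"}
    </button>
  );
}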
@@ -330,16 +379,27 @@ var ChatInputArea = forwardRef(({
  hintText,
  placeholder,
  value,
- onChange
+ onChange,
+ defaultInputMode = "text"
  }, ref) => {
  var _a, _b, _c, _d;
  const [internalMessage, setInternalMessage] = useState3("");
- const [isVoiceActive, setIsVoiceActive] = useState3(false);
- const [inputMode, setInputMode] = useState3("text");
+ const [voiceTrigger, setVoiceTrigger] = useState3(null);
+ const [inputMode, setInputMode] = useState3(defaultInputMode);
+ const [isFocused, setIsFocused] = useState3(false);
  const textareaRef = useRef3(null);
  const measurementRef = useRef3(null);
+ const voiceContainerRef = useRef3(null);
+ useEffect3(() => {
+ var _a2;
+ if (inputMode === "voice") {
+ (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
+ }
+ }, [inputMode]);
  const isControlled = value !== void 0;
  const message = isControlled ? value : internalMessage;
+ const messageRef = useRef3(message);
+ messageRef.current = message;
  const { voice: globalVoice } = useChatConfig();
  const isVoiceEnabled = (_a = globalVoice == null ? void 0 : globalVoice.enabled) != null ? _a : !!propVoiceConfig;
  const voiceConfig = isVoiceEnabled ? propVoiceConfig || (globalVoice == null ? void 0 : globalVoice.config) : void 0;
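Note: the new defaultInputMode prop lets a host open the composer directly in voice mode (the mode previously always started as "text"). A usage sketch; the import path is an assumption, while the prop and the ref handle's focus/setValue methods both appear in this diff:

import { useRef } from "react";
import { ChatInputArea } from "@contentgrowth/llm-service"; // assumed entry point

function Composer() {
  // focus() now targets the focusable voice container when in voice mode,
  // so Space push-to-talk works immediately after a programmatic focus.
  const inputRef = useRef<{ focus: () => void; setValue: (v: string) => void }>(null);
  return (
    <ChatInputArea
      ref={inputRef}
      defaultInputMode="voice" // new in 0.8.6; defaults to "text"
      placeholder="Ask anything..."
    />
  );
}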
@@ -355,31 +415,62 @@ var ChatInputArea = forwardRef(({
  }
  };
  const isInputDisabled = (currentTask == null ? void 0 : currentTask.complete) || (lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactive) && (((_b = lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactiveData) == null ? void 0 : _b.function) === "form" && !(lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.isResponseSubmitted) || ((_c = lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactiveData) == null ? void 0 : _c.function) === "confirm" && !(lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.isResponseSubmitted));
- useProactiveResize(textareaRef, measurementRef, message, isInputDisabled || isVoiceActive || inputMode === "voice");
- const nativeSpeech = useSpeechRecognition((text) => {
- triggerChange(message + (message ? " " : "") + text);
- }, () => {
+ useProactiveResize(textareaRef, measurementRef, message, isInputDisabled || !!voiceTrigger || inputMode === "voice");
+ const handleVoiceKeyDown = (e) => {
+ if (inputMode !== "voice" || isInputDisabled) return;
+ if (e.code !== "Space") return;
+ const activeElement = document.activeElement;
+ const isInputActive = activeElement && (activeElement.tagName === "INPUT" || activeElement.tagName === "TEXTAREA" || activeElement instanceof HTMLElement && activeElement.isContentEditable);
+ if (isInputActive) return;
+ e.preventDefault();
+ e.stopPropagation();
+ if (voiceTrigger === "click") return;
+ if (!e.repeat && !voiceTrigger) {
+ startRecording("space");
+ }
+ };
+ const handleVoiceKeyUp = (e) => {
+ if (inputMode !== "voice" || isInputDisabled) return;
+ if (e.code === "Space") {
+ if (voiceTrigger === "space") {
+ e.preventDefault();
+ stopRecording();
+ }
+ }
+ };
+ const handleVoiceResult = useCallback3((text) => {
+ console.log("[ChatInputArea] nativeSpeech result:", text);
+ triggerChange(messageRef.current + (messageRef.current ? " " : "") + text);
+ }, []);
+ const handleVoiceEnd = useCallback3(() => {
  var _a2;
- setIsVoiceActive(false);
+ console.log("[ChatInputArea] nativeSpeech onEnd triggered");
+ setVoiceTrigger(null);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceEnd) == null ? void 0 : _a2.call(voiceConfig);
- }, voiceConfig == null ? void 0 : voiceConfig.language);
+ }, [voiceConfig]);
+ const nativeSpeech = useSpeechRecognition(handleVoiceResult, handleVoiceEnd, voiceConfig == null ? void 0 : voiceConfig.language);
  const customRecorder = useAudioRecorder(async (blob) => {
  var _a2;
- setIsVoiceActive(false);
+ console.log("[ChatInputArea] customRecorder onStop triggered");
+ setVoiceTrigger(null);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceEnd) == null ? void 0 : _a2.call(voiceConfig);
  if (voiceConfig == null ? void 0 : voiceConfig.onAudioCapture) {
  try {
  const text = await voiceConfig.onAudioCapture(blob);
- if (text) triggerChange(message + (message ? " " : "") + text);
+ if (text) triggerChange(messageRef.current + (messageRef.current ? " " : "") + text);
  } catch (e) {
- console.error("Audio capture failed", e);
+ console.error("[ChatInputArea] Audio capture failed", e);
  }
  }
  });
  useImperativeHandle(ref, () => ({
  focus: () => {
- var _a2;
- (_a2 = textareaRef.current) == null ? void 0 : _a2.focus();
+ var _a2, _b2;
+ if (inputMode === "voice") {
+ (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
+ } else {
+ (_b2 = textareaRef.current) == null ? void 0 : _b2.focus();
+ }
  },
  setValue: (newValue) => {
  triggerChange(newValue);
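Note: two details in the block above are easy to miss. The Space-bar handlers only arm push-to-talk when the key event does not originate in an input, textarea, or contenteditable element, ignore auto-repeat (e.repeat), and never interrupt a click-initiated recording. And the callbacks are now memoized because 0.8.4 passed fresh inline closures on every render, which re-ran useSpeechRecognition's setup effect (its dependency list includes onResult and onEnd); messageRef keeps the appended message current despite handleVoiceResult's empty dependency array, which would otherwise capture a stale value.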
@@ -408,24 +499,29 @@ var ChatInputArea = forwardRef(({
  handleSubmit();
  }
  };
- const startRecording = async () => {
+ const startRecording = async (trigger) => {
  var _a2;
- if (isVoiceActive) return;
- setIsVoiceActive(true);
+ console.log(`[ChatInputArea] startRecording triggered by: ${trigger}, current voiceTrigger: ${voiceTrigger}`);
+ console.log("[ChatInputArea] voiceConfig:", voiceConfig);
+ if (voiceTrigger) return;
+ setVoiceTrigger(trigger);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
  if (!nativeSpeech.isSupported) {
  alert("Speech recognition is not supported in this browser.");
- setIsVoiceActive(false);
+ setVoiceTrigger(null);
  return;
  }
+ console.log("[ChatInputArea] Starting nativeSpeech");
  nativeSpeech.start();
  } else {
+ console.log("[ChatInputArea] Starting customRecorder");
  await customRecorder.start();
  }
  };
  const stopRecording = () => {
- if (!isVoiceActive) return;
+ console.log(`[ChatInputArea] stopRecording called. Current voiceTrigger: ${voiceTrigger}`);
+ if (!voiceTrigger) return;
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
  nativeSpeech.stop();
  } else {
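Note: both recording paths branch on the voice configuration. The shape below is inferred from the property accesses in this diff (mode, language, onVoiceStart, onVoiceEnd, onAudioCapture); the "custom" mode name and transcribeBlob helper are illustrative assumptions, since anything other than mode: "native" falls through to useAudioRecorder:

// Native mode: browser SpeechRecognition; recognized text is appended to the message.
const nativeVoice = {
  mode: "native",
  language: "en-US", // forwarded to useSpeechRecognition
  onVoiceStart: () => console.log("voice started"),
  onVoiceEnd: () => console.log("voice ended"),
};

// Recorder mode: mic audio is captured and handed to onAudioCapture; the
// resolved string is appended to the current message (via messageRef in
// 0.8.6, fixing the stale-closure append in 0.8.4).
const recorderVoice = {
  mode: "custom", // assumption: any non-"native" value uses useAudioRecorder
  onAudioCapture: async (blob: Blob) => transcribeBlob(blob), // hypothetical STT call
};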
@@ -434,7 +530,7 @@ var ChatInputArea = forwardRef(({
  };
  const getPlaceholder = () => {
  if (placeholder) return placeholder;
- if (isVoiceActive) return "Listening...";
+ if (voiceTrigger) return "Listening...";
  if (currentTask == null ? void 0 : currentTask.complete) {
  return "Task completed!";
  }
@@ -466,7 +562,7 @@ var ChatInputArea = forwardRef(({
  {
  type: "button",
  onClick: () => {
- if (inputMode === "voice" && isVoiceActive) {
+ if (inputMode === "voice" && voiceTrigger) {
  stopRecording();
  }
  setInputMode((prev) => prev === "text" ? "voice" : "text");
@@ -482,117 +578,149 @@ var ChatInputArea = forwardRef(({
  )
  }
  ),
- /* @__PURE__ */ jsxs3("div", { className: "flex-1 flex items-center border border-gray-300 rounded-lg overflow-hidden focus-within:ring-2 focus-within:ring-blue-500 focus-within:border-blue-500 bg-white min-h-[42px] mb-1", children: [
- inputMode === "text" && /* @__PURE__ */ jsxs3(Fragment, { children: [
- /* @__PURE__ */ jsx5(
- "span",
- {
- ref: measurementRef,
- className: "absolute invisible whitespace-pre-wrap p-0 m-0 text-gray-700 leading-6",
- style: { fontSize: "1rem" }
- }
- ),
- /* @__PURE__ */ jsx5(
- "textarea",
- {
- ref: textareaRef,
- value: message,
- onChange: (e) => {
- if (isControlled && onChange) {
- onChange(e);
- } else {
- setInternalMessage(e.target.value);
+ /* @__PURE__ */ jsxs3(
+ "div",
+ {
+ ref: voiceContainerRef,
+ tabIndex: inputMode === "voice" ? 0 : -1,
+ onKeyDown: handleVoiceKeyDown,
+ onKeyUp: handleVoiceKeyUp,
+ onFocus: () => setIsFocused(true),
+ onBlur: () => setIsFocused(false),
+ className: "flex-1 flex items-center border border-gray-300 rounded-lg overflow-hidden outline-none focus-within:ring-2 focus-within:ring-blue-500 focus-within:border-blue-500 bg-white min-h-[42px] mb-1",
+ children: [
+ inputMode === "text" && /* @__PURE__ */ jsxs3(Fragment, { children: [
+ /* @__PURE__ */ jsx5(
+ "span",
+ {
+ ref: measurementRef,
+ className: "absolute invisible whitespace-pre-wrap p-0 m-0 text-gray-700 leading-6",
+ style: { fontSize: "1rem" }
  }
- },
- onKeyDown: handleKeyDown,
- placeholder: getPlaceholder(),
- disabled: isInputDisabled || isVoiceActive,
- rows: 1,
- className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 disabled:bg-gray-100 resize-none leading-6 w-full ${isInputDisabled ? "cursor-not-allowed" : ""}`
- }
- )
- ] }),
- inputMode === "voice" && /* @__PURE__ */ jsx5("div", { className: "flex-grow flex justify-center items-center p-1", children: /* @__PURE__ */ jsx5(
- "button",
- {
- type: "button",
- onMouseDown: startRecording,
- onMouseUp: stopRecording,
- onTouchStart: startRecording,
- onTouchEnd: stopRecording,
- disabled: isInputDisabled,
- className: `flex-grow py-2 text-center font-medium rounded-md transition-colors select-none ${isVoiceActive ? "bg-blue-100 text-blue-700" : "bg-gray-50 text-gray-700 hover:bg-gray-100"}`,
- children: isVoiceActive ? "Release to Send" : "Hold to Talk"
- }
- ) }),
- (inputMode === "text" || isSending) && /* @__PURE__ */ jsxs3("div", { className: "relative mx-2 flex-shrink-0", children: [
- isSending && /* @__PURE__ */ jsx5("div", { className: "absolute -inset-1", children: /* @__PURE__ */ jsxs3(
- "svg",
- {
- className: "animate-spin h-full w-full text-blue-500 opacity-75",
- xmlns: "http://www.w3.org/2000/svg",
- fill: "none",
- viewBox: "0 0 24 24",
- children: [
- /* @__PURE__ */ jsx5(
- "circle",
- {
- className: "opacity-25",
- cx: "12",
- cy: "12",
- r: "10",
- stroke: "currentColor",
- strokeWidth: "4"
- }
- ),
- /* @__PURE__ */ jsx5(
- "path",
- {
- className: "opacity-75",
- fill: "currentColor",
- d: "M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
+ ),
+ /* @__PURE__ */ jsx5(
+ "textarea",
+ {
+ ref: textareaRef,
+ value: message,
+ onChange: (e) => {
+ if (isControlled && onChange) {
+ onChange(e);
+ } else {
+ setInternalMessage(e.target.value);
+ }
+ },
+ onKeyDown: handleKeyDown,
+ placeholder: getPlaceholder(),
+ disabled: isInputDisabled || !!voiceTrigger,
+ rows: 1,
+ className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 disabled:bg-gray-100 resize-none leading-6 w-full ${isInputDisabled ? "cursor-not-allowed" : ""}`
+ }
+ )
+ ] }),
+ inputMode === "voice" && /* @__PURE__ */ jsx5("div", { className: "flex-grow flex flex-col justify-center items-center p-1 relative", children: /* @__PURE__ */ jsx5(
+ "button",
+ {
+ type: "button",
+ onClick: () => {
+ if (voiceTrigger === "click") {
+ stopRecording();
+ } else if (!voiceTrigger) {
+ startRecording("click");
  }
- )
- ]
- }
- ) }),
- /* @__PURE__ */ jsx5(
- "button",
- {
- type: "button",
- onClick: (e) => {
- if (isSending && onStop) {
- e.preventDefault();
- onStop();
- } else {
- handleSubmit();
+ },
+ disabled: isInputDisabled || voiceTrigger === "space",
+ className: `w-full py-2 text-center font-medium rounded-md transition-all select-none flex items-center justify-center gap-2 ${voiceTrigger ? "bg-red-50 text-red-600 animate-pulse border border-red-200" : "bg-gray-50 text-gray-700 hover:bg-gray-100"} ${voiceTrigger === "space" ? "opacity-90 cursor-default" : ""}`,
+ children: voiceTrigger ? /* @__PURE__ */ jsxs3(Fragment, { children: [
+ /* @__PURE__ */ jsx5("div", { className: "w-2 h-2 rounded-full bg-red-500 animate-ping mr-2" }),
+ /* @__PURE__ */ jsxs3("span", { children: [
+ "Listening... ",
+ voiceTrigger === "space" ? "(Release Space to send)" : "Tap to send"
+ ] })
+ ] }) : /* @__PURE__ */ jsx5("span", { children: "Tap to Talk" })
+ }
+ ) }),
+ (inputMode === "text" || isSending) && /* @__PURE__ */ jsxs3("div", { className: "relative mx-2 flex-shrink-0", children: [
+ isSending && /* @__PURE__ */ jsx5("div", { className: "absolute -inset-1", children: /* @__PURE__ */ jsxs3(
+ "svg",
+ {
+ className: "animate-spin h-full w-full text-blue-500 opacity-75",
+ xmlns: "http://www.w3.org/2000/svg",
+ fill: "none",
+ viewBox: "0 0 24 24",
+ children: [
+ /* @__PURE__ */ jsx5(
+ "circle",
+ {
+ className: "opacity-25",
+ cx: "12",
+ cy: "12",
+ r: "10",
+ stroke: "currentColor",
+ strokeWidth: "4"
+ }
+ ),
+ /* @__PURE__ */ jsx5(
+ "path",
+ {
+ className: "opacity-75",
+ fill: "currentColor",
+ d: "M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
+ }
+ )
+ ]
  }
- },
- disabled: (currentTask == null ? void 0 : currentTask.complete) || isSending && !onStop || isInputDisabled || isVoiceActive,
- className: `relative z-10 text-white rounded-full p-2 transition-colors duration-200 disabled:bg-gray-400 disabled:cursor-not-allowed ${isSending && onStop ? "bg-red-500 hover:bg-red-600" : "bg-blue-600 hover:bg-blue-700"}`,
- title: isSending && onStop ? "Stop generating" : "Send message",
- children: isSending ? onStop ? /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" }) : (
- // AND we show the overlay spinner outside?
- // Actually `ChatInput.tsx` lines 117-140 are `isLoading && (...)`. It is always shown when loading.
- // So we have a spinner ring AROUND the button (absolute -inset-1).
- // AND potentially a spinner INSIDE the button if no onStop?
- // In my case, I will stick to:
- // If onStop: Show StopIcon. Button is Red.
- // If !onStop: Show Spinner inside? Or just let the outer ring do the work?
- // Legacy `Spinner` component usage inside button suggests double spinner if we are not careful.
- // But usually `onStop` is provided for streaming.
- // If I look at the screenshot, it shows a RED button (with stop icon) and a BLUE ring around it.
- // That matches: Red button (bg-red-500) + Blue Spinner Ring (text-blue-500).
- // So I will replicate that structure.
- /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" })
- ) : /* @__PURE__ */ jsx5(PaperAirplaneIcon, { className: "h-5 w-5" })
- }
- )
- ] })
- ] })
+ ) }),
+ /* @__PURE__ */ jsx5(
+ "button",
+ {
+ type: "button",
+ onClick: (e) => {
+ if (isSending && onStop) {
+ e.preventDefault();
+ onStop();
+ } else {
+ handleSubmit();
+ }
+ },
+ disabled: (currentTask == null ? void 0 : currentTask.complete) || isSending && !onStop || isInputDisabled || !!voiceTrigger,
+ className: `relative z-10 text-white rounded-full p-2 transition-colors duration-200 disabled:bg-gray-400 disabled:cursor-not-allowed ${isSending && onStop ? "bg-red-500 hover:bg-red-600" : "bg-blue-600 hover:bg-blue-700"}`,
+ title: isSending && onStop ? "Stop generating" : "Send message",
+ children: isSending ? onStop ? /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" }) : (
+ // AND we show the overlay spinner outside?
+ // Actually `ChatInput.tsx` lines 117-140 are `isLoading && (...)`. It is always shown when loading.
+ // So we have a spinner ring AROUND the button (absolute -inset-1).
+ // AND potentially a spinner INSIDE the button if no onStop?
+ // In my case, I will stick to:
+ // If onStop: Show StopIcon. Button is Red.
+ // If !onStop: Show Spinner inside? Or just let the outer ring do the work?
+ // Legacy `Spinner` component usage inside button suggests double spinner if we are not careful.
+ // But usually `onStop` is provided for streaming.
+ // If I look at the screenshot, it shows a RED button (with stop icon) and a BLUE ring around it.
+ // That matches: Red button (bg-red-500) + Blue Spinner Ring (text-blue-500).
+ // So I will replicate that structure.
+ /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" })
+ ) : /* @__PURE__ */ jsx5(PaperAirplaneIcon, { className: "h-5 w-5" })
+ }
+ )
+ ] })
+ ]
+ }
+ )
  ] }),
  inputHint && /* @__PURE__ */ jsx5("div", { className: "text-sm text-red-500 bg-red-50 py-1 px-4 rounded-lg mt-1", children: inputHint }),
- hintText && inputMode === "text" && /* @__PURE__ */ jsx5("p", { className: "text-xs text-gray-500 ml-12 mb-2 mt-1", children: hintText })
+ hintText && inputMode === "text" && /* @__PURE__ */ jsx5("p", { className: "text-xs text-gray-500 ml-12 mb-2 mt-1", children: hintText }),
+ inputMode === "voice" && !voiceTrigger && /* @__PURE__ */ jsx5(
+ "p",
+ {
+ className: "text-[10px] text-gray-400 font-medium ml-12 text-center -mt-1 mb-1 cursor-pointer hover:text-gray-600 transition-colors",
+ onClick: () => {
+ var _a2;
+ return (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
+ },
+ children: isFocused ? "Click to talk or hold space to talk" : "Tap to talk or click here to focus and push space to talk"
+ }
+ )
  ] });
  });
  ChatInputArea.displayName = "ChatInputArea";