@contentgrowth/llm-service 0.8.8 → 0.9.0

@@ -236,6 +236,7 @@ declare const useSpeechRecognition: (onResult?: (text: string, isFinal: boolean)
 
  interface AudioRecorderHook {
  isRecording: boolean;
+ isSimulated: boolean;
  start: () => Promise<void>;
  stop: () => void;
  blob: Blob | null;
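
The hook's public surface gains `isSimulated`, so consumers can tell a real MediaRecorder session from the development-only fallback introduced further down. A minimal consumption sketch (the component and import path are illustrative assumptions, not part of the package):

    import { useAudioRecorder } from "@contentgrowth/llm-service"; // hypothetical import path

    function RecordButton() {
      // isSimulated stays false during real capture; it is true only in the dev fallback.
      const { isRecording, isSimulated, start, stop } = useAudioRecorder();
      const label = isRecording
        ? (isSimulated ? "Recording (simulated)" : "Recording")
        : "Record";
      return <button onClick={() => (isRecording ? stop() : start())}>{label}</button>;
    }
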
@@ -148,8 +148,8 @@ function ChatHeader({
  }
 
  // src/ui/react/components/ChatInputArea.tsx
- import { useState as useState3, useRef as useRef3, useImperativeHandle, forwardRef, useEffect as useEffect3, useCallback as useCallback3 } from "react";
- import { StopIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline";
+ import { useState as useState3, useRef as useRef3, useImperativeHandle, forwardRef, useEffect as useEffect3, useCallback as useCallback3, useLayoutEffect } from "react";
+ import { MicrophoneIcon, StopIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline";
 
  // src/ui/react/hooks/useSpeechRecognition.ts
  import { useState, useEffect, useCallback, useRef } from "react";
@@ -171,14 +171,11 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  recognition.interimResults = true;
  recognition.lang = language;
  recognition.onstart = () => {
- console.log("[useSpeechRecognition] Native onstart event fired");
  setIsListening(true);
  setError(null);
  };
  recognition.onend = () => {
- console.log("[useSpeechRecognition] Native onend event fired");
  if (isSimulatingRef.current) {
- console.log("[useSpeechRecognition] Ignoring onend due to simulation");
  return;
  }
  setIsListening(false);
@@ -233,23 +230,18 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  };
  }, [onResult, onEnd, language]);
  const start = useCallback(() => {
- console.log("[useSpeechRecognition] start() called");
  if (recognitionRef.current && !isListening) {
  try {
  setTranscript("");
  recognitionRef.current.start();
- console.log("[useSpeechRecognition] recognitionRef.current.start() executed");
  } catch (e) {
  console.error("[useSpeechRecognition] Failed to start speech recognition:", e);
  }
  } else {
- console.log("[useSpeechRecognition] start() ignored: already listening or no recognition instance", { hasInstance: !!recognitionRef.current, isListening });
  }
  }, [isListening]);
  const stop = useCallback(() => {
- console.log("[useSpeechRecognition] stop() called");
  if (isSimulatingRef.current) {
- console.log("[useSpeechRecognition] Stopping simulation");
  if (simulationTimeoutRef.current) {
  clearTimeout(simulationTimeoutRef.current);
  simulationTimeoutRef.current = null;
@@ -263,10 +255,8 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  return;
  }
  if (recognitionRef.current && isListening) {
- console.log("[useSpeechRecognition] recognitionRef.current.stop() executed");
  recognitionRef.current.stop();
  } else {
- console.log("[useSpeechRecognition] stop() ignored: not listening", { isListening });
  }
  }, [isListening, onResult, onEnd]);
  const resetTranscript = useCallback(() => {
@@ -287,37 +277,41 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  import { useState as useState2, useRef as useRef2, useCallback as useCallback2 } from "react";
  var useAudioRecorder = (onStop) => {
  const [isRecording, setIsRecording] = useState2(false);
+ const [isSimulated, setIsSimulated] = useState2(false);
  const [blob, setBlob] = useState2(null);
  const [error, setError] = useState2(null);
  const mediaRecorderRef = useRef2(null);
  const chunksRef = useRef2([]);
  const start = useCallback2(async () => {
- console.log("[useAudioRecorder] start() called");
  try {
+ if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
+ if (process.env.NODE_ENV === "development") {
+ console.warn("[useAudioRecorder] MediaDevices not available. Entering simulation mode...");
+ setIsRecording(true);
+ setIsSimulated(true);
+ setError(null);
+ return;
+ }
+ throw new Error("Media devices not available. Ensure you are using HTTPS or localhost.");
+ }
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
- console.log("[useAudioRecorder] Stream acquired", stream.id);
  const mediaRecorder = new MediaRecorder(stream);
  mediaRecorderRef.current = mediaRecorder;
  chunksRef.current = [];
  mediaRecorder.ondataavailable = (e) => {
- console.log(`[useAudioRecorder] Data available, size: ${e.data.size}`);
  if (e.data.size > 0) {
  chunksRef.current.push(e.data);
  }
  };
  mediaRecorder.onstop = () => {
- console.log(`[useAudioRecorder] Recorder stopped. Chunks: ${chunksRef.current.length}`);
  const audioBlob = new Blob(chunksRef.current, { type: "audio/webm" });
- console.log(`[useAudioRecorder] Blob created. Size: ${audioBlob.size}, Type: ${audioBlob.type}`);
  setBlob(audioBlob);
  setIsRecording(false);
  if (onStop) onStop(audioBlob);
  stream.getTracks().forEach((track) => {
- console.log(`[useAudioRecorder] Stopping track: ${track.label} (${track.kind})`);
  track.stop();
  });
  };
- console.log("[useAudioRecorder] Starting MediaRecorder...");
  mediaRecorder.start();
  setIsRecording(true);
  setError(null);
@@ -327,16 +321,21 @@ var useAudioRecorder = (onStop) => {
  }
  }, [onStop]);
  const stop = useCallback2(() => {
- console.log("[useAudioRecorder] stop() called");
+ if (isSimulated) {
+ setIsRecording(false);
+ setIsSimulated(false);
+ const simulatedBlob = new Blob(["simulated speech"], { type: "audio/simulated" });
+ setBlob(simulatedBlob);
+ if (onStop) onStop(simulatedBlob);
+ return;
+ }
  if (mediaRecorderRef.current && mediaRecorderRef.current.state !== "inactive") {
- console.log(`[useAudioRecorder] Stopping MediaRecorder. State was: ${mediaRecorderRef.current.state}`);
  mediaRecorderRef.current.stop();
- } else {
- console.log("[useAudioRecorder] stop() ignored. Recorder is inactive or missing.");
  }
- }, []);
+ }, [isSimulated, onStop]);
  return {
  isRecording,
+ isSimulated,
  start,
  stop,
  blob,
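
When `getUserMedia` is unavailable (non-secure context, no microphone) and `NODE_ENV` is `development`, `start()` now enters a simulation instead of throwing, and `stop()` resolves it with a sentinel `Blob` of type `audio/simulated` rather than a real `audio/webm` recording. A sketch of an `onStop` callback that branches on the sentinel (`transcribe` is a hypothetical helper, not a package export):

    const recorder = useAudioRecorder(async (blob: Blob) => {
      if (blob.type === "audio/simulated") {
        // Dev fallback: no real audio was captured, so skip transcription.
        console.warn("Simulated capture:", await blob.text()); // "simulated speech"
        return;
      }
      // Real capture path: blob is audio/webm from MediaRecorder.
      const text = await transcribe(blob); // hypothetical server-side transcription
      console.log("Transcript:", text);
    });
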
@@ -366,7 +365,7 @@ function useProactiveResize(textareaRef, measurementRef, value, disabled) {
  }
 
  // src/ui/react/components/ChatInputArea.tsx
- import { Fragment, jsx as jsx5, jsxs as jsxs3 } from "react/jsx-runtime";
+ import { jsx as jsx5, jsxs as jsxs3 } from "react/jsx-runtime";
  var ChatInputArea = forwardRef(({
  onSubmit,
  isSending,
@@ -385,21 +384,22 @@ var ChatInputArea = forwardRef(({
  var _a, _b, _c, _d;
  const [internalMessage, setInternalMessage] = useState3("");
  const [voiceTrigger, setVoiceTrigger] = useState3(null);
- const [inputMode, setInputMode] = useState3(defaultInputMode);
  const [isFocused, setIsFocused] = useState3(false);
  const textareaRef = useRef3(null);
  const measurementRef = useRef3(null);
- const voiceContainerRef = useRef3(null);
- useEffect3(() => {
- var _a2;
- if (inputMode === "voice") {
- (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
- }
- }, [inputMode]);
+ const pendingSelectionRef = useRef3(null);
  const isControlled = value !== void 0;
  const message = isControlled ? value : internalMessage;
  const messageRef = useRef3(message);
  messageRef.current = message;
+ useLayoutEffect(() => {
+ if (pendingSelectionRef.current && textareaRef.current) {
+ const { start, end } = pendingSelectionRef.current;
+ textareaRef.current.focus();
+ textareaRef.current.setSelectionRange(start, end);
+ pendingSelectionRef.current = null;
+ }
+ }, [message]);
  const onChangeRef = useRef3(onChange);
  useEffect3(() => {
  onChangeRef.current = onChange;
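
The `pendingSelectionRef`/`useLayoutEffect` pair is the caret-preservation mechanism: a controlled update replaces the textarea's value, which would otherwise drop the selection, so the effect re-applies it synchronously after `message` commits and before the browser paints. The same pattern in isolation (a minimal sketch with assumed names, not the component's exact code):

    import { useLayoutEffect, useRef, type RefObject } from "react";

    function useRestoreSelection(ref: RefObject<HTMLTextAreaElement>, value: string) {
      const pending = useRef<{ start: number; end: number } | null>(null);
      useLayoutEffect(() => {
        // Runs after the new value commits but before paint, so no caret flicker.
        if (pending.current && ref.current) {
          const { start, end } = pending.current;
          ref.current.focus();
          ref.current.setSelectionRange(start, end);
          pending.current = null;
        }
      }, [ref, value]);
      return pending; // callers stash { start, end } here before triggering a change
    }
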
@@ -412,7 +412,6 @@ var ChatInputArea = forwardRef(({
  voiceConfigRef.current = voiceConfig;
  }, [voiceConfig]);
  const triggerChange = useCallback3((newValue) => {
- console.log("[ChatInputArea] triggerChange called:", { newValue, isControlled, hasOnChange: !!onChangeRef.current, hasTextarea: !!textareaRef.current });
  if (isControlled && onChangeRef.current) {
  const syntheticEvent = {
  target: { value: newValue },
@@ -424,49 +423,49 @@ var ChatInputArea = forwardRef(({
  }
  }, [isControlled]);
  const isInputDisabled = (currentTask == null ? void 0 : currentTask.complete) || (lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactive) && (((_b = lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactiveData) == null ? void 0 : _b.function) === "form" && !(lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.isResponseSubmitted) || ((_c = lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactiveData) == null ? void 0 : _c.function) === "confirm" && !(lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.isResponseSubmitted));
- useProactiveResize(textareaRef, measurementRef, message, isInputDisabled || !!voiceTrigger || inputMode === "voice");
- const handleVoiceKeyDown = (e) => {
- if (inputMode !== "voice" || isInputDisabled) return;
- if (e.code !== "Space") return;
- const activeElement = document.activeElement;
- const isInputActive = activeElement && (activeElement.tagName === "INPUT" || activeElement.tagName === "TEXTAREA" || activeElement instanceof HTMLElement && activeElement.isContentEditable);
- if (isInputActive) return;
- e.preventDefault();
- e.stopPropagation();
- if (voiceTrigger === "click") return;
- if (!e.repeat && !voiceTrigger) {
- startRecording("space");
+ useProactiveResize(textareaRef, measurementRef, message, isInputDisabled || !!voiceTrigger);
+ const insertTextAtCursor = useCallback3((text) => {
+ const textarea = textareaRef.current;
+ const currentVal = messageRef.current || "";
+ if (!textarea) {
+ triggerChange(currentVal + (currentVal ? " " : "") + text);
+ return;
  }
- };
- const handleVoiceKeyUp = (e) => {
- if (inputMode !== "voice" || isInputDisabled) return;
- if (e.code === "Space") {
- if (voiceTrigger === "space") {
- e.preventDefault();
- stopRecording();
- }
+ const start = textarea.selectionStart;
+ const end = textarea.selectionEnd;
+ const before = currentVal.substring(0, start);
+ const after = currentVal.substring(end);
+ const prefix = start > 0 && !/\s$/.test(before) ? " " : "";
+ const newText = before + prefix + text + after;
+ const selectionStart = start + prefix.length;
+ const selectionEnd = selectionStart + text.length;
+ pendingSelectionRef.current = { start: selectionStart, end: selectionEnd };
+ triggerChange(newText);
+ }, [triggerChange]);
+ const handleVoiceResult = useCallback3((text, isFinal) => {
+ if (isFinal) {
+ insertTextAtCursor(text);
  }
- };
- const handleVoiceResult = useCallback3((text) => {
- console.log("[ChatInputArea] nativeSpeech result:", text);
- triggerChange(messageRef.current + (messageRef.current ? " " : "") + text);
- }, []);
+ }, [insertTextAtCursor]);
  const handleVoiceEnd = useCallback3(() => {
  var _a2, _b2;
- console.log("[ChatInputArea] nativeSpeech onEnd triggered");
  setVoiceTrigger(null);
  (_b2 = (_a2 = voiceConfigRef.current) == null ? void 0 : _a2.onVoiceEnd) == null ? void 0 : _b2.call(_a2);
  }, []);
  const nativeSpeech = useSpeechRecognition(handleVoiceResult, handleVoiceEnd, voiceConfig == null ? void 0 : voiceConfig.language);
  const customRecorder = useAudioRecorder(async (blob) => {
  var _a2, _b2, _c2;
- console.log("[ChatInputArea] customRecorder onStop triggered");
  setVoiceTrigger(null);
  (_b2 = (_a2 = voiceConfigRef.current) == null ? void 0 : _a2.onVoiceEnd) == null ? void 0 : _b2.call(_a2);
+ if (blob.type === "audio/simulated") {
+ console.log("[ChatInputArea] Handling simulated audio capture");
+ insertTextAtCursor("This is a simulated transcription for development testing.");
+ return;
+ }
  if ((_c2 = voiceConfigRef.current) == null ? void 0 : _c2.onAudioCapture) {
  try {
  const text = await voiceConfigRef.current.onAudioCapture(blob);
- if (text) triggerChange(messageRef.current + (messageRef.current ? " " : "") + text);
+ if (text) insertTextAtCursor(text);
  } catch (e) {
  console.error("[ChatInputArea] Audio capture failed", e);
  }
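
Both voice paths now converge on `insertTextAtCursor`: native speech results (final ones only, per the new `isFinal` guard) and text returned by a custom `onAudioCapture` handler are spliced in at the caret, with a separating space when needed, instead of being appended to the end of the message. Wiring a custom handler might look like this (the endpoint, response shape, and `mode` value are assumptions; the component only special-cases `mode === "native"`):

    <ChatInputArea
      voiceConfig={{
        mode: "custom", // assumption: any non-"native" mode routes through useAudioRecorder
        onAudioCapture: async (blob) => {
          const body = new FormData();
          body.append("audio", blob, "capture.webm");
          const res = await fetch("/api/transcribe", { method: "POST", body }); // hypothetical endpoint
          const { text } = await res.json();
          return text; // inserted at the caret via insertTextAtCursor
        },
      }}
    />
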
@@ -474,12 +473,8 @@ var ChatInputArea = forwardRef(({
  });
  useImperativeHandle(ref, () => ({
  focus: () => {
- var _a2, _b2;
- if (inputMode === "voice") {
- (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
- } else {
- (_b2 = textareaRef.current) == null ? void 0 : _b2.focus();
- }
+ var _a2;
+ (_a2 = textareaRef.current) == null ? void 0 : _a2.focus();
  },
  setValue: (newValue) => {
  triggerChange(newValue);
@@ -510,8 +505,6 @@ var ChatInputArea = forwardRef(({
  };
  const startRecording = async (trigger) => {
  var _a2;
- console.log(`[ChatInputArea] startRecording triggered by: ${trigger}, current voiceTrigger: ${voiceTrigger}`);
- console.log("[ChatInputArea] voiceConfig:", voiceConfig);
  if (voiceTrigger) return;
  setVoiceTrigger(trigger);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
@@ -521,15 +514,16 @@ var ChatInputArea = forwardRef(({
  setVoiceTrigger(null);
  return;
  }
- console.log("[ChatInputArea] Starting nativeSpeech");
  nativeSpeech.start();
  } else {
- console.log("[ChatInputArea] Starting customRecorder");
  await customRecorder.start();
  }
+ setTimeout(() => {
+ var _a3;
+ return (_a3 = textareaRef.current) == null ? void 0 : _a3.focus();
+ }, 0);
  };
  const stopRecording = () => {
- console.log(`[ChatInputArea] stopRecording called. Current voiceTrigger: ${voiceTrigger}`);
  if (!voiceTrigger) return;
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
  nativeSpeech.stop();
@@ -540,9 +534,7 @@ var ChatInputArea = forwardRef(({
  const getPlaceholder = () => {
  if (placeholder) return placeholder;
  if (voiceTrigger) return "Listening...";
- if (currentTask == null ? void 0 : currentTask.complete) {
- return "Task completed!";
- }
+ if (currentTask == null ? void 0 : currentTask.complete) return "Task completed!";
  if ((lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactive) && (lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.interactiveData) && !(lastInteractiveMessage == null ? void 0 : lastInteractiveMessage.isResponseSubmitted)) {
  const interactiveType = lastInteractiveMessage.interactiveData.function;
  switch (interactiveType) {
@@ -571,85 +563,54 @@ var ChatInputArea = forwardRef(({
  {
  type: "button",
  onClick: () => {
- if (inputMode === "voice" && voiceTrigger) {
+ if (voiceTrigger) {
  stopRecording();
+ } else {
+ startRecording("click");
  }
- setInputMode((prev) => prev === "text" ? "voice" : "text");
  },
- className: "mb-1 p-2 text-gray-500 hover:text-gray-700 hover:bg-gray-100 rounded-full transition-colors flex-shrink-0 border border-gray-300 bg-white",
- title: inputMode === "text" ? "Switch to Voice" : "Switch to Keyboard",
- children: inputMode === "text" ? (
- // Voice Icon (Waveform)
- /* @__PURE__ */ jsx5("svg", { xmlns: "http://www.w3.org/2000/svg", viewBox: "0 0 24 24", fill: "currentColor", className: "w-5 h-5 text-gray-600", children: /* @__PURE__ */ jsx5("path", { d: "M11.25 4.532A.75.75 0 0 1 12 5.25v13.5a.75.75 0 0 1-1.5 0V5.25a.75.75 0 0 1 .75-.718ZM7.5 8.25a.75.75 0 0 1 .75.75v5.25a.75.75 0 0 1-1.5 0V9a.75.75 0 0 1 .75-.75Zm9 0a.75.75 0 0 1 .75.75v5.25a.75.75 0 0 1-1.5 0V9a.75.75 0 0 1 .75-.75ZM3.75 10.5a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-1.5 0v-1.5a.75.75 0 0 1 .75-.75Zm16.5 0a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-1.5 0v-1.5a.75.75 0 0 1 .75-.75Z" }) })
- ) : (
- // Keyboard Icon (Filled)
- /* @__PURE__ */ jsx5("svg", { xmlns: "http://www.w3.org/2000/svg", viewBox: "0 0 24 24", fill: "currentColor", className: "w-5 h-5 text-gray-600", children: /* @__PURE__ */ jsx5("path", { fillRule: "evenodd", d: "M3 6a3 3 0 0 1 3-3h12a3 3 0 0 1 3 3v12a3 3 0 0 1-3 3H6a3 3 0 0 1-3-3V6Zm4.5 3a.75.75 0 0 1 .75-.75h1.5a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-.75.75h-1.5a.75.75 0 0 1-.75-.75V9Zm6 0a.75.75 0 0 1 .75-.75h1.5a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-.75.75h-1.5a.75.75 0 0 1-.75-.75V9Zm6 0a.75.75 0 0 1 .75-.75h1.5a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-.75.75h-1.5a.75.75 0 0 1-.75-.75V9Zm-12 4.5a.75.75 0 0 1 .75-.75h1.5a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-.75.75h-1.5a.75.75 0 0 1-.75-.75v-1.5Zm6 0a.75.75 0 0 1 .75-.75h6.75a.75.75 0 0 1 .75.75v1.5a.75.75 0 0 1-.75.75h-6.75a.75.75 0 0 1-.75-.75v-1.5Z", clipRule: "evenodd" }) })
- )
+ className: `mb-1 p-2 rounded-full transition-all duration-300 flex-shrink-0 border ${voiceTrigger ? "text-white border-orange-400 bg-orange-500 scale-110 shadow-lg animate-pulse" : "text-gray-500 border-gray-300 bg-white hover:text-gray-700 hover:bg-gray-100"}`,
+ title: voiceTrigger ? "Stop Recording" : "Start Voice Input",
+ children: /* @__PURE__ */ jsx5(MicrophoneIcon, { className: "w-5 h-5" })
  }
  ),
  /* @__PURE__ */ jsxs3(
  "div",
  {
- ref: voiceContainerRef,
- tabIndex: inputMode === "voice" ? 0 : -1,
- onKeyDown: handleVoiceKeyDown,
- onKeyUp: handleVoiceKeyUp,
- onFocus: () => setIsFocused(true),
- onBlur: () => setIsFocused(false),
- className: "flex-1 flex items-center border border-gray-300 rounded-lg overflow-hidden outline-none focus-within:ring-2 focus-within:ring-blue-500 focus-within:border-blue-500 bg-white min-h-[42px] mb-1",
+ tabIndex: -1,
+ className: `flex-1 flex items-center border border-gray-300 rounded-lg overflow-hidden outline-none bg-white min-h-[42px] mb-1 transition-all ${voiceTrigger ? "ring-2 ring-orange-100 border-orange-300" : "focus-within:ring-2 focus-within:ring-blue-500 focus-within:border-blue-500"}`,
  children: [
- inputMode === "text" && /* @__PURE__ */ jsxs3(Fragment, { children: [
- /* @__PURE__ */ jsx5(
- "span",
- {
- ref: measurementRef,
- className: "absolute invisible whitespace-pre-wrap p-0 m-0 text-gray-700 leading-6",
- style: { fontSize: "1rem" }
- }
- ),
- /* @__PURE__ */ jsx5(
- "textarea",
- {
- ref: textareaRef,
- value: message,
- onChange: (e) => {
- if (isControlled && onChange) {
- onChange(e);
- } else {
- setInternalMessage(e.target.value);
- }
- },
- onKeyDown: handleKeyDown,
- placeholder: getPlaceholder(),
- disabled: isInputDisabled || !!voiceTrigger,
- rows: 1,
- className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 disabled:bg-gray-100 resize-none leading-6 w-full ${isInputDisabled ? "cursor-not-allowed" : ""}`
- }
- )
- ] }),
- inputMode === "voice" && /* @__PURE__ */ jsx5("div", { className: "flex-grow flex flex-col justify-center items-center p-1 relative", children: /* @__PURE__ */ jsx5(
- "button",
+ /* @__PURE__ */ jsx5(
+ "span",
+ {
+ ref: measurementRef,
+ className: "absolute invisible whitespace-pre-wrap p-0 m-0 text-gray-700 leading-6",
+ style: { fontSize: "1rem" }
+ }
+ ),
+ /* @__PURE__ */ jsx5(
+ "textarea",
  {
- type: "button",
- onClick: () => {
- if (voiceTrigger === "click") {
- stopRecording();
- } else if (!voiceTrigger) {
- startRecording("click");
+ ref: textareaRef,
+ value: message,
+ onChange: (e) => {
+ if (isControlled && onChange) {
+ onChange(e);
+ } else {
+ setInternalMessage(e.target.value);
  }
  },
- disabled: isInputDisabled || voiceTrigger === "space",
- className: `w-full py-2 text-center font-medium rounded-md transition-all select-none flex items-center justify-center gap-2 ${voiceTrigger ? "bg-red-50 text-red-600 animate-pulse border border-red-200" : "bg-gray-50 text-gray-700 hover:bg-gray-100"} ${voiceTrigger === "space" ? "opacity-90 cursor-default" : ""}`,
- children: voiceTrigger ? /* @__PURE__ */ jsxs3(Fragment, { children: [
- /* @__PURE__ */ jsx5("div", { className: "w-2 h-2 rounded-full bg-red-500 animate-ping mr-2" }),
- /* @__PURE__ */ jsxs3("span", { children: [
- "Listening... ",
- voiceTrigger === "space" ? "(Release Space to send)" : "Tap to send"
- ] })
- ] }) : /* @__PURE__ */ jsx5("span", { children: "Tap to Talk" })
+ onKeyDown: handleKeyDown,
+ onFocus: () => setIsFocused(true),
+ onBlur: () => setIsFocused(false),
+ placeholder: getPlaceholder(),
+ disabled: isInputDisabled,
+ readOnly: !!voiceTrigger,
+ rows: 1,
+ className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 resize-none leading-6 w-full ${isInputDisabled ? "bg-gray-100 cursor-not-allowed" : "bg-transparent"} ${voiceTrigger ? "cursor-default" : ""}`
  }
- ) }),
- (inputMode === "text" || isSending) && /* @__PURE__ */ jsxs3("div", { className: "relative mx-2 flex-shrink-0", children: [
+ ),
+ /* @__PURE__ */ jsxs3("div", { className: "relative mx-2 flex-shrink-0", children: [
  isSending && /* @__PURE__ */ jsx5("div", { className: "absolute -inset-1", children: /* @__PURE__ */ jsxs3(
  "svg",
  {
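
Note the deliberate swap on the textarea above: `disabled: isInputDisabled || !!voiceTrigger` becomes `disabled: isInputDisabled` plus `readOnly: !!voiceTrigger`. A disabled control drops focus and its selection, while a read-only one keeps both, which is what lets the `setSelectionRange` restore and the post-`startRecording` refocus place dictated text at the caret.
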
@@ -692,24 +653,10 @@ var ChatInputArea = forwardRef(({
  handleSubmit();
  }
  },
- disabled: (currentTask == null ? void 0 : currentTask.complete) || isSending && !onStop || isInputDisabled || !!voiceTrigger,
+ disabled: (currentTask == null ? void 0 : currentTask.complete) || isSending && !onStop || isInputDisabled,
  className: `relative z-10 text-white rounded-full p-2 transition-colors duration-200 disabled:bg-gray-400 disabled:cursor-not-allowed ${isSending && onStop ? "bg-red-500 hover:bg-red-600" : "bg-blue-600 hover:bg-blue-700"}`,
  title: isSending && onStop ? "Stop generating" : "Send message",
- children: isSending ? onStop ? /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" }) : (
- // AND we show the overlay spinner outside?
- // Actually `ChatInput.tsx` lines 117-140 are `isLoading && (...)`. It is always shown when loading.
- // So we have a spinner ring AROUND the button (absolute -inset-1).
- // AND potentially a spinner INSIDE the button if no onStop?
- // In my case, I will stick to:
- // If onStop: Show StopIcon. Button is Red.
- // If !onStop: Show Spinner inside? Or just let the outer ring do the work?
- // Legacy `Spinner` component usage inside button suggests double spinner if we are not careful.
- // But usually `onStop` is provided for streaming.
- // If I look at the screenshot, it shows a RED button (with stop icon) and a BLUE ring around it.
- // That matches: Red button (bg-red-500) + Blue Spinner Ring (text-blue-500).
- // So I will replicate that structure.
- /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" })
- ) : /* @__PURE__ */ jsx5(PaperAirplaneIcon, { className: "h-5 w-5" })
+ children: isSending ? onStop ? /* @__PURE__ */ jsx5(StopIcon, { className: "h-5 w-5" }) : /* @__PURE__ */ jsx5("div", { className: "w-5 h-5" }) : /* @__PURE__ */ jsx5(PaperAirplaneIcon, { className: "h-5 w-5" })
  }
  )
  ] })
@@ -718,18 +665,7 @@ var ChatInputArea = forwardRef(({
  )
  ] }),
  inputHint && /* @__PURE__ */ jsx5("div", { className: "text-sm text-red-500 bg-red-50 py-1 px-4 rounded-lg mt-1", children: inputHint }),
- hintText && inputMode === "text" && /* @__PURE__ */ jsx5("p", { className: "text-xs text-gray-500 ml-12 mb-2 mt-1", children: hintText }),
- inputMode === "voice" && !voiceTrigger && /* @__PURE__ */ jsx5(
- "p",
- {
- className: "text-[10px] text-gray-400 font-medium ml-12 text-center -mt-1 mb-1 cursor-pointer hover:text-gray-600 transition-colors",
- onClick: () => {
- var _a2;
- return (_a2 = voiceContainerRef.current) == null ? void 0 : _a2.focus();
- },
- children: isFocused ? "Click to talk or hold space to talk" : "Tap to talk or click here to focus and push space to talk"
- }
- )
+ /* @__PURE__ */ jsx5("div", { className: "mb-2 mt-0.5 min-h-[0.75rem]", style: { marginLeft: "48px" }, children: /* @__PURE__ */ jsx5("p", { className: `text-[10px] leading-tight transition-colors duration-200 ${voiceTrigger ? "text-orange-600 font-medium" : "text-gray-400"}`, children: voiceTrigger ? "Listening... tap mic icon again to stop" : hintText || (voiceConfig ? "Type in text or tap mic icon to talk" : "Type your message...") }) })
  ] });
  });
  ChatInputArea.displayName = "ChatInputArea";