@contentgrowth/llm-service 0.9.0 → 0.9.1

@@ -57,7 +57,7 @@ var MessageBubble = ({
  )
  ] }) });
  }
- return /* @__PURE__ */ jsxs("div", { className: `flex items-start gap-3 my-4 ${isUser ? "justify-end" : "justify-start"}`, children: [
+ return /* @__PURE__ */ jsxs("div", { className: `flex items-start gap-3 my-1 ${isUser ? "justify-end" : "justify-start"}`, children: [
  !isUser && /* @__PURE__ */ jsx3("div", { className: "flex-shrink-0 h-8 w-8 rounded-full bg-blue-500 flex items-center justify-center text-white", children: /* @__PURE__ */ jsx3(SparklesIcon, { className: "h-5 w-5" }) }),
  /* @__PURE__ */ jsxs(
  "div",
@@ -384,6 +384,8 @@ var ChatInputArea = forwardRef(({
  var _a, _b, _c, _d;
  const [internalMessage, setInternalMessage] = useState3("");
  const [voiceTrigger, setVoiceTrigger] = useState3(null);
+ const [isTranscribing, setIsTranscribing] = useState3(false);
+ const [voiceError, setVoiceError] = useState3(null);
  const [isFocused, setIsFocused] = useState3(false);
  const textareaRef = useRef3(null);
  const measurementRef = useRef3(null);
@@ -412,6 +414,7 @@ var ChatInputArea = forwardRef(({
  voiceConfigRef.current = voiceConfig;
  }, [voiceConfig]);
  const triggerChange = useCallback3((newValue) => {
+ setVoiceError(null);
  if (isControlled && onChangeRef.current) {
  const syntheticEvent = {
  target: { value: newValue },
@@ -456,10 +459,14 @@ var ChatInputArea = forwardRef(({
  const customRecorder = useAudioRecorder(async (blob) => {
  var _a2, _b2, _c2;
  setVoiceTrigger(null);
+ setIsTranscribing(true);
+ setVoiceError(null);
  (_b2 = (_a2 = voiceConfigRef.current) == null ? void 0 : _a2.onVoiceEnd) == null ? void 0 : _b2.call(_a2);
  if (blob.type === "audio/simulated") {
  console.log("[ChatInputArea] Handling simulated audio capture");
+ await new Promise((resolve) => setTimeout(resolve, 1500));
  insertTextAtCursor("This is a simulated transcription for development testing.");
+ setIsTranscribing(false);
  return;
  }
  if ((_c2 = voiceConfigRef.current) == null ? void 0 : _c2.onAudioCapture) {
@@ -468,7 +475,12 @@ var ChatInputArea = forwardRef(({
  if (text) insertTextAtCursor(text);
  } catch (e) {
  console.error("[ChatInputArea] Audio capture failed", e);
+ setVoiceError(e.message || "Transcription failed");
+ } finally {
+ setIsTranscribing(false);
  }
+ } else {
+ setIsTranscribing(false);
  }
  });
  useImperativeHandle(ref, () => ({
@@ -505,8 +517,9 @@ var ChatInputArea = forwardRef(({
  };
  const startRecording = async (trigger) => {
  var _a2;
- if (voiceTrigger) return;
+ if (voiceTrigger || isTranscribing) return;
  setVoiceTrigger(trigger);
+ setVoiceError(null);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
  if (!nativeSpeech.isSupported) {
@@ -601,15 +614,22 @@ var ChatInputArea = forwardRef(({
  }
  },
  onKeyDown: handleKeyDown,
- onFocus: () => setIsFocused(true),
+ onFocus: () => {
+ setIsFocused(true);
+ setVoiceError(null);
+ },
  onBlur: () => setIsFocused(false),
  placeholder: getPlaceholder(),
  disabled: isInputDisabled,
- readOnly: !!voiceTrigger,
+ readOnly: !!voiceTrigger || isTranscribing,
  rows: 1,
- className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 resize-none leading-6 w-full ${isInputDisabled ? "bg-gray-100 cursor-not-allowed" : "bg-transparent"} ${voiceTrigger ? "cursor-default" : ""}`
+ className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 resize-none leading-6 w-full ${isInputDisabled ? "bg-gray-100 cursor-not-allowed" : "bg-transparent"} ${voiceTrigger || isTranscribing ? "cursor-default" : ""}`
  }
  ),
+ isTranscribing && /* @__PURE__ */ jsx5("div", { className: "flex-shrink-0 animate-spin mr-2", children: /* @__PURE__ */ jsxs3("svg", { className: "w-4 h-4 text-orange-500", viewBox: "0 0 24 24", children: [
+ /* @__PURE__ */ jsx5("circle", { className: "opacity-25", cx: "12", cy: "12", r: "10", stroke: "currentColor", strokeWidth: "4", fill: "none" }),
+ /* @__PURE__ */ jsx5("path", { className: "opacity-75", fill: "currentColor", d: "M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z" })
+ ] }) }),
  /* @__PURE__ */ jsxs3("div", { className: "relative mx-2 flex-shrink-0", children: [
  isSending && /* @__PURE__ */ jsx5("div", { className: "absolute -inset-1", children: /* @__PURE__ */ jsxs3(
  "svg",
@@ -665,7 +685,10 @@ var ChatInputArea = forwardRef(({
  )
  ] }),
  inputHint && /* @__PURE__ */ jsx5("div", { className: "text-sm text-red-500 bg-red-50 py-1 px-4 rounded-lg mt-1", children: inputHint }),
- /* @__PURE__ */ jsx5("div", { className: "mb-2 mt-0.5 min-h-[0.75rem]", style: { marginLeft: "48px" }, children: /* @__PURE__ */ jsx5("p", { className: `text-[10px] leading-tight transition-colors duration-200 ${voiceTrigger ? "text-orange-600 font-medium" : "text-gray-400"}`, children: voiceTrigger ? "Listening... tap mic icon again to stop" : hintText || (voiceConfig ? "Type in text or tap mic icon to talk" : "Type your message...") }) })
+ /* @__PURE__ */ jsx5("div", { className: "ml-[46px] mb-2 mt-0.5 min-h-[0.75rem]", style: { marginLeft: "48px" }, children: /* @__PURE__ */ jsx5("p", { className: `text-[10px] leading-tight transition-all duration-200 ${voiceError ? "text-red-500" : voiceTrigger || isTranscribing ? "text-orange-600 font-medium" : "text-gray-400"}`, children: voiceError ? /* @__PURE__ */ jsxs3("span", { className: "flex items-center gap-1 font-semibold italic", children: [
+ "Error: ",
+ voiceError
+ ] }) : isTranscribing ? "Transcribing, please wait..." : voiceTrigger ? "Listening... tap mic icon again to stop" : hintText || (voiceConfig ? "Type in text or tap mic icon to talk" : "Type your message...") }) })
  ] });
  });
  ChatInputArea.displayName = "ChatInputArea";
@@ -1188,7 +1211,7 @@ var ChatMessageList = ({
  "div",
  {
  ref: chatContainerRef,
- className: "flex-1 overflow-y-auto p-4 space-y-4 bg-gray-50",
+ className: "flex-1 overflow-y-auto p-4 space-y-8 bg-gray-50",
  children: [
  chatHistory.length === 0 && !isProcessing && /* @__PURE__ */ jsxs8("div", { className: "text-center py-8", children: [
  /* @__PURE__ */ jsx11("h3", { className: "text-lg font-medium text-gray-700 mb-2", children: "How can I help you today?" }),
@@ -1244,7 +1267,7 @@ var ChatMessageList = ({
  "div",
  {
  ref: processingIndicatorRef,
- className: "flex justify-start",
+ className: "flex justify-start my-4",
  children: /* @__PURE__ */ jsx11("div", { className: "bg-white text-gray-800 border border-gray-200 rounded-lg px-4 py-2 max-w-[85%]", children: /* @__PURE__ */ jsxs8("div", { className: "flex items-center", children: [
  /* @__PURE__ */ jsx11("span", { className: "text-sm", children: processingHint }),
  /* @__PURE__ */ jsxs8("span", { className: "ml-2 flex space-x-1", children: [
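The ChatInputArea hunks above thread new isTranscribing and voiceError state through the useAudioRecorder callback: while onAudioCapture is awaited, the input goes read-only, a spinner and the "Transcribing, please wait..." hint are shown, and a thrown error's message is surfaced as the red "Error: ..." hint. A minimal consumer sketch of that flow follows. Assumptions not confirmed by this diff: that the package exports ChatInputArea directly, that onAudioCapture is expected to resolve to the transcribed text, and the /api/transcribe endpoint and ChatFooter wrapper, which are purely illustrative.

// Sketch only — the export name, voiceConfig shape, and "/api/transcribe"
// endpoint are assumptions for illustration, not part of this package's docs.
import { ChatInputArea } from "@contentgrowth/llm-service";

export function ChatFooter() {
  const voiceConfig = {
    // mode is omitted so the custom useAudioRecorder path seen in the diff is
    // used; mode: "native" would take the nativeSpeech branch instead (assumed).
    onVoiceStart: () => console.log("recording started"),
    onVoiceEnd: () => console.log("recording stopped"),
    // Awaited by the recorder callback; the resolved string is assumed to be
    // inserted at the cursor via insertTextAtCursor(text).
    onAudioCapture: async (blob) => {
      const body = new FormData();
      body.append("audio", blob);
      const res = await fetch("/api/transcribe", { method: "POST", body });
      // A thrown message becomes the red "Error: ..." hint under the input.
      if (!res.ok) throw new Error("Transcription failed");
      const { text } = await res.json();
      return text;
    }
  };
  return <ChatInputArea voiceConfig={voiceConfig} />;
}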