@contentgrowth/llm-service 0.9.0 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -98,7 +98,7 @@ var MessageBubble = ({
  )
  ] }) });
  }
- return /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)("div", { className: `flex items-start gap-3 my-4 ${isUser ? "justify-end" : "justify-start"}`, children: [
+ return /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)("div", { className: `flex items-start gap-3 my-1 ${isUser ? "justify-end" : "justify-start"}`, children: [
  !isUser && /* @__PURE__ */ (0, import_jsx_runtime3.jsx)("div", { className: "flex-shrink-0 h-8 w-8 rounded-full bg-blue-500 flex items-center justify-center text-white", children: /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(import_solid.SparklesIcon, { className: "h-5 w-5" }) }),
  /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)(
  "div",
@@ -425,6 +425,8 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  var _a, _b, _c, _d;
  const [internalMessage, setInternalMessage] = (0, import_react5.useState)("");
  const [voiceTrigger, setVoiceTrigger] = (0, import_react5.useState)(null);
+ const [isTranscribing, setIsTranscribing] = (0, import_react5.useState)(false);
+ const [voiceError, setVoiceError] = (0, import_react5.useState)(null);
  const [isFocused, setIsFocused] = (0, import_react5.useState)(false);
  const textareaRef = (0, import_react5.useRef)(null);
  const measurementRef = (0, import_react5.useRef)(null);
@@ -453,6 +455,7 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  voiceConfigRef.current = voiceConfig;
  }, [voiceConfig]);
  const triggerChange = (0, import_react5.useCallback)((newValue) => {
+ setVoiceError(null);
  if (isControlled && onChangeRef.current) {
  const syntheticEvent = {
  target: { value: newValue },
@@ -497,10 +500,14 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  const customRecorder = useAudioRecorder(async (blob) => {
  var _a2, _b2, _c2;
  setVoiceTrigger(null);
+ setIsTranscribing(true);
+ setVoiceError(null);
  (_b2 = (_a2 = voiceConfigRef.current) == null ? void 0 : _a2.onVoiceEnd) == null ? void 0 : _b2.call(_a2);
  if (blob.type === "audio/simulated") {
  console.log("[ChatInputArea] Handling simulated audio capture");
+ await new Promise((resolve) => setTimeout(resolve, 1500));
  insertTextAtCursor("This is a simulated transcription for development testing.");
+ setIsTranscribing(false);
  return;
  }
  if ((_c2 = voiceConfigRef.current) == null ? void 0 : _c2.onAudioCapture) {
@@ -509,7 +516,12 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  if (text) insertTextAtCursor(text);
  } catch (e) {
  console.error("[ChatInputArea] Audio capture failed", e);
+ setVoiceError(e.message || "Transcription failed");
+ } finally {
+ setIsTranscribing(false);
  }
+ } else {
+ setIsTranscribing(false);
  }
  });
  (0, import_react5.useImperativeHandle)(ref, () => ({
@@ -546,8 +558,9 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  };
  const startRecording = async (trigger) => {
  var _a2;
- if (voiceTrigger) return;
+ if (voiceTrigger || isTranscribing) return;
  setVoiceTrigger(trigger);
+ setVoiceError(null);
  (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
  if (!nativeSpeech.isSupported) {
@@ -606,13 +619,17 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  onClick: () => {
  if (voiceTrigger) {
  stopRecording();
- } else {
+ } else if (!isTranscribing) {
  startRecording("click");
  }
  },
- className: `mb-1 p-2 rounded-full transition-all duration-300 flex-shrink-0 border ${voiceTrigger ? "text-white border-orange-400 bg-orange-500 scale-110 shadow-lg animate-pulse" : "text-gray-500 border-gray-300 bg-white hover:text-gray-700 hover:bg-gray-100"}`,
- title: voiceTrigger ? "Stop Recording" : "Start Voice Input",
- children: /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(import_outline.MicrophoneIcon, { className: "w-5 h-5" })
+ className: `mb-1 p-2 rounded-full transition-all duration-300 flex-shrink-0 border ${voiceTrigger || isTranscribing ? "text-white border-orange-400 bg-orange-500 scale-110 shadow-lg" : "text-gray-500 border-gray-300 bg-white hover:text-gray-700 hover:bg-gray-100"} ${voiceTrigger ? "animate-pulse" : ""} ${isTranscribing ? "cursor-wait" : ""}`,
+ disabled: isTranscribing,
+ title: isTranscribing ? "Transcribing..." : voiceTrigger ? "Stop Recording" : "Start Voice Input",
+ children: isTranscribing ? /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { className: "animate-spin w-5 h-5 flex items-center justify-center", children: /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("svg", { className: "w-5 h-5 text-white", viewBox: "0 0 24 24", children: [
+ /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("circle", { className: "opacity-25", cx: "12", cy: "12", r: "10", stroke: "currentColor", strokeWidth: "4", fill: "none" }),
+ /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("path", { className: "opacity-75", fill: "currentColor", d: "M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z" })
+ ] }) }) : /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(import_outline.MicrophoneIcon, { className: "w-5 h-5" })
  }
  ),
  /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)(
@@ -642,13 +659,16 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  }
  },
  onKeyDown: handleKeyDown,
- onFocus: () => setIsFocused(true),
+ onFocus: () => {
+ setIsFocused(true);
+ setVoiceError(null);
+ },
  onBlur: () => setIsFocused(false),
  placeholder: getPlaceholder(),
  disabled: isInputDisabled,
- readOnly: !!voiceTrigger,
+ readOnly: !!voiceTrigger || isTranscribing,
  rows: 1,
- className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 resize-none leading-6 w-full ${isInputDisabled ? "bg-gray-100 cursor-not-allowed" : "bg-transparent"} ${voiceTrigger ? "cursor-default" : ""}`
+ className: `flex-grow px-4 py-2 outline-none text-gray-700 placeholder-gray-500 resize-none leading-6 w-full ${isInputDisabled ? "bg-gray-100 cursor-not-allowed" : "bg-transparent"} ${voiceTrigger || isTranscribing ? "cursor-default" : ""}`
  }
  ),
  /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("div", { className: "relative mx-2 flex-shrink-0", children: [
@@ -706,7 +726,10 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  )
  ] }),
  inputHint && /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { className: "text-sm text-red-500 bg-red-50 py-1 px-4 rounded-lg mt-1", children: inputHint }),
- /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { className: "mb-2 mt-0.5 min-h-[0.75rem]", style: { marginLeft: "48px" }, children: /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("p", { className: `text-[10px] leading-tight transition-colors duration-200 ${voiceTrigger ? "text-orange-600 font-medium" : "text-gray-400"}`, children: voiceTrigger ? "Listening... tap mic icon again to stop" : hintText || (voiceConfig ? "Type in text or tap mic icon to talk" : "Type your message...") }) })
+ /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { className: "ml-[46px] mb-2 mt-0.5 min-h-[0.75rem]", style: { marginLeft: "48px" }, children: /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("p", { className: `text-[10px] leading-tight transition-all duration-200 ${voiceError ? "text-red-500" : voiceTrigger || isTranscribing ? "text-orange-600 font-medium" : "text-gray-400"}`, children: voiceError ? /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("span", { className: "flex items-center gap-1 font-semibold italic", children: [
+ "Error: ",
+ voiceError
+ ] }) : isTranscribing ? "Transcribing, please wait..." : voiceTrigger ? "Listening... tap mic icon again to stop" : hintText || (voiceConfig ? "Type in text or tap mic icon to talk" : "Type your message...") }) })
  ] });
  });
  ChatInputArea.displayName = "ChatInputArea";
@@ -1229,7 +1252,7 @@ var ChatMessageList = ({
  "div",
  {
  ref: chatContainerRef,
- className: "flex-1 overflow-y-auto p-4 space-y-4 bg-gray-50",
+ className: "flex-1 overflow-y-auto p-4 space-y-8 bg-gray-50",
  children: [
  chatHistory.length === 0 && !isProcessing && /* @__PURE__ */ (0, import_jsx_runtime11.jsxs)("div", { className: "text-center py-8", children: [
  /* @__PURE__ */ (0, import_jsx_runtime11.jsx)("h3", { className: "text-lg font-medium text-gray-700 mb-2", children: "How can I help you today?" }),
@@ -1285,7 +1308,7 @@ var ChatMessageList = ({
  "div",
  {
  ref: processingIndicatorRef,
- className: "flex justify-start",
+ className: "flex justify-start my-4",
  children: /* @__PURE__ */ (0, import_jsx_runtime11.jsx)("div", { className: "bg-white text-gray-800 border border-gray-200 rounded-lg px-4 py-2 max-w-[85%]", children: /* @__PURE__ */ (0, import_jsx_runtime11.jsxs)("div", { className: "flex items-center", children: [
  /* @__PURE__ */ (0, import_jsx_runtime11.jsx)("span", { className: "text-sm", children: processingHint }),
  /* @__PURE__ */ (0, import_jsx_runtime11.jsxs)("span", { className: "ml-2 flex space-x-1", children: [