@contentgrowth/llm-service 0.9.95 → 0.9.96

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -546,6 +546,10 @@ var ChatInputArea = forwardRef(({
  const originalLog = console.log;
  const originalWarn = console.warn;
  const originalError = console.error;
+ const formatTime = () => {
+ const now = /* @__PURE__ */ new Date();
+ return `${now.getHours().toString().padStart(2, "0")}:${now.getMinutes().toString().padStart(2, "0")}:${now.getSeconds().toString().padStart(2, "0")}.${now.getMilliseconds().toString().padStart(3, "0")}`;
+ };
  const addLog = (type, args) => {
  try {
  const msg = args.map((arg) => {
@@ -553,7 +557,7 @@ var ChatInputArea = forwardRef(({
  if (typeof arg === "object") return JSON.stringify(arg);
  return String(arg);
  }).join(" ");
- setLogs((prev) => [`[${type}] ${msg}`, ...prev].slice(0, 50));
+ setLogs((prev) => [`[${formatTime()}] [${type}] ${msg}`, ...prev].slice(0, 50));
  } catch (e) {
  }
  };
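
In both components (the same two hunks appear again for TapToTalk further down), captured console output now carries a millisecond-resolution timestamp before it is pushed into the in-component log buffer, which makes the on-screen debug log usable for correlating the timing-sensitive recording flow changed below. The hunks show the original console methods being saved and addLog building the prefixed entry; the surrounding interception and restore wiring is not part of this diff, so the hook below is only a sketch of the pattern, with the names useDebugLog and maxEntries invented for illustration.

import { useEffect, useState } from "react";

// Sketch of the timestamped log-capture pattern, assuming the interception is
// installed in an effect and undone on cleanup (that wiring is not shown in the diff).
export function useDebugLog(maxEntries = 50) {
  const [logs, setLogs] = useState<string[]>([]);
  useEffect(() => {
    const originalLog = console.log;
    const originalWarn = console.warn;
    const originalError = console.error;
    const pad = (n: number, width = 2) => n.toString().padStart(width, "0");
    const formatTime = () => {
      const now = new Date();
      return `${pad(now.getHours())}:${pad(now.getMinutes())}:${pad(now.getSeconds())}.${pad(now.getMilliseconds(), 3)}`;
    };
    const addLog = (type: string, args: unknown[]) => {
      try {
        const msg = args
          .map((arg) => (typeof arg === "object" ? JSON.stringify(arg) : String(arg)))
          .join(" ");
        // Newest entry first, capped (50 entries in the published code).
        setLogs((prev) => [`[${formatTime()}] [${type}] ${msg}`, ...prev].slice(0, maxEntries));
      } catch {
        // Ignore entries that fail to serialize, as the original addLog does.
      }
    };
    console.log = (...args) => { addLog("log", args); originalLog(...args); };
    console.warn = (...args) => { addLog("warn", args); originalWarn(...args); };
    console.error = (...args) => { addLog("error", args); originalError(...args); };
    return () => {
      console.log = originalLog;
      console.warn = originalWarn;
      console.error = originalError;
    };
  }, [maxEntries]);
  return logs;
}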
@@ -717,57 +721,63 @@ var ChatInputArea = forwardRef(({
  return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0;
  }, []);
  const startRecording = async (trigger) => {
- var _a2, _b2;
  console.log("[ChatInputArea] startRecording called. trigger:", trigger, "isMobile:", isMobile());
  console.log("[ChatInputArea] Current state - voiceTrigger:", voiceTrigger, "isTranscribing:", isTranscribing);
- if (isMobile()) {
- console.log("[ChatInputArea] SKIPPING textarea focus on mobile to prevent keyboard conflict");
- if (document.activeElement instanceof HTMLElement) {
- console.log("[ChatInputArea] Blur active element on mobile BEFORE start");
- document.activeElement.blur();
- }
- }
  if (voiceTrigger || isTranscribing) {
  console.log("[ChatInputArea] startRecording ignored - already active");
  return;
  }
- setVoiceTrigger(trigger);
- setVoiceError(null);
- if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
- console.log("[ChatInputArea] Using native speech recognition");
- if (!nativeSpeech.isSupported) {
- console.error("[ChatInputArea] Native speech not supported");
- alert("Speech recognition is not supported in this browser.");
- setVoiceTrigger(null);
- return;
+ const performStart = async () => {
+ var _a2, _b2;
+ setVoiceTrigger(trigger);
+ setVoiceError(null);
+ if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
+ console.log("[ChatInputArea] Using native speech recognition");
+ if (!nativeSpeech.isSupported) {
+ console.error("[ChatInputArea] Native speech not supported");
+ alert("Speech recognition is not supported in this browser.");
+ setVoiceTrigger(null);
+ return;
+ }
+ console.log("[ChatInputArea] Calling nativeSpeech.start()...");
+ nativeSpeech.start();
+ console.log("[ChatInputArea] nativeSpeech.start() called");
+ console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (after nativeSpeech.start)...");
+ try {
+ (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
+ console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
+ } catch (e) {
+ console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
+ }
+ } else {
+ console.log("[ChatInputArea] Using custom recorder");
+ console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (custom mode)...");
+ try {
+ (_b2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _b2.call(voiceConfig);
+ console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
+ } catch (e) {
+ console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
+ }
+ await customRecorder.start();
+ console.log("[ChatInputArea] Custom recorder started");
  }
- console.log("[ChatInputArea] Calling nativeSpeech.start()...");
- nativeSpeech.start();
- console.log("[ChatInputArea] nativeSpeech.start() called");
- console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (after nativeSpeech.start)...");
- try {
- (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
- console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
- } catch (e) {
- console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
+ };
+ if (isMobile()) {
+ console.log("[ChatInputArea] SKIPPING textarea focus on mobile to prevent keyboard conflict");
+ if (document.activeElement instanceof HTMLElement) {
+ console.log("[ChatInputArea] Blur active element on mobile BEFORE start");
+ document.activeElement.blur();
  }
+ console.log("[ChatInputArea] Mobile: Waiting 300ms before starting recognition...");
+ setTimeout(() => {
+ performStart();
+ }, 300);
  } else {
- console.log("[ChatInputArea] Using custom recorder");
- console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (custom mode)...");
- try {
- (_b2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _b2.call(voiceConfig);
- console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
- } catch (e) {
- console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
- }
- await customRecorder.start();
- console.log("[ChatInputArea] Custom recorder started");
- }
- if (!isMobile()) {
+ await performStart();
  console.log("[ChatInputArea] Re-focusing textarea (desktop only)");
  setTimeout(() => {
- var _a3;
- return (_a3 = textareaRef.current) == null ? void 0 : _a3.focus();
+ var _a2;
+ return (_a2 = textareaRef.current) == null ? void 0 : _a2.focus();
  }, 0);
  }
  };
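
The behavioral change in this hunk: the start logic is wrapped in a performStart closure, and on mobile the component blurs the active element and defers performStart by 300 ms so the on-screen keyboard can dismiss before speech recognition (or the custom recorder) starts; on desktop it still starts immediately and then re-focuses the textarea. A minimal sketch of that ordering, assuming a beginRecognition callback standing in for nativeSpeech.start() / customRecorder.start() and a focusInput callback standing in for the textareaRef focus call (both names are illustrative, not package APIs):

// The isMobileDevice check mirrors the isMobile memo visible in the hunk;
// MOBILE_START_DELAY_MS mirrors the 300 ms delay introduced in this version.
const MOBILE_START_DELAY_MS = 300;

function isMobileDevice(): boolean {
  return (
    /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) ||
    "ontouchstart" in window ||
    navigator.maxTouchPoints > 0
  );
}

async function startWithMobileDelay(
  beginRecognition: () => void | Promise<void>,
  focusInput: () => void
): Promise<void> {
  if (isMobileDevice()) {
    // Dismiss the on-screen keyboard first; starting recognition while the
    // keyboard is still animating away is the conflict the delay works around.
    if (document.activeElement instanceof HTMLElement) {
      document.activeElement.blur();
    }
    setTimeout(() => {
      void beginRecognition();
    }, MOBILE_START_DELAY_MS);
  } else {
    // Desktop: start immediately, then restore focus to the input on the next tick.
    await beginRecognition();
    setTimeout(() => focusInput(), 0);
  }
}

Note that in the published code performStart also owns setVoiceTrigger/setVoiceError and the onVoiceStart callbacks, so on mobile those now run after the 300 ms delay as well.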
@@ -981,6 +991,10 @@ var TapToTalk = ({
  const originalLog = console.log;
  const originalWarn = console.warn;
  const originalError = console.error;
+ const formatTime = () => {
+ const now = /* @__PURE__ */ new Date();
+ return `${now.getHours().toString().padStart(2, "0")}:${now.getMinutes().toString().padStart(2, "0")}:${now.getSeconds().toString().padStart(2, "0")}.${now.getMilliseconds().toString().padStart(3, "0")}`;
+ };
  const addLog = (type, args) => {
  try {
  const msg = args.map((arg) => {
@@ -988,7 +1002,7 @@ var TapToTalk = ({
  if (typeof arg === "object") return JSON.stringify(arg);
  return String(arg);
  }).join(" ");
- setLogs((prev) => [`[${type}] ${msg}`, ...prev].slice(0, 50));
+ setLogs((prev) => [`[${formatTime()}] [${type}] ${msg}`, ...prev].slice(0, 50));
  } catch (e) {
  }
  };
@@ -1108,38 +1122,47 @@ var TapToTalk = ({
  } else {
  console.log("[TapToTalk] Starting voice... mode:", voiceConfig == null ? void 0 : voiceConfig.mode);
  setErrorMsg(null);
+ const performStart = async () => {
+ setVoiceTrigger("click");
+ console.log("[TapToTalk] voiceTrigger set to click");
+ if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
+ console.log("[TapToTalk] Starting custom recorder...");
+ try {
+ await customRecorder.start();
+ console.log("[TapToTalk] Custom recorder started successfully");
+ } catch (e2) {
+ console.error("[TapToTalk] Custom recorder failed:", e2);
+ setErrorMsg("Mic access denied");
+ setVoiceTrigger(null);
+ }
+ } else {
+ console.log("[TapToTalk] Starting native speech recognition...");
+ if (!nativeSpeech.isSupported) {
+ console.error("[TapToTalk] Native speech not supported");
+ setErrorMsg("Speech not supported");
+ setVoiceTrigger(null);
+ return;
+ }
+ console.log("[TapToTalk] Calling nativeSpeech.start()...");
+ nativeSpeech.start();
+ console.log("[TapToTalk] nativeSpeech.start() called");
+ }
+ };
  if (isMobile()) {
  console.log("[TapToTalk] Mobile: Blurring active element and skipping onFocusTarget");
  if (document.activeElement instanceof HTMLElement) {
  document.activeElement.blur();
  }
- } else if (onFocusTarget) {
- console.log("[TapToTalk] Desktop: calling onFocusTarget()");
- onFocusTarget();
- }
- setVoiceTrigger("click");
- console.log("[TapToTalk] voiceTrigger set to click");
- if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
- console.log("[TapToTalk] Starting custom recorder...");
- try {
- await customRecorder.start();
- console.log("[TapToTalk] Custom recorder started successfully");
- } catch (e2) {
- console.error("[TapToTalk] Custom recorder failed:", e2);
- setErrorMsg("Mic access denied");
- setVoiceTrigger(null);
- }
+ console.log("[TapToTalk] Mobile: Waiting 300ms before starting recognition...");
+ setTimeout(() => {
+ performStart();
+ }, 300);
  } else {
- console.log("[TapToTalk] Starting native speech recognition...");
- if (!nativeSpeech.isSupported) {
- console.error("[TapToTalk] Native speech not supported");
- setErrorMsg("Speech not supported");
- setVoiceTrigger(null);
- return;
+ if (onFocusTarget) {
+ console.log("[TapToTalk] Desktop: calling onFocusTarget()");
+ onFocusTarget();
  }
- console.log("[TapToTalk] Calling nativeSpeech.start()...");
- nativeSpeech.start();
- console.log("[TapToTalk] nativeSpeech.start() called");
+ performStart();
  }
  }
  } finally {
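
In both components the native path still bails out early when nativeSpeech.isSupported is false (an alert in ChatInputArea, the "Speech not supported" error message in TapToTalk). The internals of that hook are not part of this diff; the sketch below only illustrates the usual way the Web Speech API is feature-detected and started, with getSpeechRecognitionCtor and MinimalSpeechRecognition being names invented here, not package APIs.

// Illustrative feature detection for the Web Speech API. Chrome and Safari expose the
// prefixed webkitSpeechRecognition constructor; browsers with neither take the error path.
interface MinimalSpeechRecognition {
  lang: string;
  interimResults: boolean;
  onresult: ((event: { results: ArrayLike<ArrayLike<{ transcript: string }>> }) => void) | null;
  start(): void;
  stop(): void;
}

function getSpeechRecognitionCtor(): (new () => MinimalSpeechRecognition) | null {
  const w = window as unknown as Record<string, unknown>;
  const ctor = w.SpeechRecognition ?? w.webkitSpeechRecognition;
  return typeof ctor === "function" ? (ctor as unknown as new () => MinimalSpeechRecognition) : null;
}

const SpeechCtor = getSpeechRecognitionCtor();
if (!SpeechCtor) {
  console.error("Speech recognition is not supported in this browser.");
} else {
  const recognition = new SpeechCtor();
  recognition.lang = "en-US";
  recognition.interimResults = true; // stream partial transcripts as the user speaks
  recognition.onresult = (event) => {
    console.log("transcript:", event.results[0][0].transcript);
  };
  recognition.start();
}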