@contentgrowth/llm-service 0.9.96 → 0.9.97

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -599,7 +599,7 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  if (typeof arg === "object") return JSON.stringify(arg);
  return String(arg);
  }).join(" ");
- setLogs((prev) => [`[${formatTime()}] [${type}] ${msg}`, ...prev].slice(0, 50));
+ setLogs((prev) => [...prev, `[${formatTime()}] [${type}] ${msg}`].slice(-50));
  } catch (e) {
  }
  };
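The only change in this hunk is the component's debug-log buffer: 0.9.96 prepended each entry and kept the first 50 (newest first), while 0.9.97 appends and keeps the last 50, so the log reads in chronological order. A minimal standalone sketch of the two behaviors, using the same array idioms as the bundled code (entry stands in for the formatted log line):

// keepNewestFirst mirrors the 0.9.96 behavior, keepChronological the 0.9.97 behavior
const keepNewestFirst = (prev, entry) => [entry, ...prev].slice(0, 50);
const keepChronological = (prev, entry) => [...prev, entry].slice(-50);

Both cap the buffer at 50 entries; only the ordering and which end gets trimmed differ.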
@@ -763,63 +763,50 @@ var ChatInputArea = (0, import_react5.forwardRef)(({
  return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0;
  }, []);
  const startRecording = async (trigger) => {
+ var _a2, _b2;
  console.log("[ChatInputArea] startRecording called. trigger:", trigger, "isMobile:", isMobile());
  console.log("[ChatInputArea] Current state - voiceTrigger:", voiceTrigger, "isTranscribing:", isTranscribing);
  if (voiceTrigger || isTranscribing) {
  console.log("[ChatInputArea] startRecording ignored - already active");
  return;
  }
- const performStart = async () => {
- var _a2, _b2;
- setVoiceTrigger(trigger);
- setVoiceError(null);
- if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
- console.log("[ChatInputArea] Using native speech recognition");
- if (!nativeSpeech.isSupported) {
- console.error("[ChatInputArea] Native speech not supported");
- alert("Speech recognition is not supported in this browser.");
- setVoiceTrigger(null);
- return;
- }
- console.log("[ChatInputArea] Calling nativeSpeech.start()...");
- nativeSpeech.start();
- console.log("[ChatInputArea] nativeSpeech.start() called");
- console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (after nativeSpeech.start)...");
- try {
- (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
- console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
- } catch (e) {
- console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
- }
- } else {
- console.log("[ChatInputArea] Using custom recorder");
- console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (custom mode)...");
- try {
- (_b2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _b2.call(voiceConfig);
- console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
- } catch (e) {
- console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
- }
- await customRecorder.start();
- console.log("[ChatInputArea] Custom recorder started");
+ setVoiceTrigger(trigger);
+ setVoiceError(null);
+ if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
+ console.log("[ChatInputArea] Using native speech recognition");
+ if (!nativeSpeech.isSupported) {
+ console.error("[ChatInputArea] Native speech not supported");
+ alert("Speech recognition is not supported in this browser.");
+ setVoiceTrigger(null);
+ return;
  }
- };
- if (isMobile()) {
- console.log("[ChatInputArea] SKIPPING textarea focus on mobile to prevent keyboard conflict");
- if (document.activeElement instanceof HTMLElement) {
- console.log("[ChatInputArea] Blur active element on mobile BEFORE start");
- document.activeElement.blur();
+ console.log("[ChatInputArea] Calling nativeSpeech.start()...");
+ nativeSpeech.start();
+ console.log("[ChatInputArea] nativeSpeech.start() called");
+ console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists...");
+ try {
+ (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
+ console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
+ } catch (e) {
+ console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
  }
- console.log("[ChatInputArea] Mobile: Waiting 300ms before starting recognition...");
- setTimeout(() => {
- performStart();
- }, 300);
  } else {
- await performStart();
+ console.log("[ChatInputArea] Using custom recorder");
+ console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists (custom mode)...");
+ try {
+ (_b2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _b2.call(voiceConfig);
+ console.log("[ChatInputArea] voiceConfig.onVoiceStart completed");
+ } catch (e) {
+ console.error("[ChatInputArea] voiceConfig.onVoiceStart threw error", e);
+ }
+ await customRecorder.start();
+ console.log("[ChatInputArea] Custom recorder started");
+ }
+ if (!isMobile()) {
  console.log("[ChatInputArea] Re-focusing textarea (desktop only)");
  setTimeout(() => {
- var _a2;
- return (_a2 = textareaRef.current) == null ? void 0 : _a2.focus();
+ var _a3;
+ return (_a3 = textareaRef.current) == null ? void 0 : _a3.focus();
  }, 0);
  }
  };
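This hunk flattens startRecording in ChatInputArea: the inner performStart closure is inlined, the mobile-only blur plus 300 ms setTimeout deferral is dropped, the _a2/_b2 temporaries move to the top of the function, and the textarea refocus is now guarded by !isMobile(). A simplified sketch of the resulting control flow (illustrative only, not the exact bundled output; logging and the try/catch around onVoiceStart are omitted):

const startRecording = async (trigger) => {
  if (voiceTrigger || isTranscribing) return;              // already active, ignore
  setVoiceTrigger(trigger);
  setVoiceError(null);
  if (voiceConfig?.mode === "native") {
    if (!nativeSpeech.isSupported) { setVoiceTrigger(null); return; }
    nativeSpeech.start();                                   // start first in native mode
    voiceConfig?.onVoiceStart?.();                          // bundled code wraps this in try/catch
  } else {
    voiceConfig?.onVoiceStart?.();                          // custom mode: notify before starting
    await customRecorder.start();
  }
  if (!isMobile()) setTimeout(() => textareaRef.current?.focus(), 0); // desktop-only refocus
};

The net effect is that recording now starts immediately on mobile instead of after a 300 ms delay, and no active element is blurred beforehand.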
@@ -1044,7 +1031,7 @@ var TapToTalk = ({
  if (typeof arg === "object") return JSON.stringify(arg);
  return String(arg);
  }).join(" ");
- setLogs((prev) => [`[${formatTime()}] [${type}] ${msg}`, ...prev].slice(0, 50));
+ setLogs((prev) => [...prev, `[${formatTime()}] [${type}] ${msg}`].slice(-50));
  } catch (e) {
  }
  };
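This is the same log-buffer change as in ChatInputArea above, applied to TapToTalk's local debug logger: entries are appended and the last 50 kept, rather than prepended with the first 50 kept.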
@@ -1164,47 +1151,33 @@ var TapToTalk = ({
  } else {
  console.log("[TapToTalk] Starting voice... mode:", voiceConfig == null ? void 0 : voiceConfig.mode);
  setErrorMsg(null);
- const performStart = async () => {
- setVoiceTrigger("click");
- console.log("[TapToTalk] voiceTrigger set to click");
- if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
- console.log("[TapToTalk] Starting custom recorder...");
- try {
- await customRecorder.start();
- console.log("[TapToTalk] Custom recorder started successfully");
- } catch (e2) {
- console.error("[TapToTalk] Custom recorder failed:", e2);
- setErrorMsg("Mic access denied");
- setVoiceTrigger(null);
- }
- } else {
- console.log("[TapToTalk] Starting native speech recognition...");
- if (!nativeSpeech.isSupported) {
- console.error("[TapToTalk] Native speech not supported");
- setErrorMsg("Speech not supported");
- setVoiceTrigger(null);
- return;
- }
- console.log("[TapToTalk] Calling nativeSpeech.start()...");
- nativeSpeech.start();
- console.log("[TapToTalk] nativeSpeech.start() called");
- }
- };
- if (isMobile()) {
- console.log("[TapToTalk] Mobile: Blurring active element and skipping onFocusTarget");
- if (document.activeElement instanceof HTMLElement) {
- document.activeElement.blur();
+ setVoiceTrigger("click");
+ console.log("[TapToTalk] voiceTrigger set to click");
+ if (!isMobile() && onFocusTarget) {
+ console.log("[TapToTalk] Desktop: calling onFocusTarget()");
+ onFocusTarget();
+ }
+ if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
+ console.log("[TapToTalk] Starting custom recorder...");
+ try {
+ await customRecorder.start();
+ console.log("[TapToTalk] Custom recorder started successfully");
+ } catch (e2) {
+ console.error("[TapToTalk] Custom recorder failed:", e2);
+ setErrorMsg("Mic access denied");
+ setVoiceTrigger(null);
  }
- console.log("[TapToTalk] Mobile: Waiting 300ms before starting recognition...");
- setTimeout(() => {
- performStart();
- }, 300);
  } else {
- if (onFocusTarget) {
- console.log("[TapToTalk] Desktop: calling onFocusTarget()");
- onFocusTarget();
+ console.log("[TapToTalk] Starting native speech recognition...");
+ if (!nativeSpeech.isSupported) {
+ console.error("[TapToTalk] Native speech not supported");
+ setErrorMsg("Speech not supported");
+ setVoiceTrigger(null);
+ return;
  }
- performStart();
+ console.log("[TapToTalk] Calling nativeSpeech.start()...");
+ nativeSpeech.start();
+ console.log("[TapToTalk] nativeSpeech.start() called");
  }
  }
  } finally {
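TapToTalk gets the same flattening: the performStart closure and the mobile blur-plus-300 ms delay are removed, onFocusTarget is now called up front and only on desktop, and the custom or native start path runs inline. A simplified sketch of the new start path (illustrative only; console logging omitted, error variable names shortened):

setVoiceTrigger("click");
if (!isMobile() && onFocusTarget) onFocusTarget();   // desktop-only focus hand-off
if (voiceConfig?.mode === "custom") {
  try {
    await customRecorder.start();
  } catch (e) {
    setErrorMsg("Mic access denied");                // mic permission denied or recorder failure
    setVoiceTrigger(null);
  }
} else {
  if (!nativeSpeech.isSupported) {
    setErrorMsg("Speech not supported");
    setVoiceTrigger(null);
    return;
  }
  nativeSpeech.start();                              // native Web Speech recognition
}

As in ChatInputArea, the practical change is that tapping the control on mobile starts recognition immediately instead of after a blur and a 300 ms timeout.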