@contentgrowth/llm-service 0.9.98 → 0.9.99

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -219,7 +219,7 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
  console.log("[useSpeechRecognition] Creating NEW recognition instance within user gesture context. Timestamp:", Date.now());
  const recognition = new SpeechRecognition();
  const isMobile = /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0;
- recognition.continuous = !isMobile;
+ recognition.continuous = true;
  recognition.interimResults = true;
  recognition.lang = languageRef.current;
  console.log("[useSpeechRecognition] Instance created. continuous:", recognition.continuous, "interimResults:", recognition.interimResults, "lang:", recognition.lang, "isMobile:", isMobile, "instanceId:", instanceIdRef.current);
@@ -979,7 +979,6 @@ var TapToTalk = ({
  const globalConfig = useChatConfig();
  const voiceConfig = propVoiceConfig || ((_a = globalConfig.voice) == null ? void 0 : _a.config);
  const [isTranscribing, setIsTranscribing] = useState4(false);
- const [voiceTrigger, setVoiceTrigger] = useState4(null);
  const [errorMsg, setErrorMsg] = useState4(null);
  const [showDebug, setShowDebug] = useState4(false);
  const [logs, setLogs] = useState4([]);
@@ -1025,14 +1024,14 @@ var TapToTalk = ({
  navigator.clipboard.writeText(logs.join("\n")).then(() => alert("Logs copied to clipboard")).catch((err) => console.error("Failed to copy logs", err));
  }, [logs]);
  const handleVoiceResult = useCallback4((text, isFinal) => {
+ console.log("[TapToTalk] Native speech result:", text, isFinal);
  if (isFinal) {
  onResult(text);
  setErrorMsg(null);
- setVoiceTrigger(null);
  }
  }, [onResult]);
  const handleVoiceEnd = useCallback4(() => {
- setVoiceTrigger(null);
+ console.log("[TapToTalk] Native speech ended");
  }, []);
  const nativeSpeech = useSpeechRecognition(handleVoiceResult, handleVoiceEnd, voiceConfig == null ? void 0 : voiceConfig.language);
  React3.useEffect(() => {
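
The handler added above logs every native speech result but still forwards a transcript only once isFinal is true. A minimal consumer-side sketch of that contract, using the hook signature shown in the first hunk (sendMessage is a hypothetical downstream handler, not part of the package):

    // Only final transcripts reach the caller; interim ones are ignored.
    const handleVoiceResult = (text, isFinal) => {
      if (!isFinal) return;   // interim transcript, keep listening
      sendMessage(text);      // hypothetical: commit the final transcript
    };
    const speech = useSpeechRecognition(handleVoiceResult, () => {}, "en-US");
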
@@ -1042,7 +1041,6 @@ var TapToTalk = ({
  }
  }, [nativeSpeech.error]);
  const customRecorder = useAudioRecorder(async (blob) => {
- setVoiceTrigger(null);
  setIsTranscribing(true);
  setErrorMsg(null);
  if (blob.type === "audio/simulated") {
@@ -1066,7 +1064,7 @@ var TapToTalk = ({
  setIsTranscribing(false);
  }
  });
- const isListening = !!voiceTrigger || nativeSpeech.isListening || customRecorder.isRecording;
+ const isListening = nativeSpeech.isListening || customRecorder.isRecording;
  const isActive = isListening || isTranscribing;
  const processingRef = useRef4(false);
  const isMobile = useCallback4(() => {
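
This hunk is the core of the cleanup: isListening is now derived from the flags the two hooks already expose, so the separate voiceTrigger flag (set on click and cleared in every result, end, stop, and error path elsewhere in this diff) is no longer needed. A short annotated sketch of the before/after, using only names that appear in the diff:

    // Before: a mirrored flag that had to be cleared on every exit path
    // and could drift out of sync with the hooks.
    //   const [voiceTrigger, setVoiceTrigger] = useState4(null);
    //   const isListening = !!voiceTrigger || nativeSpeech.isListening || customRecorder.isRecording;
    // After: listening state is recomputed from the hooks on every render.
    const isListening = nativeSpeech.isListening || customRecorder.isRecording;
    const isActive = isListening || isTranscribing;
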
@@ -1088,7 +1086,6 @@ var TapToTalk = ({
  console.log("[TapToTalk] Debug trigger force-stop");
  if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") nativeSpeech.stop();
  else customRecorder.stop();
- setVoiceTrigger(null);
  }
  return;
  }
@@ -1111,12 +1108,9 @@ var TapToTalk = ({
  } else {
  customRecorder.stop();
  }
- setVoiceTrigger(null);
  } else {
  console.log("[TapToTalk] Starting voice... mode:", voiceConfig == null ? void 0 : voiceConfig.mode);
  setErrorMsg(null);
- setVoiceTrigger("click");
- console.log("[TapToTalk] voiceTrigger set to click");
  if (!isMobile() && onFocusTarget) {
  console.log("[TapToTalk] Desktop: calling onFocusTarget()");
  onFocusTarget();
@@ -1129,14 +1123,12 @@ var TapToTalk = ({
  } catch (e2) {
  console.error("[TapToTalk] Custom recorder failed:", e2);
  setErrorMsg("Mic access denied");
- setVoiceTrigger(null);
  }
  } else {
  console.log("[TapToTalk] Starting native speech recognition...");
  if (!nativeSpeech.isSupported) {
  console.error("[TapToTalk] Native speech not supported");
  setErrorMsg("Speech not supported");
- setVoiceTrigger(null);
  return;
  }
  console.log("[TapToTalk] Calling nativeSpeech.start()...");
@@ -1189,6 +1181,7 @@ var TapToTalk = ({
  {
  type: "button",
  onClick: toggleVoice,
+ style: { touchAction: "manipulation" },
  disabled: disabled || isTranscribing && !isListening,
  className: `flex items-center justify-center gap-3 px-6 py-3 rounded-xl transition-all duration-300 w-full font-medium shadow-md active:scale-[0.98]
  ${bgColor} text-white
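
The new inline style sets the CSS touch-action property on the mic button. As general CSS/DOM behavior (not anything specific to this package), touch-action: manipulation keeps panning and pinch-zoom but disables double-tap-to-zoom, which lets browsers dispatch click events for fast repeated taps without the delay they would otherwise add while waiting for a second tap:

    // Plain-DOM sketch of the same idea; "micButton" is a hypothetical element id.
    const button = document.getElementById("micButton");
    button.style.touchAction = "manipulation"; // fast taps register as plain clicks
    button.addEventListener("click", () => console.log("toggle voice"));
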