@contentgrowth/llm-service 0.9.8 → 0.9.91

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -167,6 +167,7 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
     onResultRef.current = onResult;
     onEndRef.current = onEnd;
   }, [onResult, onEnd]);
+  const isStartingRef = useRef(false);
   useEffect(() => {
     if (typeof window !== "undefined") {
       const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
@@ -177,11 +178,13 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
       recognition.interimResults = true;
       recognition.onstart = () => {
         console.log("[useSpeechRecognition] Native onstart event fired");
+        isStartingRef.current = false;
         setIsListening(true);
         setError(null);
       };
       recognition.onend = () => {
         console.log("[useSpeechRecognition] Native onend event fired");
+        isStartingRef.current = false;
         if (isSimulatingRef.current) {
           return;
         }
@@ -205,6 +208,7 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
       };
       recognition.onerror = (event) => {
         console.error("[useSpeechRecognition] Native onerror event:", event.error);
+        isStartingRef.current = false;
         if (event.error === "not-allowed" && process.env.NODE_ENV === "development") {
           console.warn("Speech recognition blocked. Simulating input for development...");
           isSimulatingRef.current = true;
@@ -246,20 +250,36 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
     }
   }, [language]);
   const start = useCallback(() => {
-    console.log("[useSpeechRecognition] start() called");
-    if (isSimulatingRef.current) return;
+    console.log("[useSpeechRecognition] start() called. isListening:", isListening, "isStarting:", isStartingRef.current, "hasInstance:", !!recognitionRef.current);
+    if (isSimulatingRef.current) {
+      console.log("[useSpeechRecognition] isSimulating, ignoring start");
+      return;
+    }
     if (!recognitionRef.current) {
       console.error("[useSpeechRecognition] Recognition instance missing");
       return;
     }
+    if (isStartingRef.current) {
+      console.warn("[useSpeechRecognition] Already starting - ignoring duplicate call");
+      return;
+    }
+    if (recognitionRef.current.isListening) {
+      console.warn("[useSpeechRecognition] Already listening (native prop) - ignoring");
+    }
+    if (isListening) {
+      console.warn("[useSpeechRecognition] App state says already listening - ignoring");
+      return;
+    }
     try {
       setTranscript("");
+      isStartingRef.current = true;
       recognitionRef.current.start();
-      console.log("[useSpeechRecognition] recognition.start() executed");
+      console.log("[useSpeechRecognition] recognition.start() executed successfully");
     } catch (error2) {
+      isStartingRef.current = false;
       console.error("[useSpeechRecognition] Failed to start recognition:", error2);
     }
-  }, []);
+  }, [isListening]);
   const stop = useCallback(() => {
     console.log("[useSpeechRecognition] stop() called");
     if (isSimulatingRef.current) {
@@ -888,58 +908,80 @@ var TapToTalk = ({
   });
   const isListening = !!voiceTrigger || nativeSpeech.isListening || customRecorder.isRecording;
   const isActive = isListening || isTranscribing;
-  const toggleVoice = async () => {
-    console.log("[TapToTalk] toggleVoice called. isActive:", isActive);
-    const now = Date.now();
-    if (now - tapCountRef.current.lastTap < 500) {
-      tapCountRef.current.count++;
-    } else {
-      tapCountRef.current.count = 1;
+  const processingRef = useRef4(false);
+  const toggleVoice = async (e) => {
+    if (e) {
+      e.preventDefault();
+      e.stopPropagation();
     }
-    tapCountRef.current.lastTap = now;
-    if (tapCountRef.current.count >= 5) {
-      setShowDebug((prev) => !prev);
-      tapCountRef.current.count = 0;
-      if (isActive) {
-        console.log("[TapToTalk] Debug trigger force-stop");
-        if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") nativeSpeech.stop();
-        else customRecorder.stop();
-        setVoiceTrigger(null);
-      }
+    console.trace("[TapToTalk] toggleVoice called trace");
+    if (processingRef.current) {
+      console.log("[TapToTalk] toggleVoice ignored - processing");
       return;
     }
-    if (isActive) {
-      if (isTranscribing && !isListening) {
-        console.log("[TapToTalk] Ignoring click during transcription");
-        return;
-      }
-      console.log("[TapToTalk] Stopping voice...");
-      if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
-        nativeSpeech.stop();
+    processingRef.current = true;
+    console.log("[TapToTalk] toggleVoice called. isActive:", isActive);
+    try {
+      const now = Date.now();
+      if (now - tapCountRef.current.lastTap < 500) {
+        tapCountRef.current.count++;
       } else {
-        customRecorder.stop();
+        tapCountRef.current.count = 1;
       }
-      setVoiceTrigger(null);
-    } else {
-      console.log("[TapToTalk] Starting voice...");
-      setErrorMsg(null);
-      onFocusTarget == null ? void 0 : onFocusTarget();
-      setVoiceTrigger("click");
-      if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
-        try {
-          await customRecorder.start();
-        } catch (e) {
-          setErrorMsg("Mic access denied");
+      tapCountRef.current.lastTap = now;
+      if (tapCountRef.current.count >= 5) {
+        setShowDebug((prev) => !prev);
+        tapCountRef.current.count = 0;
+        if (isActive) {
+          console.log("[TapToTalk] Debug trigger force-stop");
+          if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") nativeSpeech.stop();
+          else customRecorder.stop();
           setVoiceTrigger(null);
         }
-      } else {
-        if (!nativeSpeech.isSupported) {
-          setErrorMsg("Speech not supported");
-          setVoiceTrigger(null);
+        return;
+      }
+      if (isActive) {
+        if (isTranscribing && !isListening) {
+          console.log("[TapToTalk] Ignoring click during transcription");
           return;
         }
-        nativeSpeech.start();
+        console.log("[TapToTalk] Stopping voice...");
+        if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
+          nativeSpeech.stop();
+        } else {
+          customRecorder.stop();
+        }
+        setVoiceTrigger(null);
+      } else {
+        console.log("[TapToTalk] Starting voice...");
+        setErrorMsg(null);
+        if (onFocusTarget) {
+          console.log("[TapToTalk] calling onFocusTarget() - this might trigger keyboard");
+          onFocusTarget();
+        } else {
+          console.log("[TapToTalk] onFocusTarget is undefined");
+        }
+        setVoiceTrigger("click");
+        if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
+          try {
+            await customRecorder.start();
+          } catch (e2) {
+            setErrorMsg("Mic access denied");
+            setVoiceTrigger(null);
+          }
+        } else {
+          if (!nativeSpeech.isSupported) {
+            setErrorMsg("Speech not supported");
+            setVoiceTrigger(null);
+            return;
+          }
+          nativeSpeech.start();
+        }
       }
+    } finally {
+      setTimeout(() => {
+        processingRef.current = false;
+      }, 300);
     }
   };
   let bgColor = accentColor;
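In `TapToTalk`, the rewrite wraps the entire `toggleVoice` body in a `try`/`finally` behind a `processingRef` lock that is only released 300 ms after the handler settles, so a single tap that fires both a touch event and a synthesized click cannot toggle the recorder twice. The lock shape, extracted into a standalone helper for illustration (`useSingleFlightHandler` is a hypothetical name, not part of the package):

```js
import { useRef } from "react";

// Sketch of the toggleVoice lock added in 0.9.91: a ref-based flag
// rejects re-entrant calls, and a 300 ms cooldown after the handler
// settles absorbs the duplicate touch/click pair from a single tap.
function useSingleFlightHandler(handler, cooldownMs = 300) {
  const processingRef = useRef(false);
  return async (e) => {
    if (e) {
      e.preventDefault(); // keep a touchend from also synthesizing a click
      e.stopPropagation();
    }
    if (processingRef.current) return; // duplicate event within the cooldown
    processingRef.current = true;
    try {
      await handler(e);
    } finally {
      // release on a timer rather than immediately, so the trailing
      // duplicate event from the same tap still finds the lock held
      setTimeout(() => {
        processingRef.current = false;
      }, cooldownMs);
    }
  };
}
```

Note that the `console.trace` call and the verbose `onFocusTarget` keyboard logging ship in the published bundle, which suggests this release was cut while a mobile double-fire issue was still being diagnosed.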