@contentgrowth/llm-service 0.9.9 → 0.9.91
@@ -250,8 +250,11 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
     }
   }, [language]);
   const start = useCallback(() => {
-    console.log("[useSpeechRecognition] start() called");
-    if (isSimulatingRef.current)
+    console.log("[useSpeechRecognition] start() called. isListening:", isListening, "isStarting:", isStartingRef.current, "hasInstance:", !!recognitionRef.current);
+    if (isSimulatingRef.current) {
+      console.log("[useSpeechRecognition] isSimulating, ignoring start");
+      return;
+    }
     if (!recognitionRef.current) {
       console.error("[useSpeechRecognition] Recognition instance missing");
       return;
@@ -261,7 +264,7 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
       return;
     }
     if (recognitionRef.current.isListening) {
-      console.warn("[useSpeechRecognition] Already listening - ignoring");
+      console.warn("[useSpeechRecognition] Already listening (native prop) - ignoring");
     }
     if (isListening) {
       console.warn("[useSpeechRecognition] App state says already listening - ignoring");
@@ -271,7 +274,7 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
       setTranscript("");
       isStartingRef.current = true;
       recognitionRef.current.start();
-      console.log("[useSpeechRecognition] recognition.start() executed");
+      console.log("[useSpeechRecognition] recognition.start() executed successfully");
     } catch (error2) {
       isStartingRef.current = false;
       console.error("[useSpeechRecognition] Failed to start recognition:", error2);
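For context, the hunks above tighten the re-entrancy guards around start(): the simulation check gains an explicit block with an early return, and the entry log now reports the listening/starting state. The following is a minimal TypeScript sketch of that guard pattern, assuming a standard Web Speech API recognition instance and reusing only the names visible in the diff (recognitionRef, isStartingRef, isSimulatingRef, isListening); it is an illustration, not the package's actual source.

// Sketch only (assumed shape, not the published bundle): bail out of start()
// while simulating, while a previous start() is still pending, or while
// recognition is already active.
import { useCallback, useRef, useState } from "react";

export function useGuardedStart() {
  const recognitionRef = useRef<any>(null); // Web Speech API SpeechRecognition instance, created elsewhere
  const isStartingRef = useRef(false);      // start() issued, onstart not yet fired
  const isSimulatingRef = useRef(false);    // simulated/test input in progress
  const [isListening, setIsListening] = useState(false);

  const start = useCallback(() => {
    if (isSimulatingRef.current) return;               // ignore while simulating
    if (!recognitionRef.current) return;               // no recognition instance yet
    if (isStartingRef.current || isListening) return;  // already starting or listening
    try {
      isStartingRef.current = true;
      recognitionRef.current.start();                  // throws if already started
    } catch (err) {
      isStartingRef.current = false;                   // allow a retry after failure
      console.error("start() failed:", err);
    }
  }, [isListening]);

  return { start, recognitionRef, isStartingRef, isSimulatingRef, setIsListening };
}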
@@ -906,7 +909,12 @@ var TapToTalk = ({
   const isListening = !!voiceTrigger || nativeSpeech.isListening || customRecorder.isRecording;
   const isActive = isListening || isTranscribing;
   const processingRef = useRef4(false);
-  const toggleVoice = async () => {
+  const toggleVoice = async (e) => {
+    if (e) {
+      e.preventDefault();
+      e.stopPropagation();
+    }
+    console.trace("[TapToTalk] toggleVoice called trace");
     if (processingRef.current) {
       console.log("[TapToTalk] toggleVoice ignored - processing");
       return;
@@ -947,12 +955,17 @@ var TapToTalk = ({
     } else {
       console.log("[TapToTalk] Starting voice...");
       setErrorMsg(null);
-
+      if (onFocusTarget) {
+        console.log("[TapToTalk] calling onFocusTarget() - this might trigger keyboard");
+        onFocusTarget();
+      } else {
+        console.log("[TapToTalk] onFocusTarget is undefined");
+      }
       setVoiceTrigger("click");
       if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
         try {
           await customRecorder.start();
-        } catch (
+        } catch (e2) {
           setErrorMsg("Mic access denied");
           setVoiceTrigger(null);
         }
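The TapToTalk hunks change toggleVoice to accept the triggering event and call preventDefault()/stopPropagation() on it. A hypothetical TypeScript/JSX wiring of that new signature is sketched below; the TalkButton component and its props are illustrative assumptions, not part of the package.

// Hypothetical wiring (not from the package): pass the click event into
// toggleVoice so its preventDefault()/stopPropagation() guards can keep the
// tap from submitting a surrounding form or bubbling to parent handlers.
import React from "react";

type TalkButtonProps = {
  toggleVoice: (e?: React.SyntheticEvent) => Promise<void>;
};

export function TalkButton({ toggleVoice }: TalkButtonProps) {
  return (
    <button type="button" onClick={(e) => void toggleVoice(e)}>
      Tap to talk
    </button>
  );
}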