@contentgrowth/llm-service 0.9.91 → 0.9.92
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -169,45 +169,62 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
 }, [onResult, onEnd]);
 const isStartingRef = useRef(false);
 useEffect(() => {
+const isMobile = typeof window !== "undefined" && (/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0);
 if (typeof window !== "undefined") {
 const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+console.log("[useSpeechRecognition] Init - SpeechRecognition available:", !!SpeechRecognition, "isMobile:", isMobile);
 if (SpeechRecognition) {
 setIsSupported(true);
 const recognition = new SpeechRecognition();
 recognition.continuous = true;
 recognition.interimResults = true;
+console.log("[useSpeechRecognition] Created recognition instance. continuous:", recognition.continuous, "interimResults:", recognition.interimResults);
 recognition.onstart = () => {
-console.log("[useSpeechRecognition] Native onstart event fired");
+console.log("[useSpeechRecognition] Native onstart event fired. Timestamp:", Date.now());
 isStartingRef.current = false;
 setIsListening(true);
 setError(null);
 };
 recognition.onend = () => {
-console.log("[useSpeechRecognition] Native onend event fired");
+console.log("[useSpeechRecognition] Native onend event fired. Timestamp:", Date.now());
 isStartingRef.current = false;
 if (isSimulatingRef.current) {
+console.log("[useSpeechRecognition] onend ignored - simulating");
 return;
 }
 setIsListening(false);
 if (onEndRef.current) onEndRef.current();
 };
 recognition.onresult = (event) => {
+console.log("[useSpeechRecognition] onresult event. results count:", event.results.length);
 let interimTranscript = "";
 let finalTranscript = "";
 for (let i = event.results.length - 1; i < event.results.length; ++i) {
 const result = event.results[i];
 if (result.isFinal) {
 finalTranscript += result[0].transcript;
+console.log("[useSpeechRecognition] Final transcript:", finalTranscript);
 if (onResultRef.current) onResultRef.current(finalTranscript, true);
 } else {
 interimTranscript += result[0].transcript;
+console.log("[useSpeechRecognition] Interim transcript:", interimTranscript);
 if (onResultRef.current) onResultRef.current(interimTranscript, false);
 }
 }
 setTranscript((prev) => prev + finalTranscript);
 };
 recognition.onerror = (event) => {
-console.error("[useSpeechRecognition] Native onerror event:", event.error);
+console.error("[useSpeechRecognition] Native onerror event:", event.error, "Timestamp:", Date.now());
+console.error("[useSpeechRecognition] Error details - This could be caused by:");
+if (event.error === "aborted") {
+console.error("[useSpeechRecognition] - aborted: Recognition was aborted. Common causes: keyboard appeared, focus changed, another recognition started, or page navigation");
+} else if (event.error === "not-allowed") {
+console.error("[useSpeechRecognition] - not-allowed: Microphone permission denied");
+} else if (event.error === "no-speech") {
+console.error("[useSpeechRecognition] - no-speech: No speech detected");
+} else if (event.error === "network") {
+console.error("[useSpeechRecognition] - network: Network error during recognition");
+}
 isStartingRef.current = false;
 if (event.error === "not-allowed" && process.env.NODE_ENV === "development") {
 console.warn("Speech recognition blocked. Simulating input for development...");
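Aside from the timestamped logging, the substantive addition in this hunk is the mobile check computed at the top of the effect: a user-agent sniff combined with two touch-capability probes. A condensed standalone sketch of the same predicate, under the assumption that a named helper reads more clearly than the inlined expression (the helper name is ours, not part of the package):

// Hypothetical helper; the package inlines this exact expression instead.
const isMobileDevice = () =>
  typeof window !== "undefined" &&
  (/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) ||
    "ontouchstart" in window ||
    navigator.maxTouchPoints > 0);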
@@ -250,7 +267,13 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
 }
 }, [language]);
 const start = useCallback(() => {
-
+var _a;
+const startTimestamp = Date.now();
+console.log("[useSpeechRecognition] start() called. Timestamp:", startTimestamp);
+console.log("[useSpeechRecognition] State check - isListening:", isListening, "isStarting:", isStartingRef.current, "hasInstance:", !!recognitionRef.current);
+if (typeof document !== "undefined") {
+console.log("[useSpeechRecognition] Document hasFocus:", document.hasFocus(), "activeElement:", (_a = document.activeElement) == null ? void 0 : _a.tagName);
+}
 if (isSimulatingRef.current) {
 console.log("[useSpeechRecognition] isSimulating, ignoring start");
 return;
@@ -273,11 +296,15 @@ var useSpeechRecognition = (onResult, onEnd, language = "en-US") => {
 try {
 setTranscript("");
 isStartingRef.current = true;
+console.log("[useSpeechRecognition] About to call recognition.start(). Timestamp:", Date.now());
 recognitionRef.current.start();
-console.log("[useSpeechRecognition] recognition.start() executed successfully");
+console.log("[useSpeechRecognition] recognition.start() executed successfully. Timestamp:", Date.now());
 } catch (error2) {
 isStartingRef.current = false;
-console.error("[useSpeechRecognition] Failed to start recognition:", error2);
+console.error("[useSpeechRecognition] Failed to start recognition:", (error2 == null ? void 0 : error2.message) || error2);
+if ((error2 == null ? void 0 : error2.name) === "InvalidStateError") {
+console.error("[useSpeechRecognition] InvalidStateError - recognition may already be running");
+}
 }
 }, [isListening]);
 const stop = useCallback(() => {
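For context on the new catch branch: the Web Speech API's SpeechRecognition.start() throws a DOMException named InvalidStateError when the recognizer has already been started, so checking err.name separates a harmless double-start from a real failure. A minimal sketch of the same guard outside the hook (the wrapper name is ours, not the package's):

// Hypothetical wrapper illustrating the InvalidStateError guard used above.
function safeStart(recognition) {
  try {
    recognition.start();
    return true;
  } catch (err) {
    if (err && err.name === "InvalidStateError") {
      // Recognizer already running; treat as a no-op rather than an error.
      return false;
    }
    throw err; // anything else is a genuine failure
  }
}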
@@ -601,26 +628,47 @@ var ChatInputArea = forwardRef(({
 handleSubmit();
 }
 };
+const isMobile = useCallback3(() => {
+if (typeof window === "undefined") return false;
+return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0;
+}, []);
 const startRecording = async (trigger) => {
 var _a2;
-
+console.log("[ChatInputArea] startRecording called. trigger:", trigger, "isMobile:", isMobile());
+console.log("[ChatInputArea] Current state - voiceTrigger:", voiceTrigger, "isTranscribing:", isTranscribing);
+if (voiceTrigger || isTranscribing) {
+console.log("[ChatInputArea] startRecording ignored - already active");
+return;
+}
 setVoiceTrigger(trigger);
 setVoiceError(null);
+console.log("[ChatInputArea] Calling voiceConfig.onVoiceStart if exists...");
 (_a2 = voiceConfig == null ? void 0 : voiceConfig.onVoiceStart) == null ? void 0 : _a2.call(voiceConfig);
 if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "native") {
+console.log("[ChatInputArea] Using native speech recognition");
 if (!nativeSpeech.isSupported) {
+console.error("[ChatInputArea] Native speech not supported");
 alert("Speech recognition is not supported in this browser.");
 setVoiceTrigger(null);
 return;
 }
+console.log("[ChatInputArea] Calling nativeSpeech.start()...");
 nativeSpeech.start();
+console.log("[ChatInputArea] nativeSpeech.start() called");
 } else {
+console.log("[ChatInputArea] Using custom recorder");
 await customRecorder.start();
+console.log("[ChatInputArea] Custom recorder started");
+}
+if (!isMobile()) {
+console.log("[ChatInputArea] Re-focusing textarea (desktop only)");
+setTimeout(() => {
+var _a3;
+return (_a3 = textareaRef.current) == null ? void 0 : _a3.focus();
+}, 0);
+} else {
+console.log("[ChatInputArea] SKIPPING textarea focus on mobile to prevent keyboard conflict");
 }
-setTimeout(() => {
-var _a3;
-return (_a3 = textareaRef.current) == null ? void 0 : _a3.focus();
-}, 0);
 };
 const stopRecording = () => {
 if (!voiceTrigger) return;
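The behavioral change in this hunk: after a recording starts, the textarea is now re-focused only on desktop, while on mobile the focus call is skipped because focusing brings up the soft keyboard, which (per the package's own log messages) can steal focus and abort native recognition. The pattern reduced to a sketch, with names of our choosing rather than the package's:

// Hypothetical condensation of the focus logic added above.
function refocusUnlessMobile(textareaRef, isMobile) {
  if (isMobile()) return; // soft keyboard would conflict with recognition
  setTimeout(() => textareaRef.current?.focus(), 0); // defer until after the current event
}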
@@ -909,18 +957,22 @@ var TapToTalk = ({
 const isListening = !!voiceTrigger || nativeSpeech.isListening || customRecorder.isRecording;
 const isActive = isListening || isTranscribing;
 const processingRef = useRef4(false);
+const isMobile = useCallback4(() => {
+if (typeof window === "undefined") return false;
+return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) || "ontouchstart" in window || navigator.maxTouchPoints > 0;
+}, []);
 const toggleVoice = async (e) => {
 if (e) {
 e.preventDefault();
 e.stopPropagation();
 }
-console.
+console.log("[TapToTalk] toggleVoice called. isMobile:", isMobile());
 if (processingRef.current) {
 console.log("[TapToTalk] toggleVoice ignored - processing");
 return;
 }
 processingRef.current = true;
-console.log("[TapToTalk] toggleVoice called. isActive:", isActive);
+console.log("[TapToTalk] toggleVoice called. isActive:", isActive, "isListening:", isListening, "isTranscribing:", isTranscribing);
 try {
 const now = Date.now();
 if (now - tapCountRef.current.lastTap < 500) {
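TapToTalk's handler combines two guards visible in this hunk: a ref flag that rejects re-entry while an async toggle is still in flight, and a 500 ms window that appears to treat rapid successive taps as duplicates. Roughly, stripped of the React plumbing (all names here are ours, and this is a sketch of the guard shape only):

// Hypothetical standalone version of the re-entry and double-tap guards.
let processing = false;
let lastTap = 0;
async function guardedToggle(action) {
  const now = Date.now();
  if (processing || now - lastTap < 500) return; // drop re-entrant calls and double taps
  lastTap = now;
  processing = true;
  try {
    await action();
  } finally {
    processing = false; // always release the guard, even on error
  }
}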
@@ -953,29 +1005,39 @@ var TapToTalk = ({
 }
 setVoiceTrigger(null);
 } else {
-console.log("[TapToTalk] Starting voice...");
+console.log("[TapToTalk] Starting voice... mode:", voiceConfig == null ? void 0 : voiceConfig.mode);
 setErrorMsg(null);
-if (onFocusTarget) {
-console.log("[TapToTalk] calling onFocusTarget()");
+if (onFocusTarget && !isMobile()) {
+console.log("[TapToTalk] calling onFocusTarget() (desktop only)");
 onFocusTarget();
+} else if (onFocusTarget) {
+console.log("[TapToTalk] SKIPPING onFocusTarget on mobile to prevent keyboard conflict");
 } else {
 console.log("[TapToTalk] onFocusTarget is undefined");
 }
 setVoiceTrigger("click");
+console.log("[TapToTalk] voiceTrigger set to click");
 if ((voiceConfig == null ? void 0 : voiceConfig.mode) === "custom") {
+console.log("[TapToTalk] Starting custom recorder...");
 try {
 await customRecorder.start();
+console.log("[TapToTalk] Custom recorder started successfully");
 } catch (e2) {
+console.error("[TapToTalk] Custom recorder failed:", e2);
 setErrorMsg("Mic access denied");
 setVoiceTrigger(null);
 }
 } else {
+console.log("[TapToTalk] Starting native speech recognition...");
 if (!nativeSpeech.isSupported) {
+console.error("[TapToTalk] Native speech not supported");
 setErrorMsg("Speech not supported");
 setVoiceTrigger(null);
 return;
 }
+console.log("[TapToTalk] Calling nativeSpeech.start()...");
 nativeSpeech.start();
+console.log("[TapToTalk] nativeSpeech.start() called");
 }
 }
 } finally {