@bytexbyte/nxtlinq-ai-agent-sdk 1.6.8 → 1.6.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAS/B,OAAO,EACL,kBAAkB,EAClB,YAAY,EAGb,MAAM,uBAAuB,CAAC;AAI/B,eAAO,MAAM,UAAU,0BAMtB,CAAC;AAEF,eAAO,MAAM,eAAe,EAAE,KAAK,CAAC,EAAE,CAAC,YAAY,CA+6DlD,CAAC"}
+ {"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAS/B,OAAO,EACL,kBAAkB,EAClB,YAAY,EAGb,MAAM,uBAAuB,CAAC;AAI/B,eAAO,MAAM,UAAU,0BAMtB,CAAC;AAEF,eAAO,MAAM,eAAe,EAAE,KAAK,CAAC,EAAE,CAAC,YAAY,CAs/DlD,CAAC"}
@@ -35,7 +35,7 @@ idvBannerDismissSeconds = 86400,
  isStopRecordingOnSend = false, }) => {
  const nxtlinqApi = React.useMemo(() => createNxtlinqApi(apiKey, apiSecret), [apiKey, apiSecret]);
  // Custom hook
- const { isRecording, transcript, start: startRecording, stop: stopRecording, clear: clearRecording } = useSpeechToTextFromMic({
+ const { isRecording, transcript, partialTranscript, start: startRecording, stop: stopRecording, clear: clearRecording } = useSpeechToTextFromMic({
  apiKey,
  apiSecret
  });
@@ -77,6 +77,40 @@ isStopRecordingOnSend = false, }) => {
  // Refs for input value and recording state
  const isRecordingRef = React.useRef(false);
  const textInputRef = React.useRef(null);
+ const lastPartialRangeRef = React.useRef(null);
+ function insertPartial(input, partial, caret) {
+ let start = caret;
+ let end = caret;
+ if (lastPartialRangeRef.current) {
+ start = lastPartialRangeRef.current.start;
+ end = lastPartialRangeRef.current.end;
+ }
+ const before = input.slice(0, start);
+ const after = input.slice(end);
+ const next = before + partial + after;
+ const newCaret = start + partial.length;
+ lastPartialRangeRef.current = { start, end: newCaret };
+ return { next, caret: newCaret };
+ }
+ function finalizePartial(input, finalText) {
+ if (!lastPartialRangeRef.current) {
+ return { next: input, caret: input.length };
+ }
+ const { start, end } = lastPartialRangeRef.current;
+ const before = input.slice(0, start);
+ const after = input.slice(end);
+ const next = before + finalText + after;
+ const caret = start + finalText.length;
+ lastPartialRangeRef.current = null;
+ return { next, caret };
+ }
+ function normalizeTranscript(text) {
+ return text
+ .toLowerCase()
+ .replace(/[.,!?;:]/g, '')
+ .replace(/\s+/g, ' ')
+ .trim();
+ }
  // Simple token cleanup function
  const clearExpiredToken = React.useCallback(() => {
  try {
@@ -104,10 +138,34 @@ isStopRecordingOnSend = false, }) => {
  signerRef.current = signer;
  }, [signer]);
  React.useEffect(() => {
- if (!transcript) {
+ if (!textInputRef.current)
  return;
+ const el = textInputRef.current;
+ const selStart = el.selectionStart ?? inputValue.length;
+ if (partialTranscript) {
+ const { next, caret } = insertPartial(inputValue, partialTranscript, selStart);
+ setInputValue(next);
+ setTimeout(() => {
+ if (textInputRef.current) {
+ textInputRef.current.selectionStart = caret;
+ textInputRef.current.selectionEnd = caret;
+ }
+ }, 0);
  }
- setInputValue(transcript);
+ }, [partialTranscript]);
+ React.useEffect(() => {
+ if (!transcript)
+ return;
+ if (!textInputRef.current)
+ return;
+ const { next, caret } = finalizePartial(inputValue, transcript);
+ setInputValue(normalizeTranscript(next));
+ setTimeout(() => {
+ if (textInputRef.current) {
+ textInputRef.current.selectionStart = caret;
+ textInputRef.current.selectionEnd = caret;
+ }
+ }, 0);
  }, [transcript]);
  React.useEffect(() => {
  isRecordingRef.current = isRecording;
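
The insertPartial/finalizePartial pair above tracks the character range of the last interim insertion so that each new interim result overwrites the previous one instead of appending, and the final result then replaces that same range exactly once. Below is a minimal standalone sketch of that behavior; it uses module-level state instead of a React ref, the names mirror the diff, and the worked values are purely illustrative.

// Sketch only: range-replacement of interim speech results (assumed standalone module).
type Range = { start: number; end: number } | null;
let lastPartialRange: Range = null;

function insertPartial(input: string, partial: string, caret: number) {
  const start = lastPartialRange ? lastPartialRange.start : caret;
  const end = lastPartialRange ? lastPartialRange.end : caret;
  const next = input.slice(0, start) + partial + input.slice(end);
  const newCaret = start + partial.length;
  lastPartialRange = { start, end: newCaret }; // remember where the interim text sits
  return { next, caret: newCaret };
}

function finalizePartial(input: string, finalText: string) {
  if (!lastPartialRange) return { next: input, caret: input.length };
  const { start, end } = lastPartialRange;
  lastPartialRange = null; // final text commits the range
  return { next: input.slice(0, start) + finalText + input.slice(end), caret: start + finalText.length };
}

// Each interim result overwrites the previous one; the final result commits once.
let step = insertPartial('hello ', 'wor', 6);         // { next: 'hello wor', caret: 9 }
step = insertPartial(step.next, 'world', step.caret); // { next: 'hello world', caret: 11 }
step = finalizePartial(step.next, 'world.');          // { next: 'hello world.', caret: 12 }
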
@@ -1 +1 @@
- {"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"AAKA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,YAAY,EAAE,KAAK,CAAC,EAqGhC,CAAC"}
+ {"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"AAKA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,YAAY,EAAE,KAAK,CAAC,EAoGhC,CAAC"}
@@ -24,7 +24,7 @@ export const MessageInput = () => {
  border-top: 1px solid #eee !important;
  `, children: [_jsx(InputBase, { value: inputValue, onChange: (e) => setInputValue(e.target.value), onKeyPress: handleKeyPress, placeholder: inputPlaceholder, fullWidth: true, inputProps: {
  ref: textInputRef
- }, endAdornment: _jsx(IconButton, { onClick: () => isRecording ? stopRecording() : startRecording(), children: isRecording ? _jsx(MicIcon, {}) : _jsx(MicOffIcon, {}) }), onFocus: () => stopRecording(), css: css `
+ }, endAdornment: _jsx(IconButton, { onClick: () => isRecording ? stopRecording() : startRecording(), children: isRecording ? _jsx(MicIcon, {}) : _jsx(MicOffIcon, {}) }), css: css `
  flex: 1 !important;
  padding: 10px !important;
  border: 1px solid #ddd !important;
@@ -1 +1 @@
- {"version":3,"file":"PermissionForm.d.ts","sourceRoot":"","sources":["../../../src/components/ui/PermissionForm.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAK/B,UAAU,mBAAmB;IAC3B,OAAO,EAAE,MAAM,IAAI,CAAC;CACrB;AAED,eAAO,MAAM,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC,mBAAmB,CA2iBxD,CAAC"}
+ {"version":3,"file":"PermissionForm.d.ts","sourceRoot":"","sources":["../../../src/components/ui/PermissionForm.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAK/B,UAAU,mBAAmB;IAC3B,OAAO,EAAE,MAAM,IAAI,CAAC;CACrB;AAED,eAAO,MAAM,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC,mBAAmB,CA4jBxD,CAAC"}
@@ -161,6 +161,23 @@ export const PermissionForm = ({ onClose }) => {
  display: flex !important;
  flex-direction: column !important;
  overflow: hidden !important;
+
+ /* Mobile responsive adjustments */
+ @media (max-width: 768px) {
+ width: 75vw !important;
+ max-width: 75vw !important;
+ padding: 20px !important;
+ border-radius: 8px !important;
+ max-height: 65vh !important;
+ }
+
+ @media (max-width: 480px) {
+ width: 70vw !important;
+ max-width: 70vw !important;
+ padding: 16px !important;
+ border-radius: 6px !important;
+ max-height: 60vh !important;
+ }
  `, children: [_jsxs("div", { css: css `
  display: flex !important;
  justify-content: space-between !important;
@@ -1 +1 @@
- {"version":3,"file":"isolatedStyles.d.ts","sourceRoot":"","sources":["../../../../src/components/ui/styles/isolatedStyles.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,2CAwBpB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAmExB,CAAC;AAGF,eAAO,MAAM,cAAc,2CA0B1B,CAAC;AAGF,eAAO,MAAM,UAAU,2CActB,CAAC;AAGF,eAAO,MAAM,UAAU,2CAStB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAMvB,CAAC;AAGF,eAAO,MAAM,YAAY,2CA0BxB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAoBvB,CAAC;AAGF,eAAO,MAAM,oBAAoB,2CAQhC,CAAC;AAGF,eAAO,MAAM,aAAa,2CAKzB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAKvB,CAAC;AAGF,eAAO,MAAM,cAAc,2CAQ1B,CAAC;AAGF,eAAO,MAAM,kBAAkB,2CAQ9B,CAAC;AAGF,eAAO,MAAM,mBAAmB,2CAS/B,CAAC;AAGF,eAAO,MAAM,aAAa,2CAezB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAexB,CAAC;AAGF,eAAO,MAAM,eAAe,2CAc3B,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAQ5B,CAAC;AAGF,eAAO,MAAM,cAAc,2CAM1B,CAAC;AAGF,eAAO,MAAM,UAAU,2CAYtB,CAAC;AAGF,eAAO,MAAM,QAAQ,2CAMpB,CAAC;AAGF,eAAO,MAAM,iBAAiB,2CAc7B,CAAC;AAGF,eAAO,MAAM,YAAY,2CAIxB,CAAC;AAGF,eAAO,MAAM,UAAU,2CAItB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAIxB,CAAC;AAGF,eAAO,MAAM,SAAS,2CAIrB,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAiB5B,CAAC;AAGF,eAAO,MAAM,YAAY,2CAYxB,CAAC;AAGF,eAAO,MAAM,SAAS,2CAWrB,CAAC;AAGF,eAAO,MAAM,cAAc,2CAM1B,CAAC;AAGF,eAAO,MAAM,aAAa,2CAMzB,CAAC;AAGF,eAAO,MAAM,eAAe,2CAoB3B,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAmB5B,CAAC;AAGF,eAAO,MAAM,cAAc,2CAO1B,CAAC"}
+ {"version":3,"file":"isolatedStyles.d.ts","sourceRoot":"","sources":["../../../../src/components/ui/styles/isolatedStyles.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,2CAwBpB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAmExB,CAAC;AAGF,eAAO,MAAM,cAAc,2CA6C1B,CAAC;AAGF,eAAO,MAAM,UAAU,2CAuCtB,CAAC;AAGF,eAAO,MAAM,UAAU,2CAStB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAMvB,CAAC;AAGF,eAAO,MAAM,YAAY,2CA0BxB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAoBvB,CAAC;AAGF,eAAO,MAAM,oBAAoB,2CAQhC,CAAC;AAGF,eAAO,MAAM,aAAa,2CAKzB,CAAC;AAGF,eAAO,MAAM,WAAW,2CAKvB,CAAC;AAGF,eAAO,MAAM,cAAc,2CAQ1B,CAAC;AAGF,eAAO,MAAM,kBAAkB,2CAQ9B,CAAC;AAGF,eAAO,MAAM,mBAAmB,2CAS/B,CAAC;AAGF,eAAO,MAAM,aAAa,2CAezB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAexB,CAAC;AAGF,eAAO,MAAM,eAAe,2CAc3B,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAQ5B,CAAC;AAGF,eAAO,MAAM,cAAc,2CAM1B,CAAC;AAGF,eAAO,MAAM,UAAU,2CAYtB,CAAC;AAGF,eAAO,MAAM,QAAQ,2CAMpB,CAAC;AAGF,eAAO,MAAM,iBAAiB,2CAc7B,CAAC;AAGF,eAAO,MAAM,YAAY,2CAIxB,CAAC;AAGF,eAAO,MAAM,UAAU,2CAItB,CAAC;AAGF,eAAO,MAAM,YAAY,2CAIxB,CAAC;AAGF,eAAO,MAAM,SAAS,2CAIrB,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAiB5B,CAAC;AAGF,eAAO,MAAM,YAAY,2CAyBxB,CAAC;AAGF,eAAO,MAAM,SAAS,2CAWrB,CAAC;AAGF,eAAO,MAAM,cAAc,2CAM1B,CAAC;AAGF,eAAO,MAAM,aAAa,2CAMzB,CAAC;AAGF,eAAO,MAAM,eAAe,2CAoB3B,CAAC;AAGF,eAAO,MAAM,gBAAgB,2CAmB5B,CAAC;AAGF,eAAO,MAAM,cAAc,2CAO1B,CAAC"}
@@ -111,6 +111,7 @@ export const floatingButton = css `
  font-size: 14px !important;
  font-weight: 500 !important;
  font-family: inherit !important;
+ max-width: calc(100vw - 40px) !important;
 
  &:disabled {
  cursor: not-allowed !important;
@@ -121,6 +122,24 @@ export const floatingButton = css `
  background-color: #0056b3 !important;
  transform: translateY(-2px) !important;
  }
+
+ /* Mobile responsive adjustments */
+ @media (max-width: 768px) {
+ bottom: 15px !important;
+ right: 15px !important;
+ max-width: calc(100vw - 30px) !important;
+ padding: 8px 16px !important;
+ font-size: 13px !important;
+ }
+
+ @media (max-width: 480px) {
+ bottom: 10px !important;
+ right: 10px !important;
+ max-width: calc(100vw - 20px) !important;
+ padding: 6px 12px !important;
+ font-size: 12px !important;
+ border-radius: 16px !important;
+ }
  `;
  // Chat window container styles
  export const chatWindow = css `
@@ -128,7 +147,9 @@ export const chatWindow = css `
  bottom: 20px !important;
  right: 20px !important;
  width: 500px !important;
+ max-width: calc(100vw - 40px) !important;
  height: 600px !important;
+ max-height: calc(100vh - 40px) !important;
  background-color: white !important;
  border-radius: 10px !important;
  box-shadow: 0 5px 15px rgba(0,0,0,0.2) !important;
@@ -137,6 +158,29 @@ export const chatWindow = css `
  z-index: 1001 !important;
  overflow: hidden !important;
  font-family: inherit !important;
+
+ /* Mobile responsive adjustments */
+ @media (max-width: 768px) {
+ width: 70vw !important;
+ max-width: 70vw !important;
+ height: 60vh !important;
+ max-height: 60vh !important;
+ bottom: 20px !important;
+ right: 20px !important;
+ left: auto !important;
+ border-radius: 8px !important;
+ }
+
+ @media (max-width: 480px) {
+ width: 65vw !important;
+ max-width: 65vw !important;
+ height: 55vh !important;
+ max-height: 55vh !important;
+ bottom: 15px !important;
+ right: 15px !important;
+ left: auto !important;
+ border-radius: 6px !important;
+ }
  `;
  // Header styles
  export const chatHeader = css `
@@ -424,6 +468,19 @@ export const modalOverlay = css `
  justify-content: center !important;
  z-index: 1002 !important;
  padding: 20px !important;
+
+ /* Mobile responsive adjustments */
+ @media (max-width: 768px) {
+ padding: 15px !important;
+ align-items: flex-start !important;
+ padding-top: 20px !important;
+ }
+
+ @media (max-width: 480px) {
+ padding: 10px !important;
+ align-items: flex-start !important;
+ padding-top: 15px !important;
+ }
  `;
  // IDV banner styles
  export const idvBanner = css `
@@ -1,12 +1,15 @@
  import { SpeechRecognizer } from 'microsoft-cognitiveservices-speech-sdk';
  import { Dispatch, SetStateAction } from 'react';
  /**
- * Start speech recognition, continuously updates speechToTextArray (each sentence as an array element)
+ * Start speech recognition
+ * - partialTranscript: interim word-by-word text (recognizing)
+ * - setSpeechToTextArray: array of completed sentences (recognized)
  */
- export declare const startSpeechToTextFromMic: (setSpeechToTextArray: Dispatch<SetStateAction<string[]>>, config: {
+ export declare const startSpeechToTextFromMic: (setSpeechToTextArray: Dispatch<SetStateAction<string[]>>, // completed sentences
+ config: {
  apiKey: string;
  apiSecret: string;
- }, historyRef: React.MutableRefObject<string[]>, indexRef: React.MutableRefObject<number>) => Promise<SpeechRecognizer | undefined>;
+ }, historyRef: React.MutableRefObject<string[]>, indexRef: React.MutableRefObject<number>, setPartialTranscript?: Dispatch<SetStateAction<string>>) => Promise<SpeechRecognizer | undefined>;
  /**
  * Stop speech recognition
  */
@@ -1 +1 @@
- {"version":3,"file":"helper.d.ts","sourceRoot":"","sources":["../../../../src/core/lib/useSpeechToTextFromMic/helper.ts"],"names":[],"mappings":"AACA,OAAO,EAML,gBAAgB,EACjB,MAAM,wCAAwC,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,OAAO,CAAC;AAMjD;;GAEG;AACH,eAAO,MAAM,wBAAwB,GACnC,sBAAsB,QAAQ,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,EACxD,QAAQ;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,EAC7C,YAAY,KAAK,CAAC,gBAAgB,CAAC,MAAM,EAAE,CAAC,EAC5C,UAAU,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,KACvC,OAAO,CAAC,gBAAgB,GAAG,SAAS,CA4CtC,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,eAAe,GAAI,YAAY,gBAAgB,GAAG,SAAS,SAIvE,CAAC;AAEF,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM;;;;;;;;GAwBxE"}
+ {"version":3,"file":"helper.d.ts","sourceRoot":"","sources":["../../../../src/core/lib/useSpeechToTextFromMic/helper.ts"],"names":[],"mappings":"AAAA,OAAO,EAML,gBAAgB,EACjB,MAAM,wCAAwC,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,OAAO,CAAC;AAIjD;;;;GAIG;AACH,eAAO,MAAM,wBAAwB,GACnC,sBAAsB,QAAQ,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,EAAI,OAAO;AACnE,QAAQ;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,EAC7C,YAAY,KAAK,CAAC,gBAAgB,CAAC,MAAM,EAAE,CAAC,EAC5C,UAAU,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,EACxC,uBAAuB,QAAQ,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC,KACtD,OAAO,CAAC,gBAAgB,GAAG,SAAS,CAsDtC,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,eAAe,GAAI,YAAY,gBAAgB,GAAG,SAAS,SAIvE,CAAC;AAEF,wBAAsB,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM;;;;;;;;GAwBxE"}
@@ -1,11 +1,14 @@
  import { AudioConfig, CancellationReason, PropertyId, ResultReason, SpeechConfig, SpeechRecognizer, } from 'microsoft-cognitiveservices-speech-sdk';
  import Cookie from 'universal-cookie';
  import { createNxtlinqApi } from '../../../api/nxtlinq-api';
- let recognizer;
  /**
- * Start speech recognition, continuously updates speechToTextArray (each sentence as an array element)
+ * Start speech recognition
+ * - partialTranscript: interim word-by-word text (recognizing)
+ * - setSpeechToTextArray: array of completed sentences (recognized)
  */
- export const startSpeechToTextFromMic = async (setSpeechToTextArray, config, historyRef, indexRef) => {
+ export const startSpeechToTextFromMic = async (setSpeechToTextArray, // completed sentences
+ config, historyRef, indexRef, setPartialTranscript // ✅ new: interim partial transcript
+ ) => {
  const tokenRes = await getTokenOrRefresh(config.apiKey, config.apiSecret);
  if (!tokenRes.authToken || !tokenRes.region) {
  console.error('Speech token retrieval failed:', tokenRes.error);
@@ -13,20 +16,28 @@ export const startSpeechToTextFromMic = async (setSpeechToTextArray, config, his
  }
  const speechConfig = SpeechConfig.fromAuthorizationToken(tokenRes.authToken, tokenRes.region);
  speechConfig.speechRecognitionLanguage = 'en-US';
+ // Silence detection and segmentation settings
  speechConfig.setProperty(PropertyId.SpeechServiceConnection_InitialSilenceTimeoutMs, '10000');
  speechConfig.setProperty(PropertyId.SpeechServiceConnection_EndSilenceTimeoutMs, '86400000');
  speechConfig.setProperty(PropertyId.Speech_SegmentationSilenceTimeoutMs, '1000');
  const audioConfig = AudioConfig.fromDefaultMicrophoneInput();
  const recognizer = new SpeechRecognizer(speechConfig, audioConfig);
+ // Interim partial transcript
  recognizer.recognizing = (_s, e) => {
- historyRef.current[indexRef.current] = e.result.text;
- setSpeechToTextArray([...historyRef.current]);
+ if (setPartialTranscript) {
+ setPartialTranscript(e.result.text);
+ }
  };
+ // Completed sentences
  recognizer.recognized = (_s, e) => {
  if (e.result.reason === ResultReason.RecognizedSpeech) {
- historyRef.current[indexRef.current] = e.result.text;
- setSpeechToTextArray([...historyRef.current]);
+ const text = e.result.text;
+ historyRef.current[indexRef.current] = text;
  indexRef.current += 1;
+ // ✅ Key point: only push the sentence just completed back to the UI
+ setSpeechToTextArray([text]);
+ if (setPartialTranscript)
+ setPartialTranscript('');
  }
  };
  recognizer.canceled = (_s, e) => {
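
In the Speech SDK, the recognizing handler fires repeatedly with interim text while a sentence is still being spoken, and recognized fires once per completed sentence; the change above routes the former into setPartialTranscript and the latter into setSpeechToTextArray. Below is a minimal sketch of that split outside the package; the subscription key and region strings are placeholders, not the token flow this SDK actually uses via getTokenOrRefresh.

// Sketch only: interim vs. final recognition events from microsoft-cognitiveservices-speech-sdk.
import {
  AudioConfig,
  ResultReason,
  SpeechConfig,
  SpeechRecognizer,
} from 'microsoft-cognitiveservices-speech-sdk';

function listen(
  onPartial: (text: string) => void,   // fires repeatedly while a sentence is in progress
  onSentence: (text: string) => void,  // fires once per completed sentence
) {
  const speechConfig = SpeechConfig.fromSubscription('<key>', '<region>'); // placeholders
  speechConfig.speechRecognitionLanguage = 'en-US';
  const recognizer = new SpeechRecognizer(speechConfig, AudioConfig.fromDefaultMicrophoneInput());

  recognizer.recognizing = (_s, e) => onPartial(e.result.text);
  recognizer.recognized = (_s, e) => {
    if (e.result.reason === ResultReason.RecognizedSpeech) {
      onSentence(e.result.text); // final text replaces the interim preview
      onPartial('');
    }
  };

  recognizer.startContinuousRecognitionAsync();
  return recognizer;
}
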
@@ -1,6 +1,7 @@
  interface Props {
  apiKey: string;
  apiSecret: string;
+ autoClearTranscript?: boolean;
  }
  type UseSpeechToTextFromMicResult = {
  start: () => Promise<void>;
@@ -8,7 +9,8 @@ type UseSpeechToTextFromMicResult = {
  clear: () => void;
  isRecording: boolean;
  transcript: string;
+ partialTranscript: string;
  };
- export declare function useSpeechToTextFromMic({ apiKey, apiSecret }: Props): UseSpeechToTextFromMicResult;
+ export declare function useSpeechToTextFromMic({ apiKey, apiSecret, autoClearTranscript, }: Props): UseSpeechToTextFromMicResult;
  export {};
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/core/lib/useSpeechToTextFromMic/index.ts"],"names":[],"mappings":"AAIA,UAAU,KAAK;IACb,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,KAAK,4BAA4B,GAAG;IAClC,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,IAAI,EAAE,MAAM,IAAI,CAAC;IACjB,KAAK,EAAE,MAAM,IAAI,CAAC;IAClB,WAAW,EAAE,OAAO,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF,wBAAgB,sBAAsB,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,EAAE,KAAK,GAAG,4BAA4B,CAqDjG"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/core/lib/useSpeechToTextFromMic/index.ts"],"names":[],"mappings":"AAIA,UAAU,KAAK;IACb,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAED,KAAK,4BAA4B,GAAG;IAClC,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,IAAI,EAAE,MAAM,IAAI,CAAC;IACjB,KAAK,EAAE,MAAM,IAAI,CAAC;IAClB,WAAW,EAAE,OAAO,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;IACnB,iBAAiB,EAAE,MAAM,CAAC;CAC3B,CAAC;AAEF,wBAAgB,sBAAsB,CAAC,EACrC,MAAM,EACN,SAAS,EACT,mBAA0B,GAC3B,EAAE,KAAK,GAAG,4BAA4B,CAuEtC"}
@@ -1,8 +1,9 @@
- import { useRef, useState } from 'react';
+ import { useEffect, useRef, useState } from 'react';
  import { startSpeechToTextFromMic, stopRecognition } from './helper';
- export function useSpeechToTextFromMic({ apiKey, apiSecret }) {
+ export function useSpeechToTextFromMic({ apiKey, apiSecret, autoClearTranscript = true, }) {
  const [isRecording, setIsRecording] = useState(false);
  const [transcriptArray, setTranscriptArray] = useState([]);
+ const [partialTranscript, setPartialTranscript] = useState('');
  const [recognizer, setRecognizer] = useState();
  const wakelock = useRef();
  const historyRef = useRef([]);
@@ -25,7 +26,7 @@ export function useSpeechToTextFromMic({ apiKey, apiSecret }) {
  clear();
  await lockWakeState();
  setIsRecording(true);
- const recognizerInstance = await startSpeechToTextFromMic(setTranscriptArray, { apiKey, apiSecret }, historyRef, indexRef);
+ const recognizerInstance = await startSpeechToTextFromMic(setTranscriptArray, { apiKey, apiSecret }, historyRef, indexRef, setPartialTranscript);
  setRecognizer(recognizerInstance);
  };
  const stop = () => {
@@ -38,7 +39,16 @@ export function useSpeechToTextFromMic({ apiKey, apiSecret }) {
  historyRef.current = [];
  indexRef.current = 0;
  setTranscriptArray([]);
+ setPartialTranscript('');
  };
+ useEffect(() => {
+ if (autoClearTranscript && transcriptArray.length > 0) {
+ const timer = setTimeout(() => {
+ setTranscriptArray([]);
+ }, 100);
+ return () => clearTimeout(timer);
+ }
+ }, [transcriptArray, autoClearTranscript]);
  const transcript = transcriptArray.join(' ');
  return {
  start,
@@ -46,5 +56,6 @@ export function useSpeechToTextFromMic({ apiKey, apiSecret }) {
  clear,
  isRecording,
  transcript,
+ partialTranscript,
  };
  }
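
Taken together with the updated index.d.ts above, a consumer reads completed sentences from transcript and previews in-progress speech from partialTranscript, with autoClearTranscript (default true) emptying transcript roughly 100ms after each sentence is delivered. The usage sketch below is illustrative: the component, its rendering, and the import path are assumptions; only the hook's shape comes from this diff.

// Sketch only: consuming the hook's new partialTranscript/autoClearTranscript surface.
import * as React from 'react';
// Import path is an assumption; in this diff the hook lives under dist/core/lib/useSpeechToTextFromMic.
import { useSpeechToTextFromMic } from '@bytexbyte/nxtlinq-ai-agent-sdk';

export function DictationField({ apiKey, apiSecret }: { apiKey: string; apiSecret: string }) {
  const [value, setValue] = React.useState('');
  const { start, stop, isRecording, transcript, partialTranscript } = useSpeechToTextFromMic({
    apiKey,
    apiSecret,
    autoClearTranscript: true, // transcript resets shortly after each completed sentence
  });

  // Completed sentences are appended once; interim text is only previewed below the field.
  React.useEffect(() => {
    if (transcript) setValue((prev) => (prev ? prev + ' ' : '') + transcript);
  }, [transcript]);

  return (
    <div>
      <textarea value={value} onChange={(e) => setValue(e.target.value)} />
      <div>{partialTranscript /* live, not yet committed */}</div>
      <button onClick={() => (isRecording ? stop() : start())}>
        {isRecording ? 'Stop' : 'Dictate'}
      </button>
    </div>
  );
}
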
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bytexbyte/nxtlinq-ai-agent-sdk",
- "version": "1.6.8",
+ "version": "1.6.10",
  "description": "Nxtlinq AI Agent SDK - Proprietary Software with enhanced async operation handling",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",