dmed-voice-assistant 1.2.8 → 1.2.10

@@ -89,6 +89,8 @@ const Recognition = _ref4 => {
   const [voiceList, setVoiceList] = (0, _react.useState)([]);
   const languageList = ['Auto-Detect', 'English', 'Chinese (Simplified)'];
   const [selectedLanguage, setSelectedLanguage] = (0, _react.useState)(0);
+  const recognitionBoxRef = (0, _react.useRef)(null);
+  const isClickedStopRef = (0, _react.useRef)(false);
   const recognitionRef = (0, _react.useRef)(null);
   const mediaRecorderRef = (0, _react.useRef)(null);
   const [result, setResult] = (0, _react.useState)([]);
@@ -165,6 +167,7 @@ const Recognition = _ref4 => {
           setRecordTime(prevCount => prevCount + 1);
         }, 1000);
         setIntervalId(id);
+        isClickedStopRef.current = false;
         recognitionRef.current.start();
       }
     } catch (error) {
@@ -173,6 +176,7 @@ const Recognition = _ref4 => {
   };
   const stopRecording = () => {
     if (recognitionRef.current && mediaRecorderRef.current) {
+      isClickedStopRef.current = true;
       recognitionRef.current.stop();
       if (onRecognitionStopEvent) {
         onRecognitionStopEvent();
@@ -248,8 +252,12 @@ const Recognition = _ref4 => {
       console.error('Speech recognition error:', event.error);
     };
     recognition.onend = () => {
-      setIsStarted(false);
-      clearInterval(intervalId);
+      if (!isClickedStopRef.current && recognitionRef.current) {
+        recognitionRef.current.start();
+      } else {
+        setIsStarted(false);
+        clearInterval(intervalId);
+      }
       console.log('Speech recognition ended');
     };
     recognitionRef.current = recognition;
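
Taken together, the new `isClickedStopRef` flag and the reworked `onend` handler implement a common Web Speech API workaround: the browser can end a continuous recognition session on its own (for example after a silence timeout), so the session is restarted unless the user explicitly pressed stop. Below is a minimal sketch of the same pattern in un-compiled React form; the ref and handler names mirror the diff, while the hook wrapper, the `continuous` flag, and the `webkitSpeechRecognition` fallback are assumptions added for illustration.

```jsx
import { useRef, useState } from "react";

// Sketch only: restart-unless-stopped pattern corresponding to the diff above.
function useAutoRestartRecognition() {
  const recognitionRef = useRef(null);
  const isClickedStopRef = useRef(false); // set only when the user presses Stop
  const [isStarted, setIsStarted] = useState(false);

  const startRecording = () => {
    // The webkitSpeechRecognition fallback is an assumption, not shown in the diff.
    const SpeechRecognition =
      window.SpeechRecognition || window.webkitSpeechRecognition;
    const recognition = new SpeechRecognition();
    recognition.continuous = true;

    recognition.onend = () => {
      // The browser may end continuous recognition on its own; restart unless
      // the stop was user-initiated.
      if (!isClickedStopRef.current && recognitionRef.current) {
        recognitionRef.current.start();
      } else {
        setIsStarted(false);
      }
    };

    recognitionRef.current = recognition;
    isClickedStopRef.current = false; // reset the flag on every start
    recognition.start();
    setIsStarted(true);
  };

  const stopRecording = () => {
    if (recognitionRef.current) {
      isClickedStopRef.current = true; // mark the stop as user-initiated...
      recognitionRef.current.stop();   // ...so onend tears down instead of restarting
    }
  };

  return { isStarted, startRecording, stopRecording };
}
```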
@@ -325,6 +333,11 @@ const Recognition = _ref4 => {
   (0, _react.useEffect)(() => {
     setHistoryList(recognitionHistoryList);
   }, [recognitionHistoryList]);
+  (0, _react.useEffect)(() => {
+    if (result.length > 0 && recognitionBoxRef.current) {
+      recognitionBoxRef.current.scrollTop = recognitionBoxRef.current.scrollHeight;
+    }
+  }, [result]);
   return /*#__PURE__*/(0, _jsxRuntime.jsxs)(_material.Box, {
     className: "bg-white rounded-[5px] p-[20px] w-[440px]",
     sx: {
@@ -465,10 +478,17 @@ const Recognition = _ref4 => {
       })]
     })]
   }), isStarted && /*#__PURE__*/(0, _jsxRuntime.jsx)(_material.Box, {
+    ref: recognitionBoxRef,
     className: "flex items-center flex-wrap space-x-1 mt-1 p-[9px]",
     sx: {
       maxWidth: "387px",
-      overflow: "hidden"
+      maxHeight: "100px",
+      overflowX: "hidden",
+      overflowY: "scroll",
+      scrollbarWidth: "none",
+      "&::-webkit-scrollbar": {
+        display: "none"
+      }
     },
     children: result.map((item, index) => {
       return /*#__PURE__*/(0, _jsxRuntime.jsxs)(_material.Box, {
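
The last two hunks add auto-scrolling for the live result box: the `Box` that renders `result` receives `recognitionBoxRef`, an effect pins `scrollTop` to `scrollHeight` whenever `result` changes, and the `sx` styles allow vertical scrolling while hiding the scrollbar in Firefox (`scrollbarWidth: "none"`) and WebKit/Blink (`&::-webkit-scrollbar`). A rough JSX equivalent of the compiled output is sketched below; the standalone component wrapper and the `<span>` used per item are assumptions, since the diff does not show the children markup.

```jsx
import { useEffect, useRef } from "react";
import { Box } from "@mui/material";

// Sketch only: JSX form of the auto-scrolling result container from the diff.
function RecognitionResults({ result }) {
  const recognitionBoxRef = useRef(null);

  useEffect(() => {
    // Keep the newest recognition result in view as results stream in.
    if (result.length > 0 && recognitionBoxRef.current) {
      recognitionBoxRef.current.scrollTop = recognitionBoxRef.current.scrollHeight;
    }
  }, [result]);

  return (
    <Box
      ref={recognitionBoxRef}
      className="flex items-center flex-wrap space-x-1 mt-1 p-[9px]"
      sx={{
        maxWidth: "387px",
        maxHeight: "100px",
        overflowX: "hidden",
        overflowY: "scroll",
        scrollbarWidth: "none",                      // Firefox
        "&::-webkit-scrollbar": { display: "none" }, // Chrome / Safari / Edge
      }}
    >
      {result.map((item, index) => (
        // Placeholder rendering; the real children markup is not shown in the diff.
        <span key={index}>{item}</span>
      ))}
    </Box>
  );
}
```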
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "dmed-voice-assistant",
-  "version": "1.2.8",
+  "version": "1.2.10",
   "main": "dist/index.js",
   "files": [
     "dist"