dmed-voice-assistant 1.2.7 → 1.2.9

package/dist/index.js CHANGED
@@ -25,6 +25,7 @@ const VoiceAssistant = _ref => {
  onNewRecognitionEvent,
  onRecognitionDataChange,
  onCloseRecognition,
+ onRecognitionStartEvent,
  onRecognitionStopEvent,
  onRealTimeRecognitionCommandEvent
  } = _ref;
@@ -48,6 +49,7 @@ const VoiceAssistant = _ref => {
  onRecognitionDataChange: onRecognitionDataChange,
  isOnlyRecognitionMode: isOnlyRecognitionMode,
  onCloseRecognition: onCloseRecognition,
+ onRecognitionStartEvent: onRecognitionStartEvent,
  onRecognitionStopEvent: onRecognitionStopEvent
  })]
  })
@@ -77,6 +77,7 @@ const Recognition = _ref4 => {
  onRecognitionDataChange,
  onCloseRecognition,
  onRealTimeRecognitionCommandEvent,
+ onRecognitionStartEvent,
  onRecognitionStopEvent
  } = _ref4;
  const [open, setOpen] = (0, _react.useState)(false);
@@ -88,6 +89,7 @@ const Recognition = _ref4 => {
  const [voiceList, setVoiceList] = (0, _react.useState)([]);
  const languageList = ['Auto-Detect', 'English', 'Chinese (Simplified)'];
  const [selectedLanguage, setSelectedLanguage] = (0, _react.useState)(0);
+ const recognitionBoxRef = (0, _react.useRef)(null);
  const recognitionRef = (0, _react.useRef)(null);
  const mediaRecorderRef = (0, _react.useRef)(null);
  const [result, setResult] = (0, _react.useState)([]);
@@ -155,6 +157,9 @@ const Recognition = _ref4 => {
  mediaRecorderRef.current = newRecorder;
  }
  mediaRecorderRef.current.start();
+ if (onRecognitionStartEvent) {
+ onRecognitionStartEvent();
+ }
  setResult([]);
  setRecordTime(0);
  const id = setInterval(async () => {
@@ -321,6 +326,11 @@ const Recognition = _ref4 => {
  (0, _react.useEffect)(() => {
  setHistoryList(recognitionHistoryList);
  }, [recognitionHistoryList]);
+ (0, _react.useEffect)(() => {
+ if (result.length > 0 && recognitionBoxRef.current) {
+ recognitionBoxRef.current.scrollTop = recognitionBoxRef.current.scrollHeight;
+ }
+ }, [result]);
  return /*#__PURE__*/(0, _jsxRuntime.jsxs)(_material.Box, {
  className: "bg-white rounded-[5px] p-[20px] w-[440px]",
  sx: {
@@ -461,10 +471,17 @@ const Recognition = _ref4 => {
  })]
  })]
  }), isStarted && /*#__PURE__*/(0, _jsxRuntime.jsx)(_material.Box, {
+ ref: recognitionBoxRef,
  className: "flex items-center flex-wrap space-x-1 mt-1 p-[9px]",
  sx: {
  maxWidth: "387px",
- overflow: "hidden"
+ maxHeight: "100px",
+ overflowX: "hidden",
+ overflowY: "scroll",
+ scrollbarWidth: "none",
+ "&::-webkit-scrollbar": {
+ display: "none"
+ }
  },
  children: result.map((item, index) => {
  return /*#__PURE__*/(0, _jsxRuntime.jsxs)(_material.Box, {
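The last two hunks attach the new recognitionBoxRef to the live transcript container, cap it at a 100px height with hidden scrollbars, and add an effect that pins scrollTop to scrollHeight whenever result grows, so the newest recognition text stays in view. A minimal standalone sketch of the same scroll-to-bottom pattern (the TranscriptBox component and items prop below are illustrative, not part of the package):

import { useEffect, useRef } from "react";

// Illustrative sketch of the scroll-to-bottom pattern used in the diff above.
function TranscriptBox({ items }) {
  const boxRef = useRef(null);

  useEffect(() => {
    // Whenever new items arrive, pin the scroll position to the bottom.
    if (items.length > 0 && boxRef.current) {
      boxRef.current.scrollTop = boxRef.current.scrollHeight;
    }
  }, [items]);

  return (
    <div ref={boxRef} style={{ maxHeight: 100, overflowX: "hidden", overflowY: "scroll" }}>
      {items.map((text, index) => (
        <span key={index}>{text} </span>
      ))}
    </div>
  );
}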
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "dmed-voice-assistant",
- "version": "1.2.7",
+ "version": "1.2.9",
  "main": "dist/index.js",
  "files": [
  "dist"