@topthink/chat 1.0.45 → 1.0.46

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/es/index.js +1 -1
  2. package/package.json +3 -3
package/es/index.js CHANGED
@@ -543,7 +543,7 @@ import*as e from"react";import{useCallback as t,useEffect as r,useState as n,use
543
543
  background-color: var(--bs-gray-200);
544
544
  }
545
545
 
546
- `;class Ie{#e;#t;constructor(){this.#t=new K}onResult(e){this.#t.on("result",e)}onRecording(e){this.#t.on("recording",e)}onTranscribing(e){this.#t.on("transcribing",e)}start(){if(!this.#e){const e=window.SpeechRecognition||window.webkitSpeechRecognition;if(!e)return void E.error("浏览器不支持语音识别");const t=new e;t.continuous=!0,t.lang=navigator.language,t.onresult=e=>{const t=Array.from(e.results).at(-1)?.item(0).transcript;t&&this.#t.emit("result",t)},t.onspeechend=()=>{this.#t.emit("recording",!1)},t.onerror=()=>{this.#t.emit("recording",!1)},this.#e=t}this.#t.emit("recording",!0),this.#e.start()}stop(){this.#e?.stop(),this.#t.emit("recording",!1)}}class Ue{#t;#r;#n;#i;#o;constructor(e,t){this.#r=e,this.#n=t,this.#t=new K}async start(){this.#i=await navigator.mediaDevices.getUserMedia({audio:!0}),this.#o=new AudioContext;const e=this.#o.createScriptProcessor(0,1,1);this.#o.createMediaStreamSource(this.#i).connect(e),e.connect(this.#o.destination);let t=[],r=0;e.onaudioprocess=e=>{const n=e.playbackTime,i=e.inputBuffer.getChannelData(0);let o=!0;for(let e=0;e<i.length;e++)if(Math.abs(i[e])>.01){o=!1;break}if(o){const e=n-r;e>1&&(t.length>1&&(this.save(t),t=[]),e>10&&this.stop())}else t.push(new Float32Array(i)),r=n},this.#t.emit("recording",!0)}convertBuffer(e){const t=new Float32Array(e),r=new Int16Array(e.length);for(let e=0;e<t.length;e++){const n=Math.max(-1,Math.min(1,t[e]));r[e]=n<0?32768*n:32767*n}return r}async encodeMP3(e){const{Mp3Encoder:t}=await import("lamejs"),r=new t(1,44100,128),n=[];for(const t of e){const e=this.convertBuffer(t),i=1152;let o=e.length;for(let t=0;o>=0;t+=i){const a=e.subarray(t,t+i),s=r.encodeBuffer(a);n.push(new Int8Array(s)),o-=i}}return n.push(r.flush()),new Blob(n,{type:"audio/mp3"})}async save(e){const t=await this.encodeMP3(e),r=new FormData;r.append("file",t,"audio.mp3"),r.append("model",this.#n);try{this.#t.emit("transcribing",!0);const e=await 
h({url:this.#r,method:"post",data:r});e.text&&this.#t.emit("result",e.text)}catch(e){}finally{this.#t.emit("transcribing",!1)}}stop(){this.#i&&(this.#i.getTracks().forEach((e=>e.stop())),this.#i=void 0),this.#o&&(this.#o.close(),this.#o=void 0),this.#t.emit("recording",!1)}onResult(e){this.#t.on("result",e)}onRecording(e){this.#t.on("recording",e)}onTranscribing(e){this.#t.on("transcribing",e)}}const He=m(((e,t)=>{let{onResult:a,url:s,model:l="builtin"}=e;const{start:c,stop:d,recording:m,transcribing:u}=function(e){let{onResult:t,url:o,model:a="builtin"}=e;const s=i(),[l,c]=n(!1),[d,m]=n(!1);return r((()=>()=>{s.current&&(s.current.stop(),s.current=void 0)}),[]),{recording:l,transcribing:d,start:async()=>{s.current||(s.current="builtin"===a?new Ie:new Ue(o,a),s.current.onRecording(c),s.current.onTranscribing(m),s.current.onResult(t)),s.current.start()},stop:()=>{s.current&&s.current.stop()}}}({model:l,url:s,onResult:a});return o(t,(()=>({start:c,stop:d})),[c,d]),N(y,{tooltip:m?"停止":"语音输入",placement:"top",children:M(Oe,{onMouseDown:e=>{e.preventDefault(),m?d():c()},children:[u&&N(Ke,{animation:"border",variant:"primary",size:"sm"}),N("i",m?{className:"bi bi-mic-fill text-danger"}:{className:"bi bi-mic"})]})})})),Ke=f(P)`
546
+ `;class Ie{#e;#t;constructor(){this.#t=new K}onResult(e){this.#t.on("result",e)}onRecording(e){this.#t.on("recording",e)}onTranscribing(e){this.#t.on("transcribing",e)}start(){if(!this.#e){const e=window.SpeechRecognition||window.webkitSpeechRecognition;if(!e)return void E.error("浏览器不支持语音识别");const t=new e;t.continuous=!0,t.lang=navigator.language,t.onresult=e=>{const t=Array.from(e.results).at(-1)?.item(0).transcript;t&&this.#t.emit("result",t)},t.onspeechend=()=>{this.#t.emit("recording",!1)},t.onerror=()=>{this.#t.emit("recording",!1)},this.#e=t}this.#t.emit("recording",!0),this.#e.start()}stop(){this.#e?.stop(),this.#t.emit("recording",!1)}}class Ue{#t;#r;#n;#i;#o;constructor(e,t){this.#r=e,this.#n=t,this.#t=new K}async start(){this.#i=await navigator.mediaDevices.getUserMedia({audio:!0}),this.#o=new AudioContext;const e=this.#o.createScriptProcessor(0,1,1);this.#o.createMediaStreamSource(this.#i).connect(e),e.connect(this.#o.destination);let t=[],r=0;e.onaudioprocess=e=>{const n=e.playbackTime,i=e.inputBuffer.getChannelData(0);let o=!0;for(let e=0;e<i.length;e++)if(Math.abs(i[e])>.01){o=!1;break}if(o){const e=n-r;e>1&&(t.length>1&&(this.save(t),t=[]),e>10&&this.stop())}else t.push(new Float32Array(i)),r=n},this.#t.emit("recording",!0)}convertBuffer(e){const t=new Float32Array(e),r=new Int16Array(e.length);for(let e=0;e<t.length;e++){const n=Math.max(-1,Math.min(1,t[e]));r[e]=n<0?32768*n:32767*n}return r}async encodeMP3(e){const{Mp3Encoder:t}=await import("@breezystack/lamejs"),r=new t(1,44100,128),n=[];for(const t of e){const e=this.convertBuffer(t),i=1152;let o=e.length;for(let t=0;o>=0;t+=i){const a=e.subarray(t,t+i),s=r.encodeBuffer(a);n.push(new Int8Array(s)),o-=i}}return n.push(r.flush()),new Blob(n,{type:"audio/mp3"})}async save(e){const t=await this.encodeMP3(e),r=new FormData;r.append("file",t,"audio.mp3"),r.append("model",this.#n);try{this.#t.emit("transcribing",!0);const e=await 
h({url:this.#r,method:"post",data:r});e.text&&this.#t.emit("result",e.text)}catch(e){}finally{this.#t.emit("transcribing",!1)}}stop(){this.#i&&(this.#i.getTracks().forEach((e=>e.stop())),this.#i=void 0),this.#o&&(this.#o.close(),this.#o=void 0),this.#t.emit("recording",!1)}onResult(e){this.#t.on("result",e)}onRecording(e){this.#t.on("recording",e)}onTranscribing(e){this.#t.on("transcribing",e)}}const He=m(((e,t)=>{let{onResult:a,url:s,model:l="builtin"}=e;const{start:c,stop:d,recording:m,transcribing:u}=function(e){let{onResult:t,url:o,model:a="builtin"}=e;const s=i(),[l,c]=n(!1),[d,m]=n(!1);return r((()=>()=>{s.current&&(s.current.stop(),s.current=void 0)}),[]),{recording:l,transcribing:d,start:async()=>{s.current||(s.current="builtin"===a?new Ie:new Ue(o,a),s.current.onRecording(c),s.current.onTranscribing(m),s.current.onResult(t)),s.current.start()},stop:()=>{s.current&&s.current.stop()}}}({model:l,url:s,onResult:a});return o(t,(()=>({start:c,stop:d})),[c,d]),N(y,{tooltip:m?"停止":"语音输入",placement:"top",children:M(Oe,{onMouseDown:e=>{e.preventDefault(),m?d():c()},children:[u&&N(Ke,{animation:"border",variant:"primary",size:"sm"}),N("i",m?{className:"bi bi-mic-fill text-danger"}:{className:"bi bi-mic"})]})})})),Ke=f(P)`
547
547
  position: absolute;
548
548
  --bs-spinner-width: .75rem;
549
549
  --bs-spinner-height: .75rem;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@topthink/chat",
3
- "version": "1.0.45",
3
+ "version": "1.0.46",
4
4
  "scripts": {
5
5
  "prebuild": "rimraf es types",
6
6
  "build": "rollup -c --environment NODE_ENV:production",
@@ -17,11 +17,11 @@
17
17
  ],
18
18
  "dependencies": {
19
19
  "@babel/runtime": "^7.11.2",
20
+ "@breezystack/lamejs": "^1.2.7",
20
21
  "@topthink/components": "^1.0.103",
21
22
  "@types/mdast": "^4.0.4",
22
23
  "eventemitter3": "^5.0.1",
23
24
  "katex": "^0.16.9",
24
- "lamejs": "github:zhuker/lamejs",
25
25
  "mdast-util-from-markdown": "^2.0.1",
26
26
  "micromark-util-character": "^2.1.0",
27
27
  "micromark-util-symbol": "^2.0.0",
@@ -65,5 +65,5 @@
65
65
  },
66
66
  "author": "yunwuxin <tzzhangyajun@qq.com> (https://github.com/yunwuxin)",
67
67
  "license": "MIT",
68
- "gitHead": "d7a5591cfeb4c32e37a598a052b0b4817da57cb0"
68
+ "gitHead": "e575427a22aa85ef61a37f80645ee396132fde1e"
69
69
  }