web-speech-cognitive-services 8.1.2-main.d1bb656 → 8.1.3-main.6ed2e3d

This diff compares the publicly released contents of the two package versions as they appear in their respective public registries and is provided for informational purposes only.
@@ -26405,7 +26405,7 @@
  }
  var meta = document.createElement("meta");
  meta.setAttribute("name", "web-speech-cognitive-services");
- meta.setAttribute("content", `version=${"8.1.2-main.d1bb656"}`);
+ meta.setAttribute("content", `version=${"8.1.3-main.6ed2e3d"}`);
  document.head.appendChild(meta);

  // src/index.umd.js
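
Every bundle hunk in this release carries the same one-line change: the version string that the library writes into a <meta name="web-speech-cognitive-services"> tag appended to document.head when the bundle loads. As an illustrative aside (not part of the package or this diff), a host page could read that tag back to confirm which build is actually running:

// Illustrative sketch only: query the version meta tag injected by the bundle.
const versionMeta = document.querySelector('meta[name="web-speech-cognitive-services"]');

// Logs e.g. "version=8.1.3-main.6ed2e3d", or null if the bundle has not loaded yet.
console.log(versionMeta && versionMeta.getAttribute('content'));
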
@@ -1456,7 +1456,7 @@ function createSpeechServicesPonyfill(options = {}) {
  }
  var meta = document.createElement("meta");
  meta.setAttribute("name", "web-speech-cognitive-services");
- meta.setAttribute("content", `version=${"8.1.2-main.d1bb656"}`);
+ meta.setAttribute("content", `version=${"8.1.3-main.6ed2e3d"}`);
  document.head.appendChild(meta);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
@@ -1422,7 +1422,7 @@ function createSpeechServicesPonyfill(options = {}) {
  }
  var meta = document.createElement("meta");
  meta.setAttribute("name", "web-speech-cognitive-services");
- meta.setAttribute("content", `version=${"8.1.2-main.d1bb656"}`);
+ meta.setAttribute("content", `version=${"8.1.3-main.6ed2e3d"}`);
  document.head.appendChild(meta);
  export {
  createSpeechRecognitionPonyfill,
@@ -27,5 +27,5 @@ File System access not available, please use Push or PullAudioOutputStream`),thi
  ${r}
  </prosody>
  </voice>
- </speak>`}var fk=/^\s*<speak(\s|\/?>)/u,gk=/^\s*<\?xml\s/u;function gg(i){return fk.test(i)||gk.test(i)}var mk="en-US",yk="riff-16khz-16bit-mono-pcm",Sk="Microsoft Server Speech Text to Speech Voice (en-US, AriaNeural)",Ck="SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU3LjU2LjEwMQAAAAAAAAAAAAAA//tAwAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAACAAABhgC7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7//////////////////////////////////////////////////////////////////8AAAAATGF2YzU3LjY0AAAAAAAAAAAAAAAAJAUHAAAAAAAAAYYoRBqpAAAAAAD/+xDEAAPAAAGkAAAAIAAANIAAAARMQU1FMy45OS41VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVf/7EMQpg8AAAaQAAAAgAAA0gAAABFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV";async function hE({deploymentId:i,fetchCredentials:e,lang:t=mk,outputFormat:r=yk,pitch:n,rate:s,text:o,voice:a=Sk,volume:c}){if(!o)return lE(Ck);let{authorizationToken:u,region:l,speechSynthesisHostname:h,subscriptionKey:d}=await e();if(u&&d||!u&&!d)throw new Error('Only "authorizationToken" or "subscriptionKey" should be set.');if(l&&h||!l&&!h)throw new Error('Only "region" or "speechSynthesisHostnamename" should be set.');let g=gg(o)?o:fg({lang:t,pitch:n,rate:s,text:o,voice:a,volume:c}),M=h||(i?`${encodeURI(l)}.voice.speech.microsoft.com`:`${encodeURI(l)}.tts.speech.microsoft.com`),D=i?`?deploymentId=${encodeURI(i)}`:"",oe=`https://${M}/cognitiveservices/v1${D}`,Q=await fetch(oe,{headers:{"Content-Type":"application/ssml+xml","X-Microsoft-OutputFormat":r,...u?{Authorization:`Bearer ${u}`}:{"Ocp-Apim-Subscription-Key":d}},method:"POST",body:g});if(!Q.ok)throw new Error(`web-speech-cognitive-services: Failed to syntheis speech, server returned ${Q.status}`);return Q.arrayBuffer()}function mg(i,e,t){return i.addEventListener(e,t),()=>i.removeEventListener(e,t)}function Pk(i,e){return new Promise((t,r)=>{let n=i.decodeAudioData(e,t,r);n&&typeof n.then=="function"&&t(n)})}function Rk(i,e,t){return new Promise((r,n)=>{let s=new dg,o=new dg,a=mg(i,"statechange",({target:{state:c}})=>c==="closed"&&s.eventListener());try{t.buffer=e,t.onended=o.eventListener,t.connect(i.destination),t.start(0),Promise.race([s.upcoming(),o.upcoming()]).then(r)}catch(c){n(c)}finally{a()}})}var yg=class extends nr{constructor(e){super(),this._lang=null,this._pitch=1,this._rate=1,this._voice=null,this._volume=1,this.text=e,this.onboundary=null,this.onend=null,this.onerror=null,this.onmark=null,this.onpause=null,this.onresume=null,this.onstart=null}get lang(){return this._lang}set lang(e){this._lang=e}get onboundary(){return Bt(this,"boundary")}set onboundary(e){Ft(this,"boundary",e)}get onend(){return Bt(this,"end")}set onend(e){Ft(this,"end",e)}get onerror(){return Bt(this,"error")}set onerror(e){Ft(this,"error",e)}get onmark(){return Bt(this,"mark")}set onmark(e){Ft(this,"mark",e)}get onpause(){return Bt(this,"pause")}set onpause(e){Ft(this,"pause",e)}get onresume(){return Bt(this,"resume")}set onresume(e){Ft(this,"resume",e)}get onstart(){return Bt(this,"start")}set onstart(e){Ft(this,"start",e)}get pitch(){return this._pitch}set pitch(e){this._pitch=e}get rate(){return this._rate}set rate(e){this._rate=e}get voice(){return this._voice}set voice(e){this._voice=e}get volume(){return this._volume}set 
volume(e){this._volume=e}preload({deploymentId:e,fetchCredentials:t,outputFormat:r}){this.arrayBufferPromise=hE({fetchCredentials:t,deploymentId:e,lang:this.lang||window.navigator.language,outputFormat:r,pitch:this.pitch,rate:this.rate,text:this.text,voice:this.voice&&this.voice.voiceURI,volume:this.volume}),this.arrayBufferPromise.catch()}async play(e){try{this.dispatchEvent(new sr("start"));let t=e.createBufferSource(),r=await Pk(e,await this.arrayBufferPromise);this._playingSource=t,await Rk(e,r,t),this._playingSource=null,this.dispatchEvent(new sr("end"))}catch(t){this.dispatchEvent(new ErrorEvent("error",{error:"synthesis-failed",message:t.stack}))}}stop(){this._playingSource&&this._playingSource.stop()}},Sg=yg;var ln=class{constructor({gender:i,lang:e,voiceURI:t}){this._default=!1,this._gender=i,this._lang=e,this._localService=!1,this._name=t,this._voiceURI=t}get default(){return this._default}get gender(){return this._gender}get lang(){return this._lang}get localService(){return this._localService}get name(){return this._name}get voiceURI(){return this._voiceURI}};async function Ek({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r}){let n=i||`${t}.customvoice.api.speech.microsoft.com`,s=await fetch(`https://${encodeURI(n)}/api/texttospeech/v2.0/endpoints/${encodeURIComponent(e)}`,{headers:{accept:"application/json","ocp-apim-subscription-key":r}});if(!s.ok)throw new Error("Failed to fetch custom voices");return s.json()}async function dE({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r}){let{models:n}=await Ek({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r});return n.map(({properties:{Gender:s},locale:o,name:a})=>new ln({gender:s,lang:o,voiceURI:a})).sort(({name:s},{name:o})=>s>o?1:s<o?-1:0)}async function Cg({authorizationToken:i,region:e,speechSynthesisHostname:t,subscriptionKey:r}){let n=t||`${encodeURI(e)}.tts.speech.microsoft.com`,s=await fetch(`https://${n}/cognitiveservices/voices/list`,{headers:{"content-type":"application/json",...i?{authorization:`Bearer ${i}`}:{"Ocp-Apim-Subscription-Key":r}}});if(!s.ok)throw new Error("Failed to fetch voices");return(await s.json()).map(({Gender:a,Locale:c,Name:u})=>new ln({gender:a,lang:c,voiceURI:u})).sort(({name:a},{name:c})=>a>c?1:a<c?-1:0)}var _k="audio-24khz-160kbitrate-mono-mp3",Ik=[],vE=i=>{let{audioContext:e,fetchCredentials:t,ponyfill:r={AudioContext:window.AudioContext||window.webkitAudioContext},speechSynthesisDeploymentId:n,speechSynthesisOutputFormat:s=_k}=ps(i);if(!e&&!r.AudioContext)return console.warn("web-speech-cognitive-services: This browser does not support Web Audio and it will not work with Cognitive Services Speech Services."),{};class o extends nr{constructor(){super(),this.queue=new Hu({audioContext:e,ponyfill:r}),this.updateVoices()}cancel(){this.queue.stop()}getVoices(){return Ik}get onvoiceschanged(){return Bt(this,"voiceschanged")}set onvoiceschanged(c){Ft(this,"voiceschanged",c)}pause(){this.queue.pause()}resume(){this.queue.resume()}speak(c){if(!(c instanceof Sg))throw new Error("invalid utterance");let{reject:u,resolve:l,promise:h}=ls(),d=({error:g,message:M})=>{let D=new Error(g);D.stack=M,u(D)};return c.addEventListener("end",l),c.addEventListener("error",d),c.preload({deploymentId:n,fetchCredentials:t,outputFormat:s}),this.queue.push(c),h.finally(()=>{c.removeEventListener("end",l),c.removeEventListener("error",d)})}get speaking(){return this.queue.speaking}async 
updateVoices(){let{customVoiceHostname:c,region:u,speechSynthesisHostname:l,subscriptionKey:h}=await t();n?h&&(console.warn("web-speech-cognitive-services: Listing of custom voice models are only available when using subscription key."),await hg(async()=>{let d=await dE({customVoiceHostname:c,deploymentId:n,region:u,speechSynthesisHostname:l,subscriptionKey:h});this.getVoices=()=>d})):await hg(async()=>{let d=await Cg(await t());this.getVoices=()=>d}),this.dispatchEvent(new sr("voiceschanged"))}}return{speechSynthesis:new o,SpeechSynthesisEvent:sr,SpeechSynthesisUtterance:Sg}};var Pg=vE;function Ku(i={}){return{...ds(i),...Pg(i)}}var Rg=document.createElement("meta");Rg.setAttribute("name","web-speech-cognitive-services");Rg.setAttribute("content","version=8.1.2-main.d1bb656");document.head.appendChild(Rg);globalThis.WebSpeechCognitiveServices={create:Ku,createSpeechRecognitionPonyfillFromRecognizer:Tr};})();
+ </speak>`}var fk=/^\s*<speak(\s|\/?>)/u,gk=/^\s*<\?xml\s/u;function gg(i){return fk.test(i)||gk.test(i)}var mk="en-US",yk="riff-16khz-16bit-mono-pcm",Sk="Microsoft Server Speech Text to Speech Voice (en-US, AriaNeural)",Ck="SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU3LjU2LjEwMQAAAAAAAAAAAAAA//tAwAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAACAAABhgC7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7//////////////////////////////////////////////////////////////////8AAAAATGF2YzU3LjY0AAAAAAAAAAAAAAAAJAUHAAAAAAAAAYYoRBqpAAAAAAD/+xDEAAPAAAGkAAAAIAAANIAAAARMQU1FMy45OS41VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVf/7EMQpg8AAAaQAAAAgAAA0gAAABFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV";async function hE({deploymentId:i,fetchCredentials:e,lang:t=mk,outputFormat:r=yk,pitch:n,rate:s,text:o,voice:a=Sk,volume:c}){if(!o)return lE(Ck);let{authorizationToken:u,region:l,speechSynthesisHostname:h,subscriptionKey:d}=await e();if(u&&d||!u&&!d)throw new Error('Only "authorizationToken" or "subscriptionKey" should be set.');if(l&&h||!l&&!h)throw new Error('Only "region" or "speechSynthesisHostnamename" should be set.');let g=gg(o)?o:fg({lang:t,pitch:n,rate:s,text:o,voice:a,volume:c}),M=h||(i?`${encodeURI(l)}.voice.speech.microsoft.com`:`${encodeURI(l)}.tts.speech.microsoft.com`),D=i?`?deploymentId=${encodeURI(i)}`:"",oe=`https://${M}/cognitiveservices/v1${D}`,Q=await fetch(oe,{headers:{"Content-Type":"application/ssml+xml","X-Microsoft-OutputFormat":r,...u?{Authorization:`Bearer ${u}`}:{"Ocp-Apim-Subscription-Key":d}},method:"POST",body:g});if(!Q.ok)throw new Error(`web-speech-cognitive-services: Failed to syntheis speech, server returned ${Q.status}`);return Q.arrayBuffer()}function mg(i,e,t){return i.addEventListener(e,t),()=>i.removeEventListener(e,t)}function Pk(i,e){return new Promise((t,r)=>{let n=i.decodeAudioData(e,t,r);n&&typeof n.then=="function"&&t(n)})}function Rk(i,e,t){return new Promise((r,n)=>{let s=new dg,o=new dg,a=mg(i,"statechange",({target:{state:c}})=>c==="closed"&&s.eventListener());try{t.buffer=e,t.onended=o.eventListener,t.connect(i.destination),t.start(0),Promise.race([s.upcoming(),o.upcoming()]).then(r)}catch(c){n(c)}finally{a()}})}var yg=class extends nr{constructor(e){super(),this._lang=null,this._pitch=1,this._rate=1,this._voice=null,this._volume=1,this.text=e,this.onboundary=null,this.onend=null,this.onerror=null,this.onmark=null,this.onpause=null,this.onresume=null,this.onstart=null}get lang(){return this._lang}set lang(e){this._lang=e}get onboundary(){return Bt(this,"boundary")}set onboundary(e){Ft(this,"boundary",e)}get onend(){return Bt(this,"end")}set onend(e){Ft(this,"end",e)}get onerror(){return Bt(this,"error")}set onerror(e){Ft(this,"error",e)}get onmark(){return Bt(this,"mark")}set onmark(e){Ft(this,"mark",e)}get onpause(){return Bt(this,"pause")}set onpause(e){Ft(this,"pause",e)}get onresume(){return Bt(this,"resume")}set onresume(e){Ft(this,"resume",e)}get onstart(){return Bt(this,"start")}set onstart(e){Ft(this,"start",e)}get pitch(){return this._pitch}set pitch(e){this._pitch=e}get rate(){return this._rate}set rate(e){this._rate=e}get voice(){return this._voice}set voice(e){this._voice=e}get volume(){return this._volume}set 
volume(e){this._volume=e}preload({deploymentId:e,fetchCredentials:t,outputFormat:r}){this.arrayBufferPromise=hE({fetchCredentials:t,deploymentId:e,lang:this.lang||window.navigator.language,outputFormat:r,pitch:this.pitch,rate:this.rate,text:this.text,voice:this.voice&&this.voice.voiceURI,volume:this.volume}),this.arrayBufferPromise.catch()}async play(e){try{this.dispatchEvent(new sr("start"));let t=e.createBufferSource(),r=await Pk(e,await this.arrayBufferPromise);this._playingSource=t,await Rk(e,r,t),this._playingSource=null,this.dispatchEvent(new sr("end"))}catch(t){this.dispatchEvent(new ErrorEvent("error",{error:"synthesis-failed",message:t.stack}))}}stop(){this._playingSource&&this._playingSource.stop()}},Sg=yg;var ln=class{constructor({gender:i,lang:e,voiceURI:t}){this._default=!1,this._gender=i,this._lang=e,this._localService=!1,this._name=t,this._voiceURI=t}get default(){return this._default}get gender(){return this._gender}get lang(){return this._lang}get localService(){return this._localService}get name(){return this._name}get voiceURI(){return this._voiceURI}};async function Ek({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r}){let n=i||`${t}.customvoice.api.speech.microsoft.com`,s=await fetch(`https://${encodeURI(n)}/api/texttospeech/v2.0/endpoints/${encodeURIComponent(e)}`,{headers:{accept:"application/json","ocp-apim-subscription-key":r}});if(!s.ok)throw new Error("Failed to fetch custom voices");return s.json()}async function dE({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r}){let{models:n}=await Ek({customVoiceHostname:i,deploymentId:e,region:t,subscriptionKey:r});return n.map(({properties:{Gender:s},locale:o,name:a})=>new ln({gender:s,lang:o,voiceURI:a})).sort(({name:s},{name:o})=>s>o?1:s<o?-1:0)}async function Cg({authorizationToken:i,region:e,speechSynthesisHostname:t,subscriptionKey:r}){let n=t||`${encodeURI(e)}.tts.speech.microsoft.com`,s=await fetch(`https://${n}/cognitiveservices/voices/list`,{headers:{"content-type":"application/json",...i?{authorization:`Bearer ${i}`}:{"Ocp-Apim-Subscription-Key":r}}});if(!s.ok)throw new Error("Failed to fetch voices");return(await s.json()).map(({Gender:a,Locale:c,Name:u})=>new ln({gender:a,lang:c,voiceURI:u})).sort(({name:a},{name:c})=>a>c?1:a<c?-1:0)}var _k="audio-24khz-160kbitrate-mono-mp3",Ik=[],vE=i=>{let{audioContext:e,fetchCredentials:t,ponyfill:r={AudioContext:window.AudioContext||window.webkitAudioContext},speechSynthesisDeploymentId:n,speechSynthesisOutputFormat:s=_k}=ps(i);if(!e&&!r.AudioContext)return console.warn("web-speech-cognitive-services: This browser does not support Web Audio and it will not work with Cognitive Services Speech Services."),{};class o extends nr{constructor(){super(),this.queue=new Hu({audioContext:e,ponyfill:r}),this.updateVoices()}cancel(){this.queue.stop()}getVoices(){return Ik}get onvoiceschanged(){return Bt(this,"voiceschanged")}set onvoiceschanged(c){Ft(this,"voiceschanged",c)}pause(){this.queue.pause()}resume(){this.queue.resume()}speak(c){if(!(c instanceof Sg))throw new Error("invalid utterance");let{reject:u,resolve:l,promise:h}=ls(),d=({error:g,message:M})=>{let D=new Error(g);D.stack=M,u(D)};return c.addEventListener("end",l),c.addEventListener("error",d),c.preload({deploymentId:n,fetchCredentials:t,outputFormat:s}),this.queue.push(c),h.finally(()=>{c.removeEventListener("end",l),c.removeEventListener("error",d)})}get speaking(){return this.queue.speaking}async 
updateVoices(){let{customVoiceHostname:c,region:u,speechSynthesisHostname:l,subscriptionKey:h}=await t();n?h&&(console.warn("web-speech-cognitive-services: Listing of custom voice models are only available when using subscription key."),await hg(async()=>{let d=await dE({customVoiceHostname:c,deploymentId:n,region:u,speechSynthesisHostname:l,subscriptionKey:h});this.getVoices=()=>d})):await hg(async()=>{let d=await Cg(await t());this.getVoices=()=>d}),this.dispatchEvent(new sr("voiceschanged"))}}return{speechSynthesis:new o,SpeechSynthesisEvent:sr,SpeechSynthesisUtterance:Sg}};var Pg=vE;function Ku(i={}){return{...ds(i),...Pg(i)}}var Rg=document.createElement("meta");Rg.setAttribute("name","web-speech-cognitive-services");Rg.setAttribute("content","version=8.1.3-main.6ed2e3d");document.head.appendChild(Rg);globalThis.WebSpeechCognitiveServices={create:Ku,createSpeechRecognitionPonyfillFromRecognizer:Tr};})();
  //# sourceMappingURL=web-speech-cognitive-services.production.min.js.map
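
In the minified production bundle above, the only visible difference between the removed and added lines is the same embedded version string; the surrounding logic (SSML construction, the speech synthesis ponyfill, and the credential checks requiring exactly one of authorizationToken/subscriptionKey and exactly one of region/speechSynthesisHostname) is unchanged. For context, here is a minimal usage sketch of the createSpeechServicesPonyfill factory named in the hunk headers, assuming the library's documented credentials option and using placeholder values:

// Assumed usage sketch, not taken from this diff. Region and key are placeholders.
import { createSpeechServicesPonyfill } from 'web-speech-cognitive-services';

const { speechSynthesis, SpeechSynthesisUtterance } = createSpeechServicesPonyfill({
  credentials: {
    region: 'westus',                        // placeholder Azure region
    subscriptionKey: 'YOUR_SUBSCRIPTION_KEY' // or supply authorizationToken instead (not both)
  }
});

// Speak a short utterance through the Cognitive Services ponyfill.
const utterance = new SpeechSynthesisUtterance('Hello, world!');

utterance.addEventListener('end', () => console.log('Finished speaking.'));
speechSynthesis.speak(utterance);
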
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "web-speech-cognitive-services",
- "version": "8.1.2-main.d1bb656",
+ "version": "8.1.3-main.6ed2e3d",
  "description": "Polyfill Web Speech API with Cognitive Services Speech-to-Text service",
  "files": [
  "./dist/**/*"
@@ -101,7 +101,7 @@
  "on-error-resume-next": "^2.0.2",
  "simple-update-in": "^2.2.0",
  "valibot": "^0.42.1",
- "web-speech-cognitive-services": "^8.1.2-main.d1bb656"
+ "web-speech-cognitive-services": "^8.1.3-main.6ed2e3d"
  },
  "peerDependencies": {
  "microsoft-cognitiveservices-speech-sdk": "^1.17.0"