web-speech-cognitive-services 8.1.3-main.b33949a → 8.1.4-main.b358385
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
@@ -27219,7 +27219,7 @@
 }
 var meta = document.createElement("meta");
 meta.setAttribute("name", "web-speech-cognitive-services");
-meta.setAttribute("content", `version=${"8.1.3-main.b33949a"}`);
+meta.setAttribute("content", `version=${"8.1.4-main.b358385"}`);
 document.head.appendChild(meta);
 
 // src/index.umd.js
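Each bundle stamps its version into the page by appending a <meta name="web-speech-cognitive-services" content="version=..."> tag to document.head, and the only change in each bundle is the version string written into that tag. A minimal sketch of how a page could read the tag back to confirm which build is loaded; the helper name getLoadedVersion is illustrative and not part of the package:

// Illustrative helper (not part of the package): reads the version meta tag
// that the bundle appends to <head> when it is evaluated.
function getLoadedVersion() {
  const meta = document.querySelector('meta[name="web-speech-cognitive-services"]');
  const content = meta ? meta.getAttribute('content') : ''; // e.g. "version=8.1.4-main.b358385"
  const match = /^version=(.+)$/.exec(content || '');
  return match ? match[1] : null;
}

console.log(getLoadedVersion()); // "8.1.4-main.b358385" once the bundle has run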
@@ -1456,7 +1456,7 @@ function createSpeechServicesPonyfill(options = {}) {
 }
 var meta = document.createElement("meta");
 meta.setAttribute("name", "web-speech-cognitive-services");
-meta.setAttribute("content", `version=${"8.1.3-main.b33949a"}`);
+meta.setAttribute("content", `version=${"8.1.4-main.b358385"}`);
 document.head.appendChild(meta);
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
@@ -1422,7 +1422,7 @@ function createSpeechServicesPonyfill(options = {}) {
 }
 var meta = document.createElement("meta");
 meta.setAttribute("name", "web-speech-cognitive-services");
-meta.setAttribute("content", `version=${"8.1.3-main.b33949a"}`);
+meta.setAttribute("content", `version=${"8.1.4-main.b358385"}`);
 document.head.appendChild(meta);
 export {
 createSpeechRecognitionPonyfill,
@@ -27,5 +27,5 @@ File System access not available, please use Push or PullAudioOutputStream`),thi
 ${i}
 </prosody>
 </voice>
-
</speak>`}var V0=/^\s*<speak(\s|\/?>)/u,U0=/^\s*<\?xml\s/u;function Ug(r){return V0.test(r)||U0.test(r)}var W0="en-US",H0="riff-16khz-16bit-mono-pcm",K0="Microsoft Server Speech Text to Speech Voice (en-US, AriaNeural)",J0="SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU3LjU2LjEwMQAAAAAAAAAAAAAA//tAwAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAACAAABhgC7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7//////////////////////////////////////////////////////////////////8AAAAATGF2YzU3LjY0AAAAAAAAAAAAAAAAJAUHAAAAAAAAAYYoRBqpAAAAAAD/+xDEAAPAAAGkAAAAIAAANIAAAARMQU1FMy45OS41VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVf/7EMQpg8AAAaQAAAAgAAA0gAAABFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV";async function pw({deploymentId:r,fetchCredentials:e,lang:t=W0,outputFormat:i=H0,pitch:n,rate:s,text:o,voice:a=K0,volume:c}){if(!o)return uw(J0);let{authorizationToken:p,region:h,speechSynthesisHostname:d,subscriptionKey:v}=await e();if(p&&v||!p&&!v)throw new Error('Only "authorizationToken" or "subscriptionKey" should be set.');if(h&&d||!h&&!d)throw new Error('Only "region" or "speechSynthesisHostnamename" should be set.');let g=Ug(o)?o:Vg({lang:t,pitch:n,rate:s,text:o,voice:a,volume:c}),M=d||(r?`${encodeURI(h)}.voice.speech.microsoft.com`:`${encodeURI(h)}.tts.speech.microsoft.com`),A=r?`?deploymentId=${encodeURI(r)}`:"",H=`https://${M}/cognitiveservices/v1${A}`,ee=await fetch(H,{headers:{"Content-Type":"application/ssml+xml","X-Microsoft-OutputFormat":i,...p?{Authorization:`Bearer ${p}`}:{"Ocp-Apim-Subscription-Key":v}},method:"POST",body:g});if(!ee.ok)throw new Error(`web-speech-cognitive-services: Failed to syntheis speech, server returned ${ee.status}`);return ee.arrayBuffer()}function Wg(r,e,t){return r.addEventListener(e,t),()=>r.removeEventListener(e,t)}function $0(r,e){return new Promise((t,i)=>{let n=r.decodeAudioData(e,t,i);n&&typeof n.then=="function"&&t(n)})}function G0(r,e,t){return new Promise((i,n)=>{let s=new Bg,o=new Bg,a=Wg(r,"statechange",({target:{state:c}})=>c==="closed"&&s.eventListener());try{t.buffer=e,t.onended=o.eventListener,t.connect(r.destination),t.start(0),Promise.race([s.upcoming(),o.upcoming()]).then(i)}catch(c){n(c)}finally{a()}})}var Hg=class extends ur{constructor(e){super(),this._lang=null,this._pitch=1,this._rate=1,this._voice=null,this._volume=1,this.text=e,this.onboundary=null,this.onend=null,this.onerror=null,this.onmark=null,this.onpause=null,this.onresume=null,this.onstart=null}get lang(){return this._lang}set lang(e){this._lang=e}get onboundary(){return Wt(this,"boundary")}set onboundary(e){Ht(this,"boundary",e)}get onend(){return Wt(this,"end")}set onend(e){Ht(this,"end",e)}get onerror(){return Wt(this,"error")}set onerror(e){Ht(this,"error",e)}get onmark(){return Wt(this,"mark")}set onmark(e){Ht(this,"mark",e)}get onpause(){return Wt(this,"pause")}set onpause(e){Ht(this,"pause",e)}get onresume(){return Wt(this,"resume")}set onresume(e){Ht(this,"resume",e)}get onstart(){return Wt(this,"start")}set onstart(e){Ht(this,"start",e)}get pitch(){return this._pitch}set pitch(e){this._pitch=e}get rate(){return this._rate}set rate(e){this._rate=e}get voice(){return this._voice}set voice(e){this._voice=e}get volume(){return this._volume}set 
volume(e){this._volume=e}preload({deploymentId:e,fetchCredentials:t,outputFormat:i}){this.arrayBufferPromise=pw({fetchCredentials:t,deploymentId:e,lang:this.lang||window.navigator.language,outputFormat:i,pitch:this.pitch,rate:this.rate,text:this.text,voice:this.voice&&this.voice.voiceURI,volume:this.volume}),this.arrayBufferPromise.catch()}async play(e){try{this.dispatchEvent(new pr("start"));let t=e.createBufferSource(),i=await $0(e,await this.arrayBufferPromise);this._playingSource=t,await G0(e,i,t),this._playingSource=null,this.dispatchEvent(new pr("end"))}catch(t){this.dispatchEvent(new ErrorEvent("error",{error:"synthesis-failed",message:t.stack}))}}stop(){this._playingSource&&this._playingSource.stop()}},Kg=Hg;var Sn=class{constructor({gender:r,lang:e,voiceURI:t}){this._default=!1,this._gender=r,this._lang=e,this._localService=!1,this._name=t,this._voiceURI=t}get default(){return this._default}get gender(){return this._gender}get lang(){return this._lang}get localService(){return this._localService}get name(){return this._name}get voiceURI(){return this._voiceURI}};async function Q0({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i}){let n=r||`${t}.customvoice.api.speech.microsoft.com`,s=await fetch(`https://${encodeURI(n)}/api/texttospeech/v2.0/endpoints/${encodeURIComponent(e)}`,{headers:{accept:"application/json","ocp-apim-subscription-key":i}});if(!s.ok)throw new Error("Failed to fetch custom voices");return s.json()}async function lw({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i}){let{models:n}=await Q0({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i});return n.map(({properties:{Gender:s},locale:o,name:a})=>new Sn({gender:s,lang:o,voiceURI:a})).sort(({name:s},{name:o})=>s>o?1:s<o?-1:0)}async function Jg({authorizationToken:r,region:e,speechSynthesisHostname:t,subscriptionKey:i}){let n=t||`${encodeURI(e)}.tts.speech.microsoft.com`,s=await fetch(`https://${n}/cognitiveservices/voices/list`,{headers:{"content-type":"application/json",...r?{authorization:`Bearer ${r}`}:{"Ocp-Apim-Subscription-Key":i}}});if(!s.ok)throw new Error("Failed to fetch voices");return(await s.json()).map(({Gender:a,Locale:c,Name:p})=>new Sn({gender:a,lang:c,voiceURI:p})).sort(({name:a},{name:c})=>a>c?1:a<c?-1:0)}var X0="audio-24khz-160kbitrate-mono-mp3",Z0=[],hw=r=>{let{audioContext:e,fetchCredentials:t,ponyfill:i={AudioContext:window.AudioContext||window.webkitAudioContext},speechSynthesisDeploymentId:n,speechSynthesisOutputFormat:s=X0}=As(r);if(!e&&!i.AudioContext)return console.warn("web-speech-cognitive-services: This browser does not support Web Audio and it will not work with Cognitive Services Speech Services."),{};class o extends ur{constructor(){super(),this.queue=new hp({audioContext:e,ponyfill:i}),this.updateVoices()}cancel(){this.queue.stop()}getVoices(){return Z0}get onvoiceschanged(){return Wt(this,"voiceschanged")}set onvoiceschanged(c){Ht(this,"voiceschanged",c)}pause(){this.queue.pause()}resume(){this.queue.resume()}speak(c){if(!(c instanceof Kg))throw new Error("invalid utterance");let{reject:p,resolve:h,promise:d}=Ts(),v=({error:g,message:M})=>{let A=new Error(g);A.stack=M,p(A)};return c.addEventListener("end",h),c.addEventListener("error",v),c.preload({deploymentId:n,fetchCredentials:t,outputFormat:s}),this.queue.push(c),d.finally(()=>{c.removeEventListener("end",h),c.removeEventListener("error",v)})}get speaking(){return this.queue.speaking}async 
updateVoices(){let{customVoiceHostname:c,region:p,speechSynthesisHostname:h,subscriptionKey:d}=await t();n?d&&(console.warn("web-speech-cognitive-services: Listing of custom voice models are only available when using subscription key."),await Lg(async()=>{let v=await lw({customVoiceHostname:c,deploymentId:n,region:p,speechSynthesisHostname:h,subscriptionKey:d});this.getVoices=()=>v})):await Lg(async()=>{let v=await Jg(await t());this.getVoices=()=>v}),this.dispatchEvent(new pr("voiceschanged"))}}return{speechSynthesis:new o,SpeechSynthesisEvent:pr,SpeechSynthesisUtterance:Kg}};var $g=hw;function dp(r={}){return{...ks(r),...$g(r)}}var Gg=document.createElement("meta");Gg.setAttribute("name","web-speech-cognitive-services");Gg.setAttribute("content","version=8.1.
+
</speak>`}var V0=/^\s*<speak(\s|\/?>)/u,U0=/^\s*<\?xml\s/u;function Ug(r){return V0.test(r)||U0.test(r)}var W0="en-US",H0="riff-16khz-16bit-mono-pcm",K0="Microsoft Server Speech Text to Speech Voice (en-US, AriaNeural)",J0="SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU3LjU2LjEwMQAAAAAAAAAAAAAA//tAwAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAACAAABhgC7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7u7//////////////////////////////////////////////////////////////////8AAAAATGF2YzU3LjY0AAAAAAAAAAAAAAAAJAUHAAAAAAAAAYYoRBqpAAAAAAD/+xDEAAPAAAGkAAAAIAAANIAAAARMQU1FMy45OS41VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVf/7EMQpg8AAAaQAAAAgAAA0gAAABFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV";async function pw({deploymentId:r,fetchCredentials:e,lang:t=W0,outputFormat:i=H0,pitch:n,rate:s,text:o,voice:a=K0,volume:c}){if(!o)return uw(J0);let{authorizationToken:p,region:h,speechSynthesisHostname:d,subscriptionKey:v}=await e();if(p&&v||!p&&!v)throw new Error('Only "authorizationToken" or "subscriptionKey" should be set.');if(h&&d||!h&&!d)throw new Error('Only "region" or "speechSynthesisHostnamename" should be set.');let g=Ug(o)?o:Vg({lang:t,pitch:n,rate:s,text:o,voice:a,volume:c}),M=d||(r?`${encodeURI(h)}.voice.speech.microsoft.com`:`${encodeURI(h)}.tts.speech.microsoft.com`),A=r?`?deploymentId=${encodeURI(r)}`:"",H=`https://${M}/cognitiveservices/v1${A}`,ee=await fetch(H,{headers:{"Content-Type":"application/ssml+xml","X-Microsoft-OutputFormat":i,...p?{Authorization:`Bearer ${p}`}:{"Ocp-Apim-Subscription-Key":v}},method:"POST",body:g});if(!ee.ok)throw new Error(`web-speech-cognitive-services: Failed to syntheis speech, server returned ${ee.status}`);return ee.arrayBuffer()}function Wg(r,e,t){return r.addEventListener(e,t),()=>r.removeEventListener(e,t)}function $0(r,e){return new Promise((t,i)=>{let n=r.decodeAudioData(e,t,i);n&&typeof n.then=="function"&&t(n)})}function G0(r,e,t){return new Promise((i,n)=>{let s=new Bg,o=new Bg,a=Wg(r,"statechange",({target:{state:c}})=>c==="closed"&&s.eventListener());try{t.buffer=e,t.onended=o.eventListener,t.connect(r.destination),t.start(0),Promise.race([s.upcoming(),o.upcoming()]).then(i)}catch(c){n(c)}finally{a()}})}var Hg=class extends ur{constructor(e){super(),this._lang=null,this._pitch=1,this._rate=1,this._voice=null,this._volume=1,this.text=e,this.onboundary=null,this.onend=null,this.onerror=null,this.onmark=null,this.onpause=null,this.onresume=null,this.onstart=null}get lang(){return this._lang}set lang(e){this._lang=e}get onboundary(){return Wt(this,"boundary")}set onboundary(e){Ht(this,"boundary",e)}get onend(){return Wt(this,"end")}set onend(e){Ht(this,"end",e)}get onerror(){return Wt(this,"error")}set onerror(e){Ht(this,"error",e)}get onmark(){return Wt(this,"mark")}set onmark(e){Ht(this,"mark",e)}get onpause(){return Wt(this,"pause")}set onpause(e){Ht(this,"pause",e)}get onresume(){return Wt(this,"resume")}set onresume(e){Ht(this,"resume",e)}get onstart(){return Wt(this,"start")}set onstart(e){Ht(this,"start",e)}get pitch(){return this._pitch}set pitch(e){this._pitch=e}get rate(){return this._rate}set rate(e){this._rate=e}get voice(){return this._voice}set voice(e){this._voice=e}get volume(){return this._volume}set 
volume(e){this._volume=e}preload({deploymentId:e,fetchCredentials:t,outputFormat:i}){this.arrayBufferPromise=pw({fetchCredentials:t,deploymentId:e,lang:this.lang||window.navigator.language,outputFormat:i,pitch:this.pitch,rate:this.rate,text:this.text,voice:this.voice&&this.voice.voiceURI,volume:this.volume}),this.arrayBufferPromise.catch()}async play(e){try{this.dispatchEvent(new pr("start"));let t=e.createBufferSource(),i=await $0(e,await this.arrayBufferPromise);this._playingSource=t,await G0(e,i,t),this._playingSource=null,this.dispatchEvent(new pr("end"))}catch(t){this.dispatchEvent(new ErrorEvent("error",{error:"synthesis-failed",message:t.stack}))}}stop(){this._playingSource&&this._playingSource.stop()}},Kg=Hg;var Sn=class{constructor({gender:r,lang:e,voiceURI:t}){this._default=!1,this._gender=r,this._lang=e,this._localService=!1,this._name=t,this._voiceURI=t}get default(){return this._default}get gender(){return this._gender}get lang(){return this._lang}get localService(){return this._localService}get name(){return this._name}get voiceURI(){return this._voiceURI}};async function Q0({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i}){let n=r||`${t}.customvoice.api.speech.microsoft.com`,s=await fetch(`https://${encodeURI(n)}/api/texttospeech/v2.0/endpoints/${encodeURIComponent(e)}`,{headers:{accept:"application/json","ocp-apim-subscription-key":i}});if(!s.ok)throw new Error("Failed to fetch custom voices");return s.json()}async function lw({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i}){let{models:n}=await Q0({customVoiceHostname:r,deploymentId:e,region:t,subscriptionKey:i});return n.map(({properties:{Gender:s},locale:o,name:a})=>new Sn({gender:s,lang:o,voiceURI:a})).sort(({name:s},{name:o})=>s>o?1:s<o?-1:0)}async function Jg({authorizationToken:r,region:e,speechSynthesisHostname:t,subscriptionKey:i}){let n=t||`${encodeURI(e)}.tts.speech.microsoft.com`,s=await fetch(`https://${n}/cognitiveservices/voices/list`,{headers:{"content-type":"application/json",...r?{authorization:`Bearer ${r}`}:{"Ocp-Apim-Subscription-Key":i}}});if(!s.ok)throw new Error("Failed to fetch voices");return(await s.json()).map(({Gender:a,Locale:c,Name:p})=>new Sn({gender:a,lang:c,voiceURI:p})).sort(({name:a},{name:c})=>a>c?1:a<c?-1:0)}var X0="audio-24khz-160kbitrate-mono-mp3",Z0=[],hw=r=>{let{audioContext:e,fetchCredentials:t,ponyfill:i={AudioContext:window.AudioContext||window.webkitAudioContext},speechSynthesisDeploymentId:n,speechSynthesisOutputFormat:s=X0}=As(r);if(!e&&!i.AudioContext)return console.warn("web-speech-cognitive-services: This browser does not support Web Audio and it will not work with Cognitive Services Speech Services."),{};class o extends ur{constructor(){super(),this.queue=new hp({audioContext:e,ponyfill:i}),this.updateVoices()}cancel(){this.queue.stop()}getVoices(){return Z0}get onvoiceschanged(){return Wt(this,"voiceschanged")}set onvoiceschanged(c){Ht(this,"voiceschanged",c)}pause(){this.queue.pause()}resume(){this.queue.resume()}speak(c){if(!(c instanceof Kg))throw new Error("invalid utterance");let{reject:p,resolve:h,promise:d}=Ts(),v=({error:g,message:M})=>{let A=new Error(g);A.stack=M,p(A)};return c.addEventListener("end",h),c.addEventListener("error",v),c.preload({deploymentId:n,fetchCredentials:t,outputFormat:s}),this.queue.push(c),d.finally(()=>{c.removeEventListener("end",h),c.removeEventListener("error",v)})}get speaking(){return this.queue.speaking}async 
updateVoices(){let{customVoiceHostname:c,region:p,speechSynthesisHostname:h,subscriptionKey:d}=await t();n?d&&(console.warn("web-speech-cognitive-services: Listing of custom voice models are only available when using subscription key."),await Lg(async()=>{let v=await lw({customVoiceHostname:c,deploymentId:n,region:p,speechSynthesisHostname:h,subscriptionKey:d});this.getVoices=()=>v})):await Lg(async()=>{let v=await Jg(await t());this.getVoices=()=>v}),this.dispatchEvent(new pr("voiceschanged"))}}return{speechSynthesis:new o,SpeechSynthesisEvent:pr,SpeechSynthesisUtterance:Kg}};var $g=hw;function dp(r={}){return{...ks(r),...$g(r)}}var Gg=document.createElement("meta");Gg.setAttribute("name","web-speech-cognitive-services");Gg.setAttribute("content","version=8.1.4-main.b358385");document.head.appendChild(Gg);globalThis.WebSpeechCognitiveServices={create:dp,createSpeechRecognitionPonyfillFromRecognizer:Vr};})();
 //# sourceMappingURL=web-speech-cognitive-services.production.min.js.map
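The minified production bundle above contains the whole synthesis path; the only change is again the version string baked into the meta tag. For orientation, a minimal sketch of the request shape the bundle's synthesis function uses (parameter names here are illustrative; the real code resolves region, hostname, and credentials from fetchCredentials() and also supports bearer tokens and custom-voice deployments):

// Illustrative sketch: POST SSML to the regional TTS endpoint, authenticate
// with a subscription key (the bundle alternatively sends "Authorization: Bearer <token>"),
// and return the encoded audio as an ArrayBuffer for Web Audio to decode.
async function synthesize({ region, subscriptionKey, ssml, outputFormat = 'riff-16khz-16bit-mono-pcm' }) {
  const res = await fetch(`https://${encodeURI(region)}.tts.speech.microsoft.com/cognitiveservices/v1`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/ssml+xml',
      'X-Microsoft-OutputFormat': outputFormat,
      'Ocp-Apim-Subscription-Key': subscriptionKey
    },
    body: ssml
  });

  if (!res.ok) {
    throw new Error(`Failed to synthesize speech, server returned ${res.status}`);
  }

  return res.arrayBuffer();
}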
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "web-speech-cognitive-services",
-"version": "8.1.3-main.b33949a",
+"version": "8.1.4-main.b358385",
 "description": "Polyfill Web Speech API with Cognitive Services Speech-to-Text service",
 "files": [
 "./dist/**/*"
@@ -101,7 +101,7 @@
 "on-error-resume-next": "^2.0.2",
 "simple-update-in": "^2.2.0",
 "valibot": "^1.1.0",
-"web-speech-cognitive-services": "^8.1.3-main.b33949a"
+"web-speech-cognitive-services": "^8.1.4-main.b358385"
 },
 "peerDependencies": {
 "microsoft-cognitiveservices-speech-sdk": "^1.17.0"