@glydeunity/voice-sdk 1.3.3 → 1.3.4
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/voice-sdk.es.js +2 -2
- package/dist/voice-sdk.umd.js +1 -1
- package/package.json +1 -1
package/dist/voice-sdk.es.js
CHANGED
@@ -226,7 +226,7 @@ class y {
     }
     this.ws = new WebSocket(l, ["bearer", i]), this.ws.onopen = () => {
       const s = r || {
-        think: { provider: { type: "open_ai", model: "gpt-
+        think: { provider: { type: "open_ai", model: "gpt-4.1-nano" } },
         speak: { provider: { type: "deepgram", model: "aura-2-thalia-en" } },
         listen: { provider: { type: "deepgram", version: "v2", model: "flux-general-en" } }
       }, h = {

@@ -251,7 +251,7 @@ class y {
           provider: { type: "deepgram", version: "v2", model: "flux-general-en" }
         },
         think: {
-          provider: s.think?.provider || { type: "open_ai", model: "gpt-
+          provider: s.think?.provider || { type: "open_ai", model: "gpt-4.1-nano" },
           // Functions come from server config - no client-side defaults
           ...s.think?.functions && { functions: s.think.functions }
         },
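The change above only swaps the fallback `think` model used when no agent configuration is supplied by the caller or returned by the server. Reconstructed from the minified bundle below, the default "Settings" message the SDK builds once the agent WebSocket opens now looks roughly like the following sketch; field names and the 48 kHz/24 kHz sample rates are taken from that source, but this is a readability sketch, not an official schema (greeting text and optional tags omitted).

// Sketch of the default Settings payload built in ws.onopen when no
// deepgram_config is provided (de-minified for readability).
const defaultSettings = {
  type: "Settings",
  audio: {
    input: { encoding: "linear16", sample_rate: 48000 },                    // inputSampleRate
    output: { encoding: "linear16", sample_rate: 24000, container: "none" } // outputSampleRate
  },
  agent: {
    language: "en",
    speak: { provider: { type: "deepgram", model: "aura-2-thalia-en" } },
    listen: { provider: { type: "deepgram", version: "v2", model: "flux-general-en" } },
    // 1.3.4 changes this default; the previous value is truncated in the diff above.
    think: { provider: { type: "open_ai", model: "gpt-4.1-nano" } }
  }
};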
package/dist/voice-sdk.umd.js
CHANGED
@@ -130,7 +130,7 @@ class AudioPlaybackProcessor extends AudioWorkletProcessor {
   }

   registerProcessor('audio-playback-processor', AudioPlaybackProcessor);
-
`;class f{config;unityUrl;active=!1;serverConfig=null;ws=null;audioContext=null;mediaStream=null;captureWorkletNode=null;playbackWorkletNode=null;isMuted=!1;outputSampleRate=24e3;inputSampleRate=48e3;isAgentSpeaking=!1;agentAudioDoneReceived=!1;sessionContext={};constructor(e){this.config=e,this.unityUrl=e.unityBaseUrl||"https://api.glydeunity.com",!e.publishableKey&&!e.apiKey&&!e.authToken&&console.warn("[GlydeVoice] No authentication method provided. One of publishableKey, apiKey, or authToken is required.")}getAuthHeaders(){const e={"Content-Type":"application/json"};return this.config.publishableKey&&(e["x-publishable-key"]=this.config.publishableKey),this.config.apiKey&&(e["x-api-key"]=this.config.apiKey),this.config.authToken&&(e.Authorization=`Bearer ${this.config.authToken}`),e}async fetchConfig(){const e=`${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`,t=this.config.contextId?`${e}/${this.config.contextId}`:e,o=await fetch(t,{method:"GET",headers:this.getAuthHeaders()});if(!o.ok){const a=await o.json();throw new Error(a.error?.message||a.message||"Failed to fetch voice config")}const{data:i}=await o.json();return i}async start(){if(!this.active){this.active=!0;try{this.config.systemPrompt||(this.serverConfig=await this.fetchConfig(),console.log("[GlydeVoice] Fetched config:",this.serverConfig));const e={context_id:this.config.contextId,domain:typeof window<"u"?window.location.hostname:"localhost"};this.config.systemPrompt&&(e.system_prompt=this.config.systemPrompt),this.config.deepgramConfig&&(e.deepgram_config=this.config.deepgramConfig);const t=await fetch(`${this.unityUrl}/api/unity/voice/auth`,{method:"POST",headers:this.getAuthHeaders(),body:JSON.stringify(e)});if(!t.ok){const s=await t.json();throw new Error(s.error?.message||s.message||"Failed to authenticate voice session")}const{data:o}=await t.json(),{token:i,agent_config:a,deepgram_config:n}=o;this.setSessionContext({clientUuid:a?.client_uuid,contextId:this.config.contextId,contextType:this.config.contextType,currentJobUuid:a?.job_uuid});const d=this.config.systemPrompt||a.instructions||this.serverConfig?.system_prompt||"You are a helpful AI assistant.";await this.initializeAudio();let h="wss://agent.deepgram.com/v1/agent/converse";const r=this.config.deepgramConfig||n||this.serverConfig?.deepgram_config;if(r?.tags&&r.tags.length>0){const s=new URLSearchParams;r.tags.forEach(u=>s.append("tag",u)),h+=`?${s.toString()}`}this.ws=new WebSocket(h,["bearer",i]),this.ws.onopen=()=>{const s=r||{think:{provider:{type:"open_ai",model:"gpt-
+
`;class f{config;unityUrl;active=!1;serverConfig=null;ws=null;audioContext=null;mediaStream=null;captureWorkletNode=null;playbackWorkletNode=null;isMuted=!1;outputSampleRate=24e3;inputSampleRate=48e3;isAgentSpeaking=!1;agentAudioDoneReceived=!1;sessionContext={};constructor(e){this.config=e,this.unityUrl=e.unityBaseUrl||"https://api.glydeunity.com",!e.publishableKey&&!e.apiKey&&!e.authToken&&console.warn("[GlydeVoice] No authentication method provided. One of publishableKey, apiKey, or authToken is required.")}getAuthHeaders(){const e={"Content-Type":"application/json"};return this.config.publishableKey&&(e["x-publishable-key"]=this.config.publishableKey),this.config.apiKey&&(e["x-api-key"]=this.config.apiKey),this.config.authToken&&(e.Authorization=`Bearer ${this.config.authToken}`),e}async fetchConfig(){const e=`${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`,t=this.config.contextId?`${e}/${this.config.contextId}`:e,o=await fetch(t,{method:"GET",headers:this.getAuthHeaders()});if(!o.ok){const a=await o.json();throw new Error(a.error?.message||a.message||"Failed to fetch voice config")}const{data:i}=await o.json();return i}async start(){if(!this.active){this.active=!0;try{this.config.systemPrompt||(this.serverConfig=await this.fetchConfig(),console.log("[GlydeVoice] Fetched config:",this.serverConfig));const e={context_id:this.config.contextId,domain:typeof window<"u"?window.location.hostname:"localhost"};this.config.systemPrompt&&(e.system_prompt=this.config.systemPrompt),this.config.deepgramConfig&&(e.deepgram_config=this.config.deepgramConfig);const t=await fetch(`${this.unityUrl}/api/unity/voice/auth`,{method:"POST",headers:this.getAuthHeaders(),body:JSON.stringify(e)});if(!t.ok){const s=await t.json();throw new Error(s.error?.message||s.message||"Failed to authenticate voice session")}const{data:o}=await t.json(),{token:i,agent_config:a,deepgram_config:n}=o;this.setSessionContext({clientUuid:a?.client_uuid,contextId:this.config.contextId,contextType:this.config.contextType,currentJobUuid:a?.job_uuid});const d=this.config.systemPrompt||a.instructions||this.serverConfig?.system_prompt||"You are a helpful AI assistant.";await this.initializeAudio();let h="wss://agent.deepgram.com/v1/agent/converse";const r=this.config.deepgramConfig||n||this.serverConfig?.deepgram_config;if(r?.tags&&r.tags.length>0){const s=new URLSearchParams;r.tags.forEach(u=>s.append("tag",u)),h+=`?${s.toString()}`}this.ws=new WebSocket(h,["bearer",i]),this.ws.onopen=()=>{const s=r||{think:{provider:{type:"open_ai",model:"gpt-4.1-nano"}},speak:{provider:{type:"deepgram",model:"aura-2-thalia-en"}},listen:{provider:{type:"deepgram",version:"v2",model:"flux-general-en"}}},u={type:"Settings",audio:{input:{encoding:"linear16",sample_rate:this.inputSampleRate},output:{encoding:"linear16",sample_rate:this.outputSampleRate,container:"none"}},agent:{language:"en",speak:s.speak||{provider:{type:"deepgram",model:"aura-2-thalia-en"}},listen:s.listen||{provider:{type:"deepgram",version:"v2",model:"flux-general-en"}},think:{provider:s.think?.provider||{type:"open_ai",model:"gpt-4.1-nano"},...s.think?.functions&&{functions:s.think.functions}},greeting:"Hi! I'm excited you chose to speak with me. 
Are you ready to start?"}};s.tags&&s.tags.length>0&&(u.tags=s.tags),this.ws.send(JSON.stringify(u)),this.emit({type:"open",payload:{config:a,serverConfig:this.serverConfig}})};const g=d;this.ws.onmessage=s=>{if(typeof s.data=="string"){try{if(JSON.parse(s.data).type==="SettingsApplied"){const y={type:"UpdatePrompt",prompt:g};this.ws.send(JSON.stringify(y)),this.startMicrophone()}}catch{}this.handleTextMessage(s.data)}else s.data instanceof Blob?this.handleAudioData(s.data):s.data instanceof ArrayBuffer&&this.handleAudioBuffer(s.data)},this.ws.onerror=s=>{console.error("[GlydeVoice] WebSocket error:",s),this.emit({type:"error",payload:s})},this.ws.onclose=()=>{this.cleanup(),this.emit({type:"close"})},this.renderUI()}catch(e){throw console.error("[GlydeVoice] Error starting session:",e),this.active=!1,this.emit({type:"error",payload:e}),e}}}createWorkletBlobUrl(e){const t=new Blob([e],{type:"application/javascript"});return URL.createObjectURL(t)}async initializeAudio(){this.audioContext=new AudioContext({sampleRate:this.inputSampleRate});const e=this.createWorkletBlobUrl(l),t=this.createWorkletBlobUrl(p);try{await Promise.all([this.audioContext.audioWorklet.addModule(e),this.audioContext.audioWorklet.addModule(t)])}finally{URL.revokeObjectURL(e),URL.revokeObjectURL(t)}this.playbackWorkletNode=new AudioWorkletNode(this.audioContext,"audio-playback-processor"),this.playbackWorkletNode.connect(this.audioContext.destination),this.playbackWorkletNode.port.onmessage=o=>{const{type:i}=o.data;(i==="cleared"||i==="bufferEmpty")&&(this.isAgentSpeaking=!1,this.agentAudioDoneReceived=!1,this.emit({type:"agent_speaking",payload:!1}))}}handleTextMessage(e){try{const t=JSON.parse(e);switch(t.type){case"Welcome":this.emit({type:"ready"});break;case"SettingsApplied":break;case"UserStartedSpeaking":this.emit({type:"user_speaking",payload:!0}),this.clearPlaybackBuffer(),this.isAgentSpeaking=!1,this.agentAudioDoneReceived=!1;break;case"UserStoppedSpeaking":this.emit({type:"user_speaking",payload:!1});break;case"ConversationText":if(t.content&&t.content.trim()){const o=t.role==="assistant"?"agent":"user";this.config.onTranscript&&this.config.onTranscript(t.content,o),this.emit({type:"transcript",payload:{text:t.content,role:o}}),this.saveTranscript(t.content,t.role)}break;case"AgentStartedSpeaking":this.isAgentSpeaking=!0,this.agentAudioDoneReceived=!1,this.emit({type:"agent_speaking",payload:!0});break;case"AgentAudioDone":this.agentAudioDoneReceived=!0;break;case"Error":console.error("[GlydeVoice] Agent error:",t),this.emit({type:"error",payload:t});break;case"FunctionCallRequest":this.handleFunctionCallRequest(t);break}}catch(t){console.error("[GlydeVoice] Failed to parse message:",t)}}async handleAudioData(e){const t=await e.arrayBuffer();this.handleAudioBuffer(t)}handleAudioBuffer(e){if(!this.playbackWorkletNode||!this.audioContext)return;this.audioContext.state==="suspended"&&this.audioContext.resume();const t=e.byteLength;if(t===0)return;const o=t-t%2;if(o===0)return;const i=o===t?e:e.slice(0,o),a=new Int16Array(i),n=new Float32Array(a.length);for(let r=0;r<a.length;r++)n[r]=a[r]/32768;const d=this.resample24kTo48k(n);!this.isAgentSpeaking&&!this.agentAudioDoneReceived&&(this.isAgentSpeaking=!0,this.emit({type:"agent_speaking",payload:!0}));const h=new Float32Array(d);this.playbackWorkletNode.port.postMessage({type:"audio",data:h},[h.buffer])}resample24kTo48k(e){const t=e.length*2,o=new Float32Array(t);for(let a=0;a<e.length-1;a++){const n=e[a],d=e[a+1];o[a*2]=n,o[a*2+1]=(n+d)/2}const 
i=e.length-1;return o[i*2]=e[i],o[i*2+1]=e[i],o}clearPlaybackBuffer(){this.playbackWorkletNode&&this.playbackWorkletNode.port.postMessage({type:"clear"})}async startMicrophone(){if(!this.audioContext)throw new Error("Audio context not initialized");try{this.mediaStream=await navigator.mediaDevices.getUserMedia({audio:{channelCount:1,sampleRate:this.inputSampleRate,echoCancellation:!0,noiseSuppression:!0}});const e=this.audioContext.createMediaStreamSource(this.mediaStream);this.captureWorkletNode=new AudioWorkletNode(this.audioContext,"audio-capture-processor"),this.captureWorkletNode.port.onmessage=t=>{!this.active||!this.ws||this.ws.readyState!==WebSocket.OPEN||this.isMuted||this.ws.send(t.data)},e.connect(this.captureWorkletNode),this.emit({type:"microphone_ready"})}catch(e){throw console.error("[GlydeVoice] Microphone error:",e),e}}async saveTranscript(e,t){if(!(!this.config.contextId||!e))try{await fetch(`${this.unityUrl}/api/unity/voice/transcript`,{method:"POST",headers:this.getAuthHeaders(),body:JSON.stringify({context_id:this.config.contextId,content:e,role:t==="assistant"?"assistant":"user"})})}catch{}}setMuted(e){this.isMuted=e}getMuted(){return this.isMuted}isActive(){return this.active}getServerConfig(){return this.serverConfig}stop(){this.active=!1,this.cleanup()}cleanup(){this.captureWorkletNode&&(this.captureWorkletNode.disconnect(),this.captureWorkletNode.port.close(),this.captureWorkletNode=null),this.playbackWorkletNode&&(this.playbackWorkletNode.disconnect(),this.playbackWorkletNode.port.close(),this.playbackWorkletNode=null),this.mediaStream&&(this.mediaStream.getTracks().forEach(e=>e.stop()),this.mediaStream=null),this.audioContext&&(this.audioContext.close(),this.audioContext=null),this.ws&&(this.ws.readyState===WebSocket.OPEN&&this.ws.close(),this.ws=null)}emit(e){this.config.onEvent&&this.config.onEvent(e)}renderUI(){if(!this.config.container)return;const e=typeof this.config.container=="string"?document.querySelector(this.config.container):this.config.container;e&&(e.innerHTML=`
       <div style="padding: 20px; border: 1px solid #ccc; border-radius: 8px; background: #fff;">
       <h3>Glyde Voice Agent</h3>
       <p>Status: Active</p>
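For SDK consumers, the new gpt-4.1-nano default is only a fallback; passing a deepgramConfig (or returning a deepgram_config from the server) bypasses it entirely. Below is a minimal usage sketch, assuming the constructor options visible in the minified bundle (publishableKey, contextType, contextId, deepgramConfig) and a hypothetical export name, since this diff only shows the minified class identifiers.

// Hypothetical usage sketch: the import name and option values below are
// assumptions for illustration; only the option keys appear in the bundle.
import { GlydeVoice } from "@glydeunity/voice-sdk"; // export name assumed

const voice = new GlydeVoice({
  publishableKey: "pk_live_...",   // one of publishableKey / apiKey / authToken is required
  contextType: "job",              // example value; valid context types are not shown in this diff
  contextId: "1234",
  deepgramConfig: {
    // Overrides the built-in defaults, including the new think model:
    think: { provider: { type: "open_ai", model: "gpt-4.1-mini" } }, // example override
    speak: { provider: { type: "deepgram", model: "aura-2-thalia-en" } },
    listen: { provider: { type: "deepgram", version: "v2", model: "flux-general-en" } }
  }
});

await voice.start(); // opens the agent WebSocket; microphone capture starts once settings are applied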