@glydeunity/voice-sdk 1.2.3 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -0
- package/dist/voice-sdk.es.js +48 -43
- package/dist/voice-sdk.umd.js +5 -5
- package/package.json +1 -1
package/dist/index.d.ts
CHANGED
package/dist/voice-sdk.es.js
CHANGED
@@ -1,4 +1,4 @@
-const
+const u = `
 class AudioCaptureProcessor extends AudioWorkletProcessor {
   constructor() {
     super();
@@ -33,7 +33,7 @@ class AudioCaptureProcessor extends AudioWorkletProcessor {
 }
 
 registerProcessor('audio-capture-processor', AudioCaptureProcessor);
-`,
+`, f = `
 class AudioPlaybackProcessor extends AudioWorkletProcessor {
   constructor() {
     super();
@@ -131,7 +131,7 @@ class AudioPlaybackProcessor extends AudioWorkletProcessor {
 
 registerProcessor('audio-playback-processor', AudioPlaybackProcessor);
 `;
-class
+class y {
   config;
   unityUrl;
   active = !1;
@@ -174,15 +174,15 @@ class g {
    * @returns Voice configuration including system prompt, tools, and Deepgram settings
    */
   async fetchConfig() {
-    const e = `${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`, t = this.config.contextId ? `${e}/${this.config.contextId}` : e,
+    const e = `${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`, t = this.config.contextId ? `${e}/${this.config.contextId}` : e, s = await fetch(t, {
       method: "GET",
       headers: this.getAuthHeaders()
     });
-    if (!
-    const o = await
+    if (!s.ok) {
+      const o = await s.json();
       throw new Error(o.error?.message || o.message || "Failed to fetch voice config");
     }
-    const { data: a } = await
+    const { data: a } = await s.json();
     return a;
   }
   /**
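The hunk above threads the fetch response through a named binding (`s` in the minified output). Unminified, with illustrative names standing in for the minified `e`/`t`/`s`/`o`/`a`, the new flow reads roughly as follows:

    // Sketch of the new fetchConfig flow; names are illustrative, not from the bundle.
    async function fetchVoiceConfig(unityUrl, contextType, contextId, headers) {
      const base = `${unityUrl}/api/unity/voice/config/${contextType}`;
      // contextId is optional; when present it is appended as a path segment.
      const url = contextId ? `${base}/${contextId}` : base;
      const res = await fetch(url, { method: "GET", headers });
      if (!res.ok) {
        const body = await res.json();
        // Prefer the structured error message, then the flat one, then a generic fallback.
        throw new Error(body.error?.message || body.message || "Failed to fetch voice config");
      }
      const { data } = await res.json();
      return data;
    }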
@@ -204,14 +204,19 @@ class g {
       body: JSON.stringify(e)
     });
     if (!t.ok) {
-      const
-      throw new Error(
+      const i = await t.json();
+      throw new Error(i.error?.message || i.message || "Failed to authenticate voice session");
     }
-    const { data:
+    const { data: s } = await t.json(), { token: a, agent_config: o, deepgram_config: n } = s, h = this.config.systemPrompt || o.instructions || this.serverConfig?.system_prompt || "You are a helpful AI assistant.";
     await this.initializeAudio();
-
+    let p = "wss://agent.deepgram.com/v1/agent/converse";
+    const r = this.config.deepgramConfig || n || this.serverConfig?.deepgram_config;
+    if (r?.tags && r.tags.length > 0) {
+      const i = new URLSearchParams();
+      r.tags.forEach((l) => i.append("tag", l)), p += `?${i.toString()}`;
+    }
     this.ws = new WebSocket(p, ["bearer", a]), this.ws.onopen = () => {
-      const
+      const i = r || {
        think: {
          provider: { type: "open_ai", model: "gpt-4.1-mini" },
          functions: [
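The five added lines are the main behavioral change in 1.3.1: Deepgram `tags` are now forwarded to the agent endpoint as repeated `tag` query parameters. A minimal sketch of the same logic, with illustrative tag values:

    // Each entry in deepgramConfig.tags becomes a ?tag=... parameter on the agent URL.
    let url = "wss://agent.deepgram.com/v1/agent/converse";
    const cfg = { tags: ["demo", "us-east"] }; // illustrative config
    if (cfg?.tags && cfg.tags.length > 0) {
      const params = new URLSearchParams();
      cfg.tags.forEach((tag) => params.append("tag", tag));
      url += `?${params.toString()}`;
    }
    // url is now "wss://agent.deepgram.com/v1/agent/converse?tag=demo&tag=us-east"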
@@ -292,15 +297,15 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
        },
        agent: {
          language: "en",
-         speak:
+         speak: i.speak || {
            provider: { type: "deepgram", model: "aura-2-thalia-en" }
          },
-         listen:
+         listen: i.listen || {
            provider: { type: "deepgram", version: "v2", model: "flux-general-en" }
          },
          think: {
-           provider:
-           functions:
+           provider: i.think?.provider || { type: "open_ai", model: "gpt-4.1-mini" },
+           functions: i.think?.functions || [
             {
               name: "end_conversation",
               description: `You are an AI assistant that monitors conversations and ends them when specific stop phrases are detected.
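This hunk moves from fixed agent settings to field-level fallbacks: each section of the caller-supplied Deepgram config (`i`) is used when present, otherwise the SDK default applies. Schematically (default values taken from the diff, variable names illustrative):

    // Per-section merge of caller config (i) over SDK defaults.
    const agentSettings = {
      speak: i.speak || { provider: { type: "deepgram", model: "aura-2-thalia-en" } },
      listen: i.listen || { provider: { type: "deepgram", version: "v2", model: "flux-general-en" } },
      think: {
        provider: i.think?.provider || { type: "open_ai", model: "gpt-4.1-mini" },
        functions: i.think?.functions || [/* default end_conversation function */],
      },
    };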
@@ -366,23 +371,23 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
      };
      this.ws.send(JSON.stringify(l)), this.emit({ type: "open", payload: { config: o, serverConfig: this.serverConfig } });
    };
-   const
-   this.ws.onmessage = (
-     if (typeof
+   const c = h;
+   this.ws.onmessage = (i) => {
+     if (typeof i.data == "string") {
        try {
-         if (JSON.parse(
-           const
+         if (JSON.parse(i.data).type === "SettingsApplied") {
+           const d = {
              type: "UpdatePrompt",
-             prompt:
+             prompt: c
            };
-           this.ws.send(JSON.stringify(
+           this.ws.send(JSON.stringify(d)), this.startMicrophone();
          }
        } catch {
        }
-       this.handleTextMessage(
-     } else
-   }, this.ws.onerror = (
-     console.error("[GlydeVoice] WebSocket error:",
+       this.handleTextMessage(i.data);
+     } else i.data instanceof Blob ? this.handleAudioData(i.data) : i.data instanceof ArrayBuffer && this.handleAudioBuffer(i.data);
+   }, this.ws.onerror = (i) => {
+     console.error("[GlydeVoice] WebSocket error:", i), this.emit({ type: "error", payload: i });
    }, this.ws.onclose = () => {
      this.cleanup(), this.emit({ type: "close" });
    }, this.renderUI();
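The rewritten `onmessage` handler dispatches on frame type: JSON text frames drive the settings handshake (a `SettingsApplied` message triggers an `UpdatePrompt` and starts the microphone), while binary frames carry agent audio. Expanded for readability, with `systemPrompt` standing in for the minified `c`:

    // Text frames: handshake + transcript handling. Binary frames: audio playback.
    this.ws.onmessage = (event) => {
      if (typeof event.data === "string") {
        try {
          if (JSON.parse(event.data).type === "SettingsApplied") {
            this.ws.send(JSON.stringify({ type: "UpdatePrompt", prompt: systemPrompt }));
            this.startMicrophone();
          }
        } catch {} // non-JSON text frames fall through to handleTextMessage
        this.handleTextMessage(event.data);
      } else if (event.data instanceof Blob) {
        this.handleAudioData(event.data);
      } else if (event.data instanceof ArrayBuffer) {
        this.handleAudioBuffer(event.data);
      }
    };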
@@ -407,7 +412,7 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
    */
   async initializeAudio() {
     this.audioContext = new AudioContext({ sampleRate: this.inputSampleRate });
-    const e = this.createWorkletBlobUrl(
+    const e = this.createWorkletBlobUrl(u), t = this.createWorkletBlobUrl(f);
     try {
       await Promise.all([
         this.audioContext.audioWorklet.addModule(e),
@@ -416,8 +421,8 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
     } finally {
       URL.revokeObjectURL(e), URL.revokeObjectURL(t);
     }
-    this.playbackWorkletNode = new AudioWorkletNode(this.audioContext, "audio-playback-processor"), this.playbackWorkletNode.connect(this.audioContext.destination), this.playbackWorkletNode.port.onmessage = (
-    const { type: a } =
+    this.playbackWorkletNode = new AudioWorkletNode(this.audioContext, "audio-playback-processor"), this.playbackWorkletNode.connect(this.audioContext.destination), this.playbackWorkletNode.port.onmessage = (s) => {
+      const { type: a } = s.data;
       (a === "cleared" || a === "bufferEmpty") && (this.isAgentSpeaking = !1, this.agentAudioDoneReceived = !1, this.emit({ type: "agent_speaking", payload: !1 }));
     };
   }
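Per these two hunks, `initializeAudio` loads both worklet modules from blob URLs built from the `u` (capture) and `f` (playback) template strings, and revokes the URLs even when loading fails. The pattern, unminified:

    // Both processors load in parallel; the object URLs are always released.
    const captureUrl = URL.createObjectURL(new Blob([u], { type: "application/javascript" }));
    const playbackUrl = URL.createObjectURL(new Blob([f], { type: "application/javascript" }));
    try {
      await Promise.all([
        audioContext.audioWorklet.addModule(captureUrl),
        audioContext.audioWorklet.addModule(playbackUrl),
      ]);
    } finally {
      URL.revokeObjectURL(captureUrl);
      URL.revokeObjectURL(playbackUrl);
    }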
@@ -441,8 +446,8 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
         break;
       case "ConversationText":
         if (t.content && t.content.trim()) {
-          const
-          this.config.onTranscript && this.config.onTranscript(t.content,
+          const s = t.role === "assistant" ? "agent" : "user";
+          this.config.onTranscript && this.config.onTranscript(t.content, s), this.emit({ type: "transcript", payload: { text: t.content, role: s } }), this.saveTranscript(t.content, t.role);
         }
         break;
       case "AgentStartedSpeaking":
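`ConversationText` handling now normalizes Deepgram's `assistant`/`user` roles to the SDK's `agent`/`user` vocabulary for the public callback and event, while `saveTranscript` still receives the raw role. Schematically (`message` is an illustrative name for the parsed frame `t`):

    // Public events use "agent"/"user"; persistence keeps Deepgram's original role.
    const role = message.role === "assistant" ? "agent" : "user";
    this.config.onTranscript?.(message.content, role);
    this.emit({ type: "transcript", payload: { text: message.content, role } });
    this.saveTranscript(message.content, message.role);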
@@ -475,12 +480,12 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
     this.audioContext.state === "suspended" && this.audioContext.resume();
     const t = e.byteLength;
     if (t === 0) return;
-    const
-    if (
-    const a =
-    for (let
-    r
-    const h = this.resample24kTo48k(
+    const s = t - t % 2;
+    if (s === 0) return;
+    const a = s === t ? e : e.slice(0, s), o = new Int16Array(a), n = new Float32Array(o.length);
+    for (let r = 0; r < o.length; r++)
+      n[r] = o[r] / 32768;
+    const h = this.resample24kTo48k(n);
     !this.isAgentSpeaking && !this.agentAudioDoneReceived && (this.isAgentSpeaking = !0, this.emit({ type: "agent_speaking", payload: !0 }));
     const p = new Float32Array(h);
     this.playbackWorkletNode.port.postMessage({
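The rewritten buffer handling guards against odd-length frames: constructing an `Int16Array` over a buffer whose byte length is not a multiple of 2 throws, so the new code trims a trailing odd byte before converting 16-bit PCM to floats. As a standalone sketch (function name illustrative):

    // Trim to an even byte length, view as 16-bit PCM, scale to [-1, 1).
    function pcm16ToFloat32(buffer) {
      const even = buffer.byteLength - (buffer.byteLength % 2);
      if (even === 0) return new Float32Array(0);
      const aligned = even === buffer.byteLength ? buffer : buffer.slice(0, even);
      const pcm = new Int16Array(aligned);
      const out = new Float32Array(pcm.length);
      for (let i = 0; i < pcm.length; i++) out[i] = pcm[i] / 32768;
      return out;
    }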
@@ -492,13 +497,13 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
    * Resample audio from 24kHz to 48kHz using linear interpolation
    */
   resample24kTo48k(e) {
-    const t = e.length * 2,
+    const t = e.length * 2, s = new Float32Array(t);
     for (let o = 0; o < e.length - 1; o++) {
-      const
-
+      const n = e[o], h = e[o + 1];
+      s[o * 2] = n, s[o * 2 + 1] = (n + h) / 2;
     }
     const a = e.length - 1;
-    return
+    return s[a * 2] = e[a], s[a * 2 + 1] = e[a], s;
   }
   /**
    * Clear the playback buffer (for interruption handling)
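The completed `resample24kTo48k` doubles the sample rate by emitting each input sample followed by the average of that sample and its successor (linear interpolation), duplicating the final sample to fill the last pair. Unminified, with illustrative names:

    // 24kHz -> 48kHz: each sample is followed by the midpoint to its neighbor.
    function resample24kTo48k(input) {
      const out = new Float32Array(input.length * 2);
      for (let i = 0; i < input.length - 1; i++) {
        out[i * 2] = input[i];
        out[i * 2 + 1] = (input[i] + input[i + 1]) / 2;
      }
      const last = input.length - 1;
      out[last * 2] = input[last];     // duplicate the final sample
      out[last * 2 + 1] = input[last];
      return out;
    }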
@@ -607,5 +612,5 @@ If the candidate is interested in other opportunities, you should call a GLYDE U
   }
 }
 export {
-
+  y as GlydeVoice
 };
package/dist/voice-sdk.umd.js
CHANGED
@@ -1,4 +1,4 @@
-(function(
+(function(h,p){typeof exports=="object"&&typeof module<"u"?p(exports):typeof define=="function"&&define.amd?define(["exports"],p):(h=typeof globalThis<"u"?globalThis:h||self,p(h.GlydeVoice={}))})(this,(function(h){"use strict";const p=`
 class AudioCaptureProcessor extends AudioWorkletProcessor {
   constructor() {
     super();
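The first changed line is the standard UMD preamble, rebuilt by the bundler with new minified names. Expanded, it selects CommonJS, AMD, or a browser global and mounts the bundle as `GlydeVoice`:

    // The minified (function(h,p){...})(this, ...) wrapper, expanded for readability.
    (function (root, factory) {
      if (typeof exports === "object" && typeof module !== "undefined") {
        factory(exports);             // CommonJS
      } else if (typeof define === "function" && define.amd) {
        define(["exports"], factory); // AMD
      } else {
        root = typeof globalThis !== "undefined" ? globalThis : root || self;
        factory((root.GlydeVoice = {})); // browser global
      }
    })(this, function (exports) {
      "use strict";
      /* bundle body */
    });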
@@ -130,7 +130,7 @@ class AudioPlaybackProcessor extends AudioWorkletProcessor {
 }
 
 registerProcessor('audio-playback-processor', AudioPlaybackProcessor);
-`;class f{config;unityUrl;active=!1;serverConfig=null;ws=null;audioContext=null;mediaStream=null;captureWorkletNode=null;playbackWorkletNode=null;isMuted=!1;outputSampleRate=24e3;inputSampleRate=48e3;isAgentSpeaking=!1;agentAudioDoneReceived=!1;constructor(e){this.config=e,this.unityUrl=e.unityBaseUrl||"https://api.glydeunity.com",!e.publishableKey&&!e.apiKey&&!e.authToken&&console.warn("[GlydeVoice] No authentication method provided. One of publishableKey, apiKey, or authToken is required.")}getAuthHeaders(){const e={"Content-Type":"application/json"};return this.config.publishableKey&&(e["x-publishable-key"]=this.config.publishableKey),this.config.apiKey&&(e["x-api-key"]=this.config.apiKey),this.config.authToken&&(e.Authorization=`Bearer ${this.config.authToken}`),e}async fetchConfig(){const e=`${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`,t=this.config.contextId?`${e}/${this.config.contextId}`:e,
+`;class f{config;unityUrl;active=!1;serverConfig=null;ws=null;audioContext=null;mediaStream=null;captureWorkletNode=null;playbackWorkletNode=null;isMuted=!1;outputSampleRate=24e3;inputSampleRate=48e3;isAgentSpeaking=!1;agentAudioDoneReceived=!1;constructor(e){this.config=e,this.unityUrl=e.unityBaseUrl||"https://api.glydeunity.com",!e.publishableKey&&!e.apiKey&&!e.authToken&&console.warn("[GlydeVoice] No authentication method provided. One of publishableKey, apiKey, or authToken is required.")}getAuthHeaders(){const e={"Content-Type":"application/json"};return this.config.publishableKey&&(e["x-publishable-key"]=this.config.publishableKey),this.config.apiKey&&(e["x-api-key"]=this.config.apiKey),this.config.authToken&&(e.Authorization=`Bearer ${this.config.authToken}`),e}async fetchConfig(){const e=`${this.unityUrl}/api/unity/voice/config/${this.config.contextType}`,t=this.config.contextId?`${e}/${this.config.contextId}`:e,s=await fetch(t,{method:"GET",headers:this.getAuthHeaders()});if(!s.ok){const o=await s.json();throw new Error(o.error?.message||o.message||"Failed to fetch voice config")}const{data:a}=await s.json();return a}async start(){if(!this.active){this.active=!0;try{this.config.systemPrompt||(this.serverConfig=await this.fetchConfig(),console.log("[GlydeVoice] Fetched config:",this.serverConfig));const e={context_id:this.config.contextId,domain:typeof window<"u"?window.location.hostname:"localhost"};this.config.systemPrompt&&(e.system_prompt=this.config.systemPrompt),this.config.deepgramConfig&&(e.deepgram_config=this.config.deepgramConfig);const t=await fetch(`${this.unityUrl}/api/unity/voice/auth`,{method:"POST",headers:this.getAuthHeaders(),body:JSON.stringify(e)});if(!t.ok){const i=await t.json();throw new Error(i.error?.message||i.message||"Failed to authenticate voice session")}const{data:s}=await t.json(),{token:a,agent_config:o,deepgram_config:n}=s,l=this.config.systemPrompt||o.instructions||this.serverConfig?.system_prompt||"You are a helpful AI assistant.";await this.initializeAudio();let c="wss://agent.deepgram.com/v1/agent/converse";const r=this.config.deepgramConfig||n||this.serverConfig?.deepgram_config;if(r?.tags&&r.tags.length>0){const i=new URLSearchParams;r.tags.forEach(d=>i.append("tag",d)),c+=`?${i.toString()}`}this.ws=new WebSocket(c,["bearer",a]),this.ws.onopen=()=>{const i=r||{think:{provider:{type:"open_ai",model:"gpt-4.1-mini"},functions:[{name:"end_conversation",description:`You are an AI assistant that monitors conversations and ends them when specific stop phrases are detected.
 
 Here is a list of phrases to listen for but not restricted to:
 -stop
@@ -166,7 +166,7 @@ Suggest other opportunities if:
 3. The user's input clearly expresses a desire to end the conversation, even if it doesn't use the exact phrases listed.
 4. The user's input clearly expresses a desire to know about urgent or immediate opportunities.
 
-If the candidate is interested in other opportunities, you should call a GLYDE Unity MCP tool to identify other job openings.`,parameters:{type:"object",properties:{item:{type:"string",description:"The phrase or text that triggered the suggestion of other opportunities"}},required:["item"]}}]},speak:{provider:{type:"deepgram",model:"aura-2-thalia-en"}},listen:{provider:{type:"deepgram",version:"v2",model:"flux-general-en"}}},d={type:"Settings",audio:{input:{encoding:"linear16",sample_rate:this.inputSampleRate},output:{encoding:"linear16",sample_rate:this.outputSampleRate,container:"none"}},agent:{language:"en",speak:
+If the candidate is interested in other opportunities, you should call a GLYDE Unity MCP tool to identify other job openings.`,parameters:{type:"object",properties:{item:{type:"string",description:"The phrase or text that triggered the suggestion of other opportunities"}},required:["item"]}}]},speak:{provider:{type:"deepgram",model:"aura-2-thalia-en"}},listen:{provider:{type:"deepgram",version:"v2",model:"flux-general-en"}}},d={type:"Settings",audio:{input:{encoding:"linear16",sample_rate:this.inputSampleRate},output:{encoding:"linear16",sample_rate:this.outputSampleRate,container:"none"}},agent:{language:"en",speak:i.speak||{provider:{type:"deepgram",model:"aura-2-thalia-en"}},listen:i.listen||{provider:{type:"deepgram",version:"v2",model:"flux-general-en"}},think:{provider:i.think?.provider||{type:"open_ai",model:"gpt-4.1-mini"},functions:i.think?.functions||[{name:"end_conversation",description:`You are an AI assistant that monitors conversations and ends them when specific stop phrases are detected.
 
 Here is a list of phrases to listen for but not restricted to:
 -stop
@@ -202,11 +202,11 @@ Suggest other opportunities if:
 3. The user's input clearly expresses a desire to end the conversation, even if it doesn't use the exact phrases listed.
 4. The user's input clearly expresses a desire to know about urgent or immediate opportunities.
 
-If the candidate is interested in other opportunities, you should call a GLYDE Unity MCP tool to identify other job openings.`,parameters:{type:"object",properties:{item:{type:"string",description:"The phrase or text that triggered the suggestion of other opportunities"}},required:["item"]}}]},greeting:"Hi! I'm excited you chose to speak with me. Are you ready to start?"}};this.ws.send(JSON.stringify(d)),this.emit({type:"open",payload:{config:o,serverConfig:this.serverConfig}})};const
+If the candidate is interested in other opportunities, you should call a GLYDE Unity MCP tool to identify other job openings.`,parameters:{type:"object",properties:{item:{type:"string",description:"The phrase or text that triggered the suggestion of other opportunities"}},required:["item"]}}]},greeting:"Hi! I'm excited you chose to speak with me. Are you ready to start?"}};this.ws.send(JSON.stringify(d)),this.emit({type:"open",payload:{config:o,serverConfig:this.serverConfig}})};const g=l;this.ws.onmessage=i=>{if(typeof i.data=="string"){try{if(JSON.parse(i.data).type==="SettingsApplied"){const y={type:"UpdatePrompt",prompt:g};this.ws.send(JSON.stringify(y)),this.startMicrophone()}}catch{}this.handleTextMessage(i.data)}else i.data instanceof Blob?this.handleAudioData(i.data):i.data instanceof ArrayBuffer&&this.handleAudioBuffer(i.data)},this.ws.onerror=i=>{console.error("[GlydeVoice] WebSocket error:",i),this.emit({type:"error",payload:i})},this.ws.onclose=()=>{this.cleanup(),this.emit({type:"close"})},this.renderUI()}catch(e){throw console.error("[GlydeVoice] Error starting session:",e),this.active=!1,this.emit({type:"error",payload:e}),e}}}createWorkletBlobUrl(e){const t=new Blob([e],{type:"application/javascript"});return URL.createObjectURL(t)}async initializeAudio(){this.audioContext=new AudioContext({sampleRate:this.inputSampleRate});const e=this.createWorkletBlobUrl(p),t=this.createWorkletBlobUrl(u);try{await Promise.all([this.audioContext.audioWorklet.addModule(e),this.audioContext.audioWorklet.addModule(t)])}finally{URL.revokeObjectURL(e),URL.revokeObjectURL(t)}this.playbackWorkletNode=new AudioWorkletNode(this.audioContext,"audio-playback-processor"),this.playbackWorkletNode.connect(this.audioContext.destination),this.playbackWorkletNode.port.onmessage=s=>{const{type:a}=s.data;(a==="cleared"||a==="bufferEmpty")&&(this.isAgentSpeaking=!1,this.agentAudioDoneReceived=!1,this.emit({type:"agent_speaking",payload:!1}))}}handleTextMessage(e){try{const t=JSON.parse(e);switch(t.type){case"Welcome":this.emit({type:"ready"});break;case"SettingsApplied":break;case"UserStartedSpeaking":this.emit({type:"user_speaking",payload:!0}),this.clearPlaybackBuffer(),this.isAgentSpeaking=!1,this.agentAudioDoneReceived=!1;break;case"UserStoppedSpeaking":this.emit({type:"user_speaking",payload:!1});break;case"ConversationText":if(t.content&&t.content.trim()){const s=t.role==="assistant"?"agent":"user";this.config.onTranscript&&this.config.onTranscript(t.content,s),this.emit({type:"transcript",payload:{text:t.content,role:s}}),this.saveTranscript(t.content,t.role)}break;case"AgentStartedSpeaking":this.isAgentSpeaking=!0,this.agentAudioDoneReceived=!1,this.emit({type:"agent_speaking",payload:!0});break;case"AgentAudioDone":this.agentAudioDoneReceived=!0;break;case"Error":console.error("[GlydeVoice] Agent error:",t),this.emit({type:"error",payload:t});break}}catch(t){console.error("[GlydeVoice] Failed to parse message:",t)}}async handleAudioData(e){const t=await e.arrayBuffer();this.handleAudioBuffer(t)}handleAudioBuffer(e){if(!this.playbackWorkletNode||!this.audioContext)return;this.audioContext.state==="suspended"&&this.audioContext.resume();const t=e.byteLength;if(t===0)return;const s=t-t%2;if(s===0)return;const a=s===t?e:e.slice(0,s),o=new Int16Array(a),n=new Float32Array(o.length);for(let r=0;r<o.length;r++)n[r]=o[r]/32768;const l=this.resample24kTo48k(n);!this.isAgentSpeaking&&!this.agentAudioDoneReceived&&(this.isAgentSpeaking=!0,this.emit({type:"agent_speaking",payload:!0}));const c=new Float32Array(l);this.playbackWorkletNode.port.postMessage({type:"audio",data:c},[c.buffer])}resample24kTo48k(e){const t=e.length*2,s=new Float32Array(t);for(let o=0;o<e.length-1;o++){const n=e[o],l=e[o+1];s[o*2]=n,s[o*2+1]=(n+l)/2}const a=e.length-1;return s[a*2]=e[a],s[a*2+1]=e[a],s}clearPlaybackBuffer(){this.playbackWorkletNode&&this.playbackWorkletNode.port.postMessage({type:"clear"})}async startMicrophone(){if(!this.audioContext)throw new Error("Audio context not initialized");try{this.mediaStream=await navigator.mediaDevices.getUserMedia({audio:{channelCount:1,sampleRate:this.inputSampleRate,echoCancellation:!0,noiseSuppression:!0}});const e=this.audioContext.createMediaStreamSource(this.mediaStream);this.captureWorkletNode=new AudioWorkletNode(this.audioContext,"audio-capture-processor"),this.captureWorkletNode.port.onmessage=t=>{!this.active||!this.ws||this.ws.readyState!==WebSocket.OPEN||this.isMuted||this.ws.send(t.data)},e.connect(this.captureWorkletNode),this.emit({type:"microphone_ready"})}catch(e){throw console.error("[GlydeVoice] Microphone error:",e),e}}async saveTranscript(e,t){if(!(!this.config.contextId||!e))try{await fetch(`${this.unityUrl}/api/unity/voice/transcript`,{method:"POST",headers:this.getAuthHeaders(),body:JSON.stringify({context_id:this.config.contextId,content:e,role:t==="assistant"?"assistant":"user"})})}catch{}}setMuted(e){this.isMuted=e}getMuted(){return this.isMuted}isActive(){return this.active}getServerConfig(){return this.serverConfig}stop(){this.active=!1,this.cleanup()}cleanup(){this.captureWorkletNode&&(this.captureWorkletNode.disconnect(),this.captureWorkletNode.port.close(),this.captureWorkletNode=null),this.playbackWorkletNode&&(this.playbackWorkletNode.disconnect(),this.playbackWorkletNode.port.close(),this.playbackWorkletNode=null),this.mediaStream&&(this.mediaStream.getTracks().forEach(e=>e.stop()),this.mediaStream=null),this.audioContext&&(this.audioContext.close(),this.audioContext=null),this.ws&&(this.ws.readyState===WebSocket.OPEN&&this.ws.close(),this.ws=null)}emit(e){this.config.onEvent&&this.config.onEvent(e)}renderUI(){if(!this.config.container)return;const e=typeof this.config.container=="string"?document.querySelector(this.config.container):this.config.container;e&&(e.innerHTML=`
     <div style="padding: 20px; border: 1px solid #ccc; border-radius: 8px; background: #fff;">
       <h3>Glyde Voice Agent</h3>
       <p>Status: Active</p>
       <p>Context: ${this.config.contextType}</p>
       <button onclick="this.closest('div').remove()">Close</button>
     </div>
-`)}}
+`)}}h.GlydeVoice=f,Object.defineProperty(h,Symbol.toStringTag,{value:"Module"})}));