@11labs/client 0.0.2 → 0.0.3-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -108,6 +108,54 @@ The options passed to `startSession` can also be used to register optional callb
  - **onStatusChange** - handler called whenever the connection status changes. Can be `connected`, `connecting` and `disconnected` (initial).
  - **onModeChange** - handler called whenever the mode changes, e.g. when the agent switches from `speaking` to `listening`, or the other way around.
 
+ #### Client Tools
+
+ Client tools are a way to enable the agent to invoke client-side functionality. This can be used to trigger actions in the client, such as opening a modal or making an API call on behalf of the user.
+
+ The client tools definition is an object of functions and needs to match your configuration in the [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai), where you can name and describe the different tools, as well as set up the parameters passed by the agent.
+
+ ```ts
+ const conversation = await Conversation.startSession({
+   clientTools: {
+     displayMessage: async (parameters: { text: string }) => {
+       alert(parameters.text);
+
+       return "Message displayed";
+     },
+   },
+ });
+ ```
+
+ If the function returns a value, it will be passed back to the agent as a response.
+ Note that the tool needs to be explicitly marked as blocking the conversation in the ElevenLabs UI for the agent to await and react to the response; otherwise the agent assumes success and continues the conversation.
+
+ #### Conversation overrides
+
+ You may choose to override various settings of the conversation and set them dynamically based on other user interactions.
+ These settings are optional and can be used to customize the conversation experience.
+ The following settings are available:
+
+ ```ts
+ const conversation = await Conversation.startSession({
+   overrides: {
+     customLlmExtraBody: {
+       customField: "custom value",
+     },
+     agent: {
+       prompt: {
+         prompt: "My custom prompt",
+       },
+       firstMessage: "My custom first message",
+       language: "en",
+     },
+     tts: {
+       voiceId: "custom voice id"
+     },
+   },
+ });
+ ```
+
  #### Return value
 
  `startSession` returns a `Conversation` instance that can be used to control the session. The method will throw an error if the session cannot be established. This can happen if the user denies microphone access, or if the websocket connection
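The README additions above already include basic snippets; as a fuller illustration (not part of the package diff), here is a minimal sketch of a client tool that performs an API call and returns its result to the agent, combined with a small override. The `fetchWeather` tool name, its parameter shape, and the endpoint URL are hypothetical and would have to match a tool configured in the ElevenLabs UI.

```ts
import { Conversation } from "@11labs/client";

// Hypothetical example: "fetchWeather" must be configured as a client tool
// (marked as blocking if the agent should wait for the result) in the ElevenLabs UI.
const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  clientTools: {
    fetchWeather: async (parameters: { city: string }) => {
      // Hypothetical endpoint; any value returned here is sent back to the agent.
      const res = await fetch(
        `https://example.com/weather?city=${encodeURIComponent(parameters.city)}`
      );
      const data = await res.json();
      return `It is currently ${data.temperature} degrees in ${parameters.city}.`;
    },
  },
  overrides: {
    agent: { language: "en" },
  },
});
```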
package/dist/index.d.ts CHANGED
@@ -1,36 +1,41 @@
  import { Input } from "./utils/input";
  import { Output } from "./utils/output";
  import { SessionConfig } from "./utils/connection";
- export type { SocketEvent } from "./utils/events";
+ import { ClientToolCallEvent } from "./utils/events";
+ export type { IncomingSocketEvent } from "./utils/events";
  export type { SessionConfig } from "./utils/connection";
  export type Role = "user" | "ai";
  export type Mode = "speaking" | "listening";
  export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
- export type Options = SessionConfig & Callbacks;
+ export type Options = SessionConfig & Callbacks & ClientToolsConfig;
+ export type ClientToolsConfig = {
+     clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
+ };
  export type Callbacks = {
      onConnect: (props: {
          conversationId: string;
      }) => void;
+     onDebug: (props: any) => void;
      onDisconnect: () => void;
+     onError: (message: string, context?: any) => void;
      onMessage: (props: {
          message: string;
          source: Role;
      }) => void;
-     onDebug: (props: any) => void;
-     onError: (message: string, context?: any) => void;
-     onStatusChange: (prop: {
-         status: Status;
-     }) => void;
      onModeChange: (prop: {
          mode: Mode;
      }) => void;
+     onStatusChange: (prop: {
+         status: Status;
+     }) => void;
+     onUnhandledClientToolCall?: (params: ClientToolCallEvent["client_tool_call"]) => void;
  };
  export declare class Conversation {
      private readonly options;
      private readonly connection;
      readonly input: Input;
      readonly output: Output;
-     static startSession(options: SessionConfig & Partial<Callbacks>): Promise<Conversation>;
+     static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>): Promise<Conversation>;
      private lastInterruptTimestamp;
      private mode;
      private status;
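As the typings above show, `startSession` now accepts `Partial<ClientToolsConfig>` alongside `SessionConfig & Partial<Callbacks>`, and `Callbacks` gained an optional `onUnhandledClientToolCall` hook. A minimal sketch of wiring both follows; the handler bodies are illustrative only, and the `tool_name`/`parameters` fields are taken from the `client_tool_call` payload visible in the bundled source.

```ts
import { Conversation } from "@11labs/client";

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  // Satisfies ClientToolsConfig:
  // (parameters: any) => Promise<string | number | void> | string | number | void
  clientTools: {
    logEvent: (parameters: any) => {
      console.log("Agent-triggered event:", parameters);
    },
  },
  // Invoked when the agent calls a tool that has no entry in `clientTools`.
  onUnhandledClientToolCall: params => {
    console.warn(`Unhandled client tool call: ${params.tool_name}`, params.parameters);
  },
  onStatusChange: ({ status }) => console.log("Status:", status),
});
```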
package/dist/lib.cjs CHANGED
@@ -1,2 +1,2 @@
- function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},e.apply(null,arguments)}function t(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(n),o=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,o=null;return Promise.resolve(function(s,i){try{var a=function(){function s(){return Promise.resolve(n.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var r=n.createMediaStreamSource(o=t),s=new AudioWorkletNode(n,"raw-audio-processor");return r.connect(a),a.connect(s),new e(n,a,s,o)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!i)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(s):s()}()}catch(e){return i(e)}return a&&a.then?a.then(void 0,i):a}(0,function(e){var t,r;throw null==(t=o)||t.getTracks().forEach(function(e){return e.stop()}),null==(r=n)||r.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),i=URL.createObjectURL(s),a=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=(n=new 
AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(i)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,a,u,t)}))}catch(e){return o(e)}var a,u;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=null!=(i=t.origin)?i:"wss://api.elevenlabs.io",c=t.signedUrl?t.signedUrl:a+"/v1/convai/conversation?agent_id="+t.agentId,l=["convai"],t.authorization&&l.push("bearer."+t.authorization),n=new WebSocket(c,l),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);u(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var i,a,c,l;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),l={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}};exports.Conversation=/*#__PURE__*/function(){function n(e,n,r,o){var s=this,i=this,a=this,c=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==s.mode&&(s.mode=e,s.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==s.status&&(s.status=e,s.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var 
t=JSON.parse(e.data);if(!u(t))return;switch(t.type){case"interruption":t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio();break;case"agent_response":s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.updateMode("speaking"));break;case"ping":s.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:s.options.onDebug(t)}}catch(t){return void s.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===s.status&&s.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&s.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return c.updateMode("listening"),c.output.worklet.port.postMessage({type:"interrupt"}),c.output.gain.gain.exponentialRampToValueAtTime(1e-4,c.output.context.currentTime+2),setTimeout(function(){c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),s.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return s.connection.conversationId},this.setVolume=function(e){s.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=s.inputFrequencyData||(s.inputFrequencyData=new Uint8Array(s.input.analyser.frequencyBinCount)),s.input.analyser.getByteFrequencyData(s.inputFrequencyData),s.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=s.outputFrequencyData||(s.outputFrequencyData=new Uint8Array(s.output.analyser.frequencyBinCount)),s.output.analyser.getByteFrequencyData(s.outputFrequencyData),s.outputFrequencyData},this.getInputVolume=function(){return s.calculateVolume(s.getInputByteFrequencyData())},this.getOutputVolume=function(){return s.calculateVolume(s.getOutputByteFrequencyData())},this.options=e,this.connection=n,this.input=r,this.output=o,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){s.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){s.updateStatus("disconnected"),s.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){s.updateStatus("disconnected"),s.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(t){try{var 
r=e({},l,t);r.onStatusChange({status:"connecting"});var s=null,i=null,u=null;return Promise.resolve(function(e,l){try{var h=Promise.resolve(o.create(16e3)).then(function(e){return s=e,Promise.resolve(c.create(t)).then(function(e){return i=e,Promise.resolve(a.create(i.sampleRate)).then(function(e){return new n(r,i,s,u=e)})})})}catch(e){return l(e)}return h&&h.then?h.then(void 0,l):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=i)||t.close(),Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=u)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},n}();
+ function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var o in n)({}).hasOwnProperty.call(n,o)&&(t[o]=n[o])}return t},t.apply(null,arguments)}function e(t){for(var e=window.atob(t),n=e.length,o=new Uint8Array(n),r=0;r<n;r++)o[r]=e.charCodeAt(r);return o.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(n),r=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=o}return t.create=function(e){try{var n=null,r=null;return Promise.resolve(function(i,s){try{var a=function(){function i(){return Promise.resolve(n.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:e},echoCancellation:{ideal:!0}}})).then(function(e){var o=n.createMediaStreamSource(r=e),i=new AudioWorkletNode(n,"raw-audio-processor");return o.connect(a),a.connect(i),new t(n,a,i,r)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(s?{sampleRate:e}:{})).createAnalyser(),u=function(){if(!s)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(i):i()}()}catch(t){return s(t)}return a&&a.then?a.then(void 0,s):a}(0,function(t){var e,o;throw null==(e=r)||e.getTracks().forEach(function(t){return t.stop()}),null==(o=n)||o.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(t){return t.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}(),i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(i),a=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=o}return t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=(n=new 
AudioContext({sampleRate:e})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(s)).then(function(){var e=new AudioWorkletNode(n,"audio-concat-processor");return e.connect(u),new t(n,a,u,e)}))}catch(t){return r(t)}var a,u;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}();function u(t){return!!t.type}var c=/*#__PURE__*/function(){function t(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=null!=(s=e.origin)?s:"wss://api.elevenlabs.io",c=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,l=["convai"],e.authorization&&l.push("bearer."+e.authorization),n=new WebSocket(c,l),Promise.resolve(new Promise(function(t,o){n.addEventListener("open",function(){if(e.overrides){var t,o,r,i,s,a={type:"conversation_initiation_client_data",conversation_initiation_client_data:{custom_llm_extra_body:e.overrides.customLlmExtraBody,agent:{prompt:null==(t=e.overrides.agent)?void 0:t.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(r=e.overrides.agent)?void 0:r.language},tts:{voice_id:null==(i=e.overrides.tts)?void 0:i.voiceId}}};null==(s=n)||s.send(JSON.stringify(a))}},{once:!0}),n.addEventListener("error",o),n.addEventListener("close",o),n.addEventListener("message",function(e){var n=JSON.parse(e.data);u(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(e){var o=e.conversation_id,r=parseInt(e.agent_output_audio_format.replace("pcm_",""));return new t(n,o,r)}))}catch(t){return r(t)}var s,a,c,l;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}};var e=t.prototype;return e.close=function(){this.socket.close()},e.sendMessage=function(t){this.socket.send(JSON.stringify(t))},t}();function l(t,e){try{var n=t()}catch(t){return e(t)}return n&&n.then?n.then(void 0,e):n}function h(t,e,n){if(!t.s){if(n instanceof d){if(!n.s)return void(n.o=h.bind(null,t,e));1&e&&(e=n.s),n=n.v}if(n&&n.then)return void n.then(h.bind(null,t,e),h.bind(null,t,2));t.s=e,t.v=n;var o=t.o;o&&o(t)}}var f={clientTools:{}},d=/*#__PURE__*/function(){function t(){}return t.prototype.then=function(e,n){var o=new t,r=this.s;if(r){var i=1&r?e:n;if(i){try{h(o,1,i(this.v))}catch(t){h(o,2,t)}return o}return this}return this.o=function(t){try{var r=t.v;1&t.s?h(o,1,e?e(r):r):n?h(o,1,n(r)):h(o,2,r)}catch(t){h(o,2,t)}},o},t}(),p={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){}};exports.Conversation=/*#__PURE__*/function(){function n(t,n,o,r){var i=this,s=this,a=this,c=this,f=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==s.status?Promise.resolve():(s.updateStatus("disconnecting"),s.connection.close(),Promise.resolve(s.input.close()).then(function(){return Promise.resolve(s.output.close()).then(function(){s.updateStatus("disconnected")})}))}catch(t){return Promise.reject(t)}},this.updateMode=function(t){t!==i.mode&&(i.mode=t,i.options.onModeChange({mode:t}))},this.updateStatus=function(t){t!==i.status&&(i.status=t,i.options.onStatusChange({status:t}))},this.onEvent=function(t){try{return Promise.resolve(l(function(){var e,n=JSON.parse(t.data);if(u(n)){var o=function(t,e){var n,o=-1;t:{for(var r=0;r<e.length;r++){var i=e[r][0];if(i){var s=i();if(s&&s.then)break t;if(s===t){o=r;break}}else o=r}if(-1!==o){do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then){n=!0;break t}var c=e[o][2];o++}while(c&&!c());return u}}var l=new d,f=h.bind(null,l,2);return(n?u.then(p):s.then(function n(s){for(;;){if(s===t){o=r;break}if(++r===e.length){if(-1!==o)break;return void h(l,1,u)}if(i=e[r][0]){if((s=i())&&s.then)return void s.then(n).then(void 0,f)}else o=r}do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then)return void u.then(p).then(void 0,f);var c=e[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,f),l;function p(t){for(;;){var n=e[o][2];if(!n||n())break;o++;for(var r=e[o][1];!r;)o++,r=e[o][1];if((t=r())&&t.then)return void t.then(p).then(void 0,f)}h(l,1,t)}}(n.type,[[function(){return"interruption"},function(){return n.interruption_event&&(a.lastInterruptTimestamp=n.interruption_event.event_id),a.fadeOutAudio(),void(e=1)}],[function(){return"agent_response"},function(){return a.options.onMessage({source:"ai",message:n.agent_response_event.agent_response}),void(e=1)}],[function(){return"user_transcript"},function(){return a.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript}),void(e=1)}],[function(){return"internal_tentative_agent_response"},function(){return a.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response}),void(e=1)}],[function(){return"client_tool_call"},function(){var t=function(){if(a.options.onUnhandledClientToolCall)return a.options.onUnhandledClientToolCall(n.client_tool_call),void(e=1);a.onError("Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",is_error:!0}),e=1},o=function(){if(a.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){var t=function(){e=1},o=l(function(){return Promise.resolve(a.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters)).then(function(t){a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,response:t,is_error:!1})})},function(t){a.onError("Client tool execution failed with following error: "+(null==t?void 0:t.message),{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})});return o&&o.then?o.then(t):t()}}();return o&&o.then?o.then(t):t()},function(){return e||e}],[function(){return"audio"},function(){return 
a.lastInterruptTimestamp<=n.audio_event.event_id&&(a.addAudioBase64Chunk(n.audio_event.audio_base_64),a.updateMode("speaking")),void(e=1)}],[function(){return"ping"},function(){return a.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id}),void(e=1)}],[void 0,function(){return a.options.onDebug(n),void(e=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){a.onError("Failed to parse event data",{event:t})}))}catch(t){return Promise.reject(t)}},this.onInputWorkletMessage=function(t){var e,n;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(e=t.data[0].buffer,n=new Uint8Array(e),window.btoa(String.fromCharCode.apply(String,n)))})},this.onOutputWorkletMessage=function(t){var e=t.data;"process"===e.type&&i.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(t){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:e(t)}),Promise.resolve()}catch(t){return Promise.reject(t)}},this.fadeOutAudio=function(){try{return f.updateMode("listening"),f.output.worklet.port.postMessage({type:"interrupt"}),f.output.gain.gain.exponentialRampToValueAtTime(1e-4,f.output.context.currentTime+2),setTimeout(function(){f.output.gain.gain.value=f.volume,f.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(t){return Promise.reject(t)}},this.onError=function(t,e){console.error(t,e),i.options.onError(t,e)},this.calculateVolume=function(t){if(0===t.length)return 0;for(var e=0,n=0;n<t.length;n++)e+=t[n]/255;return(e/=t.length)<0?0:e>1?1:e},this.getId=function(){return i.connection.conversationId},this.setVolume=function(t){i.volume=t.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.options=t,this.connection=n,this.input=o,this.output=r,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(t){i.onEvent(t)}),this.connection.socket.addEventListener("error",function(t){i.updateStatus("disconnected"),i.onError("Socket error",t)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(e){try{var o=t({},f,p,e);o.onStatusChange({status:"connecting"});var i=null,s=null,u=null;return Promise.resolve(l(function(){return Promise.resolve(r.create(16e3)).then(function(t){return i=t,Promise.resolve(c.create(e)).then(function(t){return s=t,Promise.resolve(a.create(s.sampleRate)).then(function(t){return new n(o,s,i,u=t)})})})},function(t){var e,n;return o.onStatusChange({status:"disconnected"}),null==(e=s)||e.close(),Promise.resolve(null==(n=i)?void 0:n.close()).then(function(){var e;return 
Promise.resolve(null==(e=u)?void 0:e.close()).then(function(){throw t})})}))}catch(t){return Promise.reject(t)}},n}();
  //# sourceMappingURL=lib.cjs.map
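For readers who prefer not to parse the minified bundle above: reconstructed from that source, the new build appears to send two additional WebSocket messages. The shapes below are a best-effort sketch in TypeScript, not official protocol documentation; field names are copied from the minified code.

```ts
// Sent once when the socket opens, if `overrides` were passed to startSession.
type ConversationInitiationClientData = {
  type: "conversation_initiation_client_data";
  conversation_initiation_client_data: {
    custom_llm_extra_body?: Record<string, unknown>;
    agent: {
      prompt?: { prompt?: string };
      first_message?: string;
      language?: string;
    };
    tts: { voice_id?: string };
  };
};

// Sent after a client tool finishes (or fails, or is missing on the client).
type ClientToolResult = {
  type: "client_tool_result";
  tool_call_id: string;
  response: string | number | undefined; // tool return value, or an error message
  is_error: boolean;
};
```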
package/dist/lib.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.cjs","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","export 
type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: {\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n ping_ms?: number;\n };\n};\n\n// TODO correction missing\nexport type SocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent;\n\nexport function isValidSocketEvent(event: any): event is SocketEvent {\n return !!event.type;\n}\n","import { ConfigEvent, isValidSocketEvent } from \"./events\";\n\nconst MAIN_PROTOCOL = \"convai\";\n\nexport type SessionConfig = {\n origin?: string;\n authorization?: string;\n} & (\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined }\n);\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin = config.origin ?? WSS_API_ORIGIN;\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + WSS_API_PATHNAME + config.agentId;\n\n const protocols = [MAIN_PROTOCOL];\n if (config.authorization) {\n protocols.push(`bearer.${config.authorization}`);\n }\n socket = new WebSocket(url, protocols);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport { isValidSocketEvent, PingEvent } from \"./utils/events\";\n\nexport type { SocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks;\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n onDisconnect: () => void;\n onMessage: (props: { message: string; source: Role }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onError: (message: string, context?: any) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onDebug: () => {},\n onMessage: () => {},\n onStatusChange: () => {},\n onModeChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n 
private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.socket.send(\n JSON.stringify({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n })\n );\n // parsedEvent.ping_event.ping_ms can be used on client side, for example\n // to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n const audioMessage = 
JSON.stringify({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n if (this.status === \"connected\") {\n this.connection.socket.send(audioMessage);\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? \"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 
1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","_proto","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_config$origin","url","signedUrl","agentId","protocols","authorization","push","WebSocket","addEventListener","message","JSON","parse","data","conversation_initiation_metadata_event","console","warn","once","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket","defaultCallbacks","onConnect","onDisconnect","onError","onDebug","onMessage","onStatusChange","onModeChange","Conversation","options","connection","input","output","_this2","_this","_this3","_this4","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","parsedEvent","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","audio_event","addAudioBase64Chunk","audio_base_64","send","stringify","ping_event","_unused","onInputWorkletMessage","b","audioMessage","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","_ref","finished","chunk","value","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","_ref2","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2B
AGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAuCX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAHAH,KAAAA,aACAC,EAAAA,KAAAA,cACAC,EAAAA,KAAAA,oBACAC,iBAAA,EAHAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,CAKF,OALGJ,EA3CgBM,OAAA,SAAOC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAEvC,WAAA,SAAAC,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,OAE7BL,KAAA,SAAAO,GAEF,IAAMC,EAASpB,EAAQqB,wBAPvBlB,EAAWgB,GAQLjB,EAAU,IAAIoB,iBAAiBtB,EAAS,uBAK9C,OAHAoB,EAAOG,QAAQtB,GACfA,EAASsB,QAAQrB,GAEN,IAAAH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAzB1D,EAAA,CAAA,IAAMqB,EACJX,UAAUC,aAAaW,0BAA0BnB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO0C,aACnBF,EAA+B,CAAElB,WAAAA,GAAe,CAAE,IAE3BqB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BjB,OAAAA,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAAgB,WAAAA,EAAAA,CADpD,GACoDA,OAAAA,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAkB1D,6DA7B2CoB,CAEvC,EA2BKC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA5B,IAAA4B,EAAaE,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GACtDJ,OAAAA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAzC,EAAA0C,UASYJ,iBAAK,IAC4C,OAA5DjC,KAAKD,YAAY8B,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GAAE7B,QAAAC,QAA5DJ,KACWJ,QAAQqC,SAAOzB,KAAA,WAAA,EAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAvC,CAAA,CAVD,GC5CIN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI0B,aAAa,CAAEpB,WAAAA,KACJqB,kBACnBiB,EAAO5C,EAAQ6C,cAChBtB,QAAQtB,GACbA,EAASsB,QAAQvB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIoB,iBAAiBtB,EAAS,0BAG9C,OAFAE,EAAQqB,QAAQqB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCf,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAG,EAAAF,UASYJ,MAAK,WAAA,IACN9B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQqC,SAAOzB,kBAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,KAAAK,CAAA,CATD,GC8Bc,SAAAI,EAAmBC,GACjC,QAASA,EAAMrD,IACjB,CCpDA,IAaasD,eAAU,WAqDrB,SAAAA,EACkBC,EACAC,EACA7C,GAFA4C,KAAAA,mBACAC,oBAAA,EAAA/C,KACAE,gBAFA,EAAAF,KAAM8C,OAANA,EACA9C,KAAc+C,eAAdA,EACA/C,KAAUE,WAAVA,CACf,CAIF,OAJG2C,EAxDgB5C,OAAA,SAAO+C,GAAqB,IAC9C,IAAIF,EAA2B,KAAK,OAAA3C,QAAAC,iCAG5B6C,EAAsB,OAAhBC,EAAGF,EAAOC,QAAMC,EARX,0BASXC,EAAMH,EAAOI,UACfJ,EAAOI,UACPH,EAVe,oCAUaD,EAAOK,QAEjCC,EAAY,CAvBF,UAwBZN,EAAOO,eACTD,EAAUE,eAAeR,EAAOO,eAElCT,EAAS,IAAIW,UAAUN,EAAKG,GAAWnD,QAAAC,QACN,IAAID,QAEnC,SAACC,EAAS+B,GACVW,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBACN,UACA,SAACd,GACC,IAAMe,EAAUC,KAAKC,MAAMjB,EAAMkB,MAE5BnB,EAAmBgB,KAIH,qCAAjBA,EAAQpE,KACVa,EAAQuD,EAAQI,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEC,MAAM,GAEZ,IAAE1D,cAxBI2D,GA0BN,IAAMpB,EAAiBoB,EAAmBC,gBACpClE,EAAamE,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAO,IAAI1B,EAAWC,EAAQC,EAAgB7C,EAAY,yBA5CxB,IAEhCgD,EACID,EACAE,EAIAG,sCAR4B7B,CAAA,EA6C3BC,SAAAA,GAAO,IAAA8C,EAEd,MADAA,OAAAA,EAAA1B,IAAA0B,EAAQvC,QACFP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAW,EAAAR,UAQMJ,MAAA,WACLjC,KAAK8C,OAAOb,OACd,EAACY,CAAA,CA7DoB,GCejB4B,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,aAAc,WAAK,EACnBC,QAAS,WAAK,EACdC,QAAS,WAAQ,EACjBC,UAAW,aACXC,eAAgB,WAAK,EACrBC,aAAc,gDAGS,WAqCvB,SAAAC,EACmBC,EACAC,EACDC,EACAC,GAAcC,I
AAAA,OAAAC,EAsB1BvF,KAAIwF,EA8HRxF,KAAIyF,EAUJzF,KAAIA,KAjKakF,aACAC,EAAAA,KAAAA,uBACDC,WAAA,EAAApF,KACAqF,YAXVK,EAAAA,KAAAA,uBAAiC,EAAC1F,KAClC2F,KAAa,iBACbC,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAA9F,KACnB+F,OAAiB,OA2BlBC,WAAU,WAAA,IACf,MAAoB,cAAhBT,EAAKK,OAAwBzF,QAAAC,WACjCmF,EAAKU,aAAa,iBAElBV,EAAKJ,WAAWlD,QAAQ9B,QAAAC,QAClBmF,EAAKH,MAAMnD,SAAOzB,uBAAAL,QAAAC,QAClBmF,EAAKF,OAAOpD,SAAOzB,KAAA,WAEzB+E,EAAKU,aAAa,eAAgB,EACpC,GAAA,CAAC,MAAA/D,UAAA/B,QAAAgC,OAAAD,UAEOgE,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQF,aAAa,CAAEW,KAAAA,IAEhC,EAAC3F,KAEOiG,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQH,eAAe,CAAEa,OAAAA,IAElC,EAEQO,KAAAA,QAAU,SAACvD,GACjB,IACE,IAAMwD,EAAcxC,KAAKC,MAAMjB,EAAMkB,MAErC,IAAKnB,EAAmByD,GACtB,OAGF,OAAQA,EAAY7G,MAClB,IAAK,eACC6G,EAAYC,qBACdf,EAAKI,uBACHU,EAAYC,mBAAmBC,UAEnChB,EAAKiB,eACL,MAGF,IAAK,iBACHjB,EAAKJ,QAAQJ,UAAU,CACrB9D,OAAQ,KACR2C,QAASyC,EAAYI,qBAAqBC,iBAE5C,MAGF,IAAK,kBACHnB,EAAKJ,QAAQJ,UAAU,CACrB9D,OAAQ,OACR2C,QAASyC,EAAYM,yBAAyBC,kBAEhD,MAGF,IAAK,oCACHrB,EAAKJ,QAAQL,QAAQ,CACnBtF,KAAM,2BACNqH,SACER,EAAYS,wCACTC,2BAEP,MAGF,IAAK,QAEDxB,EAAKI,wBAA0BU,EAAYW,YAAYT,WAEvDhB,EAAK0B,oBAAoBZ,EAAYW,YAAYE,eACjD3B,EAAKY,WAAW,aAElB,MAGF,IAAK,OACHZ,EAAKH,WAAWrC,OAAOoE,KACrBtD,KAAKuD,UAAU,CACb5H,KAAM,OACN+G,SAAWF,EAA0BgB,WAAWd,YAKpD,MAIF,QACEhB,EAAKJ,QAAQL,QAAQuB,GAI3B,CAAE,MAAAiB,GAEA,YADA/B,EAAKV,QAAQ,6BAA8B,CAAEhC,MAAAA,GAE/C,CACF,EAEQ0E,KAAAA,sBAAwB,SAAC1E,GAC/B,IP9MgC2E,EAC5BnI,EOmNEoI,EAAe5D,KAAKuD,UAAU,CAClCM,kBPrN8BF,EO8MR3E,EAAMkB,KAAK,GAOqB1E,OPpNpDA,EAAS,IAAIH,WAAWsI,GAEX3I,OAAO8I,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBvI,OOqNhC,cAAhBkG,EAAKM,QACPN,EAAKH,WAAWrC,OAAOoE,KAAKM,EAGhC,OAEQM,uBAAyB,SAAAC,OAAGjE,EAAIiE,EAAJjE,KAChB,YAAdA,EAAKvE,MACP+F,EAAKY,WAAWpC,EAAKkE,SAAW,YAAc,WAElD,OAEQhB,oBAAmB,SAAUiB,GAAiB,IAMjD,OALHzC,EAAKH,OAAO7C,KAAKA,KAAK0F,MAAQ1C,EAAKO,OACnCP,EAAKH,OAAOvF,QAAQqI,KAAKC,YAAY,CAAE7I,KAAM,qBAC7CiG,EAAKH,OAAOvF,QAAQqI,KAAKC,YAAY,CACnC7I,KAAM,SACNH,OAAQX,EAAoBwJ,KAC3B9H,QAAAC,SACL,CAAC,MAAA8B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAlC,KAEOuG,aAA0B,WAAA,IAavB,OAXTd,EAAKS,WAAW,aAChBT,EAAKJ,OAAOvF,QAAQqI,KAAKC,YAAY,CAAE7I,KAAM,cAC7CkG,EAAKJ,OAAO7C,KAAKA,KAAK6F,6BACpB,KACA5C,EAAKJ,OAAOzF,QAAQ0I,YAAc,GAIpCC,WAAW,WACT9C,EAAKJ,OAAO7C,KAAKA,KAAK0F,MAAQzC,EAAKM,OACnCN,EAAKJ,OAAOvF,QAAQqI,KAAKC,YAAY,CAAE7I,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA8B,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEO0C,CAAAA,EAAAA,KAAAA,QAAU,SAACjB,EAAiB/D,GAClCoE,QAAQtC,MAAMiC,EAAS/D,GACvB0F,EAAKJ,QAAQN,QAAQjB,EAAS/D,EAChC,OAEQ4I,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAc1J,OAChB,OACF,EAKA,IADA,IAAIgH,EAAS,EACJ7G,EAAI,EAAGA,EAAIuJ,EAAc1J,OAAQG,IACxC6G,GAAU0C,EAAcvJ,GAAK,IAI/B,OAFA6G,GAAU0C,EAAc1J,QAER,EAAI,EAAIgH,EAAS,EAAI,EAAIA,CAC3C,EAAC/F,KAEM0I,MAAQ,WAAA,OAAMpD,EAAKH,WAAWpC,cAAc,OAE5C4F,UAAY,SAAAC,GACjBtD,EAAKS,OADqB6C,EAAN7C,MAEtB,EAAC/F,KAEM6I,0BAA4B,WAKjC,aAJAvD,EAAKO,qBAALP,EAAKO,mBAAuB,IAAI5G,WAC9BqG,EAAKF,MAAMvF,SAASiJ,oBAEtBxD,EAAKF,MAAMvF,SAASkJ,qBAAqBzD,EAAKO,oBACvCP,EAAKO,kBACd,EAAC7F,KAEMgJ,2BAA6B,WAKlC,aAJA1D,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAI7G,WAC/BqG,EAAKD,OAAOxF,SAASiJ,oBAEvBxD,EAAKD,OAAOxF,SAASkJ,qBAAqBzD,EAAKQ,qBACxCR,EAAKQ,mBACd,OAEOmD,eAAiB,WACtB,OAAO3D,EAAKkD,gBAAgBlD,EAAKuD,4BACnC,OAEOK,gBAAkB,WACvB,OAAO5D,EAAKkD,gBAAgBlD,EAAK0D,6BACnC,EAhOmBhJ,KAAOkF,QAAPA,EACAlF,KAAUmF,WAAVA,EACDnF,KAAKoF,MAALA,EACApF,KAAMqF,OAANA,EAEhBrF,KAAKkF,QAAQR,UAAU,CAAE3B,eAAgBoC,EAAWpC,iBAEpD/C,KAAKmF,WAAWrC,OAAOY,iBAAiB,UAAW,SAAAd,GACjD0C,EAAKa,QAAQvD,EACf,GACA5C,KAAKmF,WAAWrC,OAAOY,iBAAiB,QAAS,SAAAd,GAC/C0C,EAAKW,aAAa,gBAClBX,EAAKV,QAAQ,eAAgBhC,EAC/B,GACA5C,KAAKmF,WAAW
rC,OAAOY,iBAAiB,QAAS,WAC/C4B,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQP,cACf,GAEA3E,KAAKoF,MAAMtF,QAAQqI,KAAKgB,UAAYnJ,KAAKsH,sBACzCtH,KAAKqF,OAAOvF,QAAQqI,KAAKgB,UAAYnJ,KAAK8H,uBAC1C9H,KAAKiG,aAAa,YACpB,CAhCC,OAgCAhB,EA3DmBmE,aAAY,SAC9BlE,GAA2C,IAE3C,IAAMmE,EAAWC,EAAA,GACZ7E,EACAS,GAGLmE,EAAYtE,eAAe,CAAEa,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAAlF,QAAAC,gCAE7BD,QAAAC,QACYT,EAAMM,OA5BE,OA4ByBO,KAAA,SAAA+I,GAAC,OAAhDnE,EAAKmE,EAA2CpJ,QAAAC,QAC7ByC,EAAW5C,OAAOiF,IAAQ1E,KAAAgJ,SAAAA,GAAC,OAA9CrE,EAAUqE,EAAoCrJ,QAAAC,QAC/BmC,EAAOtC,OAAOkF,EAAWjF,aAAWM,KAAA,SAAAiJ,GAEnD,WAAWxE,EAAaoE,EAAalE,EAAYC,EAFjDC,EAAMoE,EAE0D,gEAPjChI,GAQhC,SAAQC,GAAO,IAAAgI,EAAAC,EAEM,OADpBN,EAAYtE,eAAe,CAAEa,OAAQ,iBACrC8D,OAAAA,EAAAvE,IAAAuE,EAAYzH,QAAQ9B,QAAAC,QACduJ,OADcA,EACdvE,QAAAuE,EAAAA,EAAO1H,SAAOzB,oBAAAoJ,EAAA,OAAAzJ,QAAAC,QACdwJ,OADcA,EACdvE,QAAAuE,EAAAA,EAAQ3H,SAAOzB,gBACrB,MAAMkB,CAAM,EACd,EAAA,GACF,CAAC,MAAAQ,GAAA/B,OAAAA,QAAAgC,OAAAD,EAAA,CAAA,EAAA+C,CAAA,CA5BsB"}
1
+ {"version":3,"file":"lib.cjs","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","import { 
Language } from \"./connection\";\n\nexport type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AgentAudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: {\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n ping_ms?: number;\n };\n};\nexport type ClientToolCallEvent = {\n type: \"client_tool_call\";\n client_tool_call: {\n tool_name: string;\n tool_call_id: string;\n parameters: any;\n expects_response: boolean;\n };\n};\n\n// TODO correction missing\nexport type IncomingSocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AgentAudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent\n | ClientToolCallEvent;\n\nexport type PongEvent = {\n type: \"pong\";\n event_id: number;\n};\nexport type UserAudioEvent = {\n user_audio_chunk: string;\n};\nexport type ClientToolResultEvent = {\n type: \"client_tool_result\";\n tool_call_id: string;\n response: any;\n is_error: boolean;\n};\nexport type ClientOverridesEvent = {\n type: \"conversation_initiation_client_data\";\n conversation_initiation_client_data: {\n custom_llm_extra_body?: any;\n agent?: {\n prompt?: {\n prompt?: string;\n };\n first_message?: string;\n language?: Language;\n };\n tts?: {\n voice_id?: string;\n };\n };\n};\nexport type OutgoingSocketEvent =\n | PongEvent\n | UserAudioEvent\n | ClientOverridesEvent\n | ClientToolResultEvent;\n\nexport function isValidSocketEvent(event: any): event is IncomingSocketEvent {\n return !!event.type;\n}\n","import {\n ClientOverridesEvent,\n ConfigEvent,\n isValidSocketEvent,\n OutgoingSocketEvent,\n} from \"./events\";\n\nconst MAIN_PROTOCOL = \"convai\";\n\nexport type Language =\n | \"en\"\n | \"ja\"\n | \"zh\"\n | \"de\"\n | \"hi\"\n | \"fr\"\n | \"ko\"\n | \"pt\"\n | \"it\"\n | \"es\"\n | \"id\"\n | \"nl\"\n | \"tr\"\n | \"pl\"\n | \"sv\"\n | \"bg\"\n | \"ro\"\n | \"ar\"\n | \"cs\"\n | \"el\"\n | \"fi\"\n | \"ms\"\n | \"da\"\n | \"ta\"\n | \"uk\"\n | \"ru\"\n | \"hu\"\n | \"no\"\n | \"vi\";\nexport type SessionConfig = {\n origin?: string;\n authorization?: string;\n overrides?: {\n customLlmExtraBody?: any;\n agent?: {\n prompt?: {\n prompt?: string;\n };\n firstMessage?: string;\n language?: Language;\n };\n tts?: {\n voiceId?: string;\n };\n };\n} & (\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined }\n);\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin = config.origin ?? WSS_API_ORIGIN;\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + WSS_API_PATHNAME + config.agentId;\n\n const protocols = [MAIN_PROTOCOL];\n if (config.authorization) {\n protocols.push(`bearer.${config.authorization}`);\n }\n socket = new WebSocket(url, protocols);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\n \"open\",\n () => {\n if (config.overrides) {\n const overridesEvent: ClientOverridesEvent = {\n type: \"conversation_initiation_client_data\",\n conversation_initiation_client_data: {\n custom_llm_extra_body: config.overrides.customLlmExtraBody,\n agent: {\n prompt: config.overrides.agent?.prompt,\n first_message: config.overrides.agent?.firstMessage,\n language: config.overrides.agent?.language,\n },\n tts: {\n voice_id: config.overrides.tts?.voiceId,\n },\n },\n };\n\n socket?.send(JSON.stringify(overridesEvent));\n }\n },\n { once: true }\n );\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n\n public sendMessage(message: OutgoingSocketEvent) {\n this.socket.send(JSON.stringify(message));\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport {\n ClientToolCallEvent,\n isValidSocketEvent,\n PingEvent,\n} from \"./utils/events\";\n\nexport type { IncomingSocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks & ClientToolsConfig;\nexport type ClientToolsConfig = {\n clientTools: Record<\n string,\n (\n parameters: any\n ) => Promise<string | number | void> | string | number | void\n >;\n};\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onDisconnect: () => void;\n onError: (message: string, context?: any) => void;\n onMessage: (props: { message: string; source: Role }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onUnhandledClientToolCall?: (\n params: ClientToolCallEvent[\"client_tool_call\"]\n ) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultClientTools = { clientTools: {} };\nconst 
defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDebug: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onMessage: () => {},\n onModeChange: () => {},\n onStatusChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultClientTools,\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = async (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n 
.tentative_agent_response,\n });\n break;\n }\n\n case \"client_tool_call\": {\n if (\n this.options.clientTools.hasOwnProperty(\n parsedEvent.client_tool_call.tool_name\n )\n ) {\n try {\n const result = await this.options.clientTools[\n parsedEvent.client_tool_call.tool_name\n ](parsedEvent.client_tool_call.parameters);\n\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n response: result,\n is_error: false,\n });\n } catch (e) {\n this.onError(\n \"Client tool execution failed with following error: \" +\n (e as Error)?.message,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n response:\n \"Client tool execution failed: \" + (e as Error)?.message,\n is_error: true,\n });\n }\n\n break;\n }\n\n if (this.options.onUnhandledClientToolCall) {\n this.options.onUnhandledClientToolCall(\n parsedEvent.client_tool_call\n );\n\n break;\n }\n\n this.onError(\n `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n response: `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n is_error: true,\n });\n\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.sendMessage({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n });\n // parsedEvent.ping_event.ping_ms can be used on client side, for example\n // to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n if (this.status === \"connected\") {\n this.connection.sendMessage({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n 
};\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","_proto","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_config$origin","url","signedUrl","agentId","protocols","authorization","push","WebSocket","addEventListener","overrides","_config$overrides$age","_config$overrides$age2","_config$overrides$age3","_config$overrides$tts","_socket","overridesEvent","conversation_initiation_client_data","custom_llm_extra_body","customLlmExtraBody","agent","prompt","first_message","firstMessage","language","tts","voice_id","voiceId","send","JSON","stringify","once","message","parse","data","conversation_initiation_metadata_event","console","warn","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket2","sendMessage","_settle","pact","state","value","s","_Pact","o","bind","observer","defaultClientTools","clientTools","result","onFulfilled","onRejected","callback","v","_this","defaultCallbacks","onConnect","onDebug","onDisconnect","onError","onMessage","onModeChange","onStatusChange","Conversation","options","connection","input","output","_this2","_this3","_this4","_this5","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","_interrupt","parsedEvent","_temp5","_switch","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","_temp4","onUnhandledClientToolCall","client_tool_call","tool_name","clientToolName","tool_call_id","is_error","_temp3","hasOwnProperty","parameters","audio_event","addAudioBase64Chunk","audio_base_64","ping_event","onInputWorkletMessage","b","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","_ref","finished","chunk","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","_ref2","getInputByteFrequencyData","_this2$inputFrequency","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAuCX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAHAH,KAAAA,aACAC,EAAAA,KAAAA,cACAC,EAAAA,KAAAA,oBACAC,iBAAA,EAHAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,CAK
F,OALGJ,EA3CgBM,OAAA,SAAOC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAEvC,WAAA,SAAAC,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,OAE7BL,KAAA,SAAAO,GAEF,IAAMC,EAASpB,EAAQqB,wBAPvBlB,EAAWgB,GAQLjB,EAAU,IAAIoB,iBAAiBtB,EAAS,uBAK9C,OAHAoB,EAAOG,QAAQtB,GACfA,EAASsB,QAAQrB,GAEN,IAAAH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAzB1D,EAAA,CAAA,IAAMqB,EACJX,UAAUC,aAAaW,0BAA0BnB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO0C,aACnBF,EAA+B,CAAElB,WAAAA,GAAe,CAAE,IAE3BqB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BjB,OAAAA,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAAgB,WAAAA,EAAAA,CADpD,GACoDA,OAAAA,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAkB1D,6DA7B2CoB,CAEvC,EA2BKC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA5B,IAAA4B,EAAaE,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GACtDJ,OAAAA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAzC,EAAA0C,UASYJ,iBAAK,IAC4C,OAA5DjC,KAAKD,YAAY8B,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GAAE7B,QAAAC,QAA5DJ,KACWJ,QAAQqC,SAAOzB,KAAA,WAAA,EAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAvC,CAAA,CAVD,GC5CIN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI0B,aAAa,CAAEpB,WAAAA,KACJqB,kBACnBiB,EAAO5C,EAAQ6C,cAChBtB,QAAQtB,GACbA,EAASsB,QAAQvB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIoB,iBAAiBtB,EAAS,0BAG9C,OAFAE,EAAQqB,QAAQqB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCf,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAG,EAAAF,UASYJ,MAAK,WAAA,IACN9B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQqC,SAAOzB,kBAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,KAAAK,CAAA,CATD,GC6Ec,SAAAI,EAAmBC,GACjC,QAASA,EAAMrD,IACjB,CC9FA,IAwDasD,eAAU,WA6ErB,SAAAA,EACkBC,EACAC,EACA7C,GAAkBF,KAFlB8C,YAAA,EAAA9C,KACA+C,oBAAA,EAAA/C,KACAE,gBAFA,EAAAF,KAAM8C,OAANA,EACA9C,KAAc+C,eAAdA,EACA/C,KAAUE,WAAVA,CACf,CAAC2C,EAhFgB5C,OAAA,SAAO+C,GAAqB,IAC9C,IAAIF,EAA2B,KAAK,OAAA3C,QAAAC,iCAG5B6C,EAAsB,OAAhBC,EAAGF,EAAOC,QAAMC,EARX,0BASXC,EAAMH,EAAOI,UACfJ,EAAOI,UACPH,EAVe,oCAUaD,EAAOK,QAEjCC,EAAY,CAlEF,UAmEZN,EAAOO,eACTD,EAAUE,KAAI,UAAWR,EAAOO,eAElCT,EAAS,IAAIW,UAAUN,EAAKG,GAAWnD,QAAAC,QACN,IAAID,QAEnC,SAACC,EAAS+B,GACVW,EAAQY,iBACN,OACA,WACE,GAAIV,EAAOW,UAAW,CAAA,IAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EACdC,EAAuC,CAC3C1E,KAAM,sCACN2E,oCAAqC,CACnCC,sBAAuBnB,EAAOW,UAAUS,mBACxCC,MAAO,CACLC,OAA8B,OAAxBV,EAAEZ,EAAOW,UAAUU,YAAK,EAAtBT,EAAwBU,OAChCC,cAAeV,OAAFA,EAAEb,EAAOW,UAAUU,YAAjBR,EAAAA,EAAwBW,aACvCC,SAAgC,OAAxBX,EAAEd,EAAOW,UAAUU,YAAK,EAAtBP,EAAwBW,UAEpCC,IAAK,CACHC,SAA8B,OAAtBZ,EAAEf,EAAOW,UAAUe,UAAG,EAApBX,EAAsBa,WAKhC,OAANZ,EAAAlB,IAAAkB,EAAQa,KAAKC,KAAKC,UAAUd,GAC9B,CACF,EACA,CAAEe,MAAM,IAEVlC,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBACN,UACA,SAACd,GACC,IAAMqC,EAAUH,KAAKI,MAAMtC,EAAMuC,MAE5BxC,EAAmBsC,KAIH,qCAAjBA,EAAQ1F,KACVa,EAAQ6E,EAAQG,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEN,MAAM,GAEZ,IAAExE,KAAA,SAhDI+E,GAkDN,IAAMxC,EAAiBwC,EAAmBC,gBACpCtF,EAAauF,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAW,IAAA9C,EAAWC,EAAQC,EAAgB7C,EAAY,yBApExB,IAEhCgD,EACID,EACAE,EAIAG,sCAR4B7B,CAAA,EAqE3BC,SAAAA,GAAO,IAAAkE,EAEd,MADM,OAANA,EAAA9C,IAAA8C,EAAQ3D,QACFP,CACR,GACF,
CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAA,IAAAA,EAAAS,EAAAR,iBAAAD,EAQMH,MAAA,WACLjC,KAAK8C,OAAOb,OACd,EAACG,EAEMyD,YAAA,SAAYZ,GACjBjF,KAAK8C,OAAO+B,KAAKC,KAAKC,UAAUE,GAClC,EAACpC,CAAA,CAzFoB,mGCiBPiD,EAAEC,EAAKC,EAAGC,GACpB,IAAAF,EAAAG,EAAA,CACA,GAAAD,aAAYE,EAAO,CACnB,IAAAF,EAAAC,EAQI,YADAD,EAAAG,EAAAN,EAAkBO,KAAc,KAAAN,EAAAC,IANrC,EAADA,IACDA,EAAAC,EAAAC,GAGOD,KAMR,OAC0BA,EAAAzF,mBACGA,KAAAsF,EAAAO,YAAYL,GAAAF,EAAAO,KAAA,KAAAN,EAAA,IAIvCA,EAAAG,EAAAF,MAEIC,EACF,IAAAK,EAAKP,EAAOK,EACdE,KACIP,EAEF,CACF,CA/DJ,IAAMQ,EAAqB,CAAEC,YAAa,IA5CjCL,0BACT,SAAAA,IAAiB,QACjBA,EAAO9D,UAAE7B,KAA2B,cACpC,IAEEiG,EAAA,IAAAN,EAqCIH,EAAAhG,KAAAkG,EAEN,GAAMF,GACN,MAAoC,IAAAU,EAAAC,EAClC,GAAAC,EAAW,CACX,IACAd,EAAAW,EAAiB,EAAEG,EAAG5G,KAAA6G,GACtB,CAAA,MAAS3E,GACT4D,EAASW,EAAO,EAAGvE,EACnB,CACA,OAAAuE,CACA,CAEF,WAwCqB,QACDzG,KAAAoG,EAAA,SAAAU,GACA,IAzCX,IAAMb,EAAOa,EAAAD,IAGlBC,EAAMZ,EACJJ,EAAAW,EAAqB,EAAAC,EAAAA,EAAAT,GAAAA,GAClBU,EACHb,EAAAW,EAAU,EAAAE,EAAAV,MAGDQ,aAGPvE,KACAuE,EAAwB,EAAAvE,EAE5B,WA1BE6E,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,QAAS,WAAK,EACdC,aAAc,WAAQ,EACtBC,QAAS,WAAK,EACdC,UAAW,aACXC,aAAc,WAAK,EACnBC,eAAgB,2DAyChB,SAAAC,EACmBC,EACAC,EACDC,EACAC,GAAcC,IAAAA,OAAAd,EAsB1B9G,KAAI6H,EAmCA7H,KAAI8H,EAqJZ9H,KAAI+H,EAUJ/H,UA3NiBwH,aAAA,EAAAxH,KACAyH,gBACDC,EAAAA,KAAAA,kBACAC,YAAA,EAAA3H,KAXVgI,uBAAiC,EACjCC,KAAAA,KAAa,YAAWjI,KACxBkI,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAApI,KACnBqI,OAAiB,EA2BlBC,KAAAA,WAAwB,WAAA,IAC7B,MAAoB,cAAhBxB,EAAKoB,OAAwB/H,QAAAC,WACjC0G,EAAKyB,aAAa,iBAElBzB,EAAKW,WAAWxF,QAAQ9B,QAAAC,QAClB0G,EAAKY,MAAMzF,SAAOzB,KAAAL,WAAAA,OAAAA,QAAAC,QAClB0G,EAAKa,OAAO1F,SAAOzB,KAEzBsG,WAAAA,EAAKyB,aAAa,eAAgB,EAAA,GACpC,CAAC,MAAArG,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEOsG,CAAAA,EAAAA,KAAAA,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQH,aAAa,CAAEY,KAAAA,IAEhC,OAEQM,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQF,eAAe,CAAEY,OAAAA,IAElC,OAEQO,QAAO,SAAU7F,GAAuB,IAAA,OAAAzC,QAAAC,QAAAqB,aAC1CiH,IAAAA,EACIC,EAAc7D,KAAKI,MAAMtC,EAAMuC,MAErC,GAAKxC,EAAmBgG,GAAxB,CAEC,IAAAC,uzBAAAC,CAEOF,EAAYpJ,wBACb,cAAc,EAAA,WAOnB,OANMoJ,EAAYG,qBACdjB,EAAKG,uBACHW,EAAYG,mBAAmBC,UAEnClB,EAAKmB,oBAAeN,EAEtB,EAAC,qBAEI,gBAAgB,EAAA,WAMrB,OALEb,EAAKL,QAAQJ,UAAU,CACrBpG,OAAQ,KACRiE,QAAS0D,EAAYM,qBAAqBC,sBACzCR,EAEL,EAAC,GAEI,CAAA,WAAA,MAAA,iBAAiB,aAMtB,OALEb,EAAKL,QAAQJ,UAAU,CACrBpG,OAAQ,OACRiE,QAAS0D,EAAYQ,yBAAyBC,uBAC7CV,EAEL,EAAC,qBAEI,mCAAmC,EAAA,kBACtCb,EAAKL,QAAQP,QAAQ,CACnB1H,KAAM,2BACN8J,SACEV,EAAYW,wCACTC,gCACJb,IAEJ,GAAA,CAAA,WAAA,MAEI,kBAAkB,EAAE,WAAA,IAAAc,EAAAA,WAqCvB,GAAI3B,EAAKL,QAAQiC,0BAGb,OAFF5B,EAAKL,QAAQiC,0BACXd,EAAYe,uBACZhB,EAAA,GAKJb,EAAKV,QAAO,yBACewB,EAAYe,iBAAiBC,UACtD,4BAAA,CACEC,eAAgBjB,EAAYe,iBAAiBC,YAGjD9B,EAAKJ,WAAW5B,YAAY,CAC1BtG,KAAM,qBACNsK,aAAclB,EAAYe,iBAAiBG,aAC3CR,SAAmCV,yBAAAA,EAAYe,iBAAiBC,UAAS,4BACzEG,UAAU,IACTpB,KAAAqB,EAAA,WAAA,GAtDDlC,EAAKL,QAAQhB,YAAYwD,eACvBrB,EAAYe,iBAAiBC,WAC9BtJ,CAAAA,IAAAA,aAAAqI,EAAAlH,CAAAA,EAAAA,EAAAC,EAAA,WAEGtB,OAAAA,QAAAC,QACmByH,EAAKL,QAAQhB,YAChCmC,EAAYe,iBAAiBC,WAC7BhB,EAAYe,iBAAiBO,aAAWzJ,KAFpCiG,SAAAA,GAINoB,EAAKJ,WAAW5B,YAAY,CAC1BtG,KAAM,qBACNsK,aAAclB,EAAYe,iBAAiBG,aAC3CR,SAAU5C,EACVqD,UAAU,GACT,EACL,EAAC,SAAQ5H,GACP2F,EAAKV,QACH,uDACc,MAAXjF,OAAW,EAAXA,EAAa+C,SAChB,CACE2E,eAAgBjB,EAAYe,iBAAiBC,YAGjD9B,EAAKJ,WAAW5B,YAAY,CAC1BtG,KAAM,qBACNsK,aAAclB,EAAYe,iBAAiBG,aAC3CR,SACE,kCAA+C,MAAXnH,OAAW,EAAXA,EAAa+C,SACnD6E,UAAU,GAEd,GAAC,OAAAtI,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAAA,CAAA,CAwBA,GAxBA,OAAA0J,GAAAA,EAAAvJ,KAAAuJ,EAAAvJ,KAAAgJ,GAAAA,GA2BJ,EAAAd,WAAAA,OAAAA,GAAAA,CAAA,GAAA,CAAA,WAAA,MAEI,OAAO,EAAA,kBAERb,EA
AKG,wBAA0BW,EAAYuB,YAAYnB,WAEvDlB,EAAKsC,oBAAoBxB,EAAYuB,YAAYE,eACjDvC,EAAKW,WAAW,kBACjBE,IAEF,GAAA,CAAA,WAAA,MAEI,MAAM,EAAA,WAIN,OAHHb,EAAKJ,WAAW5B,YAAY,CAC1BtG,KAAM,OACNwJ,SAAWJ,EAA0B0B,WAAWtB,gBAC/CL,EAAA,EAIJ,GAAA,MAAA,EAAA,kBAICb,EAAKL,QAAQP,QAAQ0B,QAAaD,IAEnC,KAAA,OAAAE,GAAAA,EAAApI,KAAAoI,EAAApI,KAEL,WAAA,QAFK,CA3HH,CA6HF,aACEqH,EAAKV,QAAQ,6BAA8B,CAAEvE,MAAAA,GAE/C,GACF,CAAC,MAAAV,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEOoI,CAAAA,EAAAA,KAAAA,sBAAwB,SAAC1H,GAC/B,IP1RgC2H,EAC5BnL,EO+RgB,cAAhBwI,EAAKM,QACPN,EAAKH,WAAW5B,YAAY,CAC1B2E,kBPlS4BD,EO0RR3H,EAAMuC,KAAK,GAQuB/F,OPjStDA,EAAS,IAAIH,WAAWsL,GAEX3L,OAAO6L,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBtL,MOoStD,EAACY,KAEO6K,uBAAyB,SAAAC,GAAiC,IAA9B3F,EAAI2F,EAAJ3F,KAChB,YAAdA,EAAK5F,MACPqI,EAAKY,WAAWrD,EAAK4F,SAAW,YAAc,WAElD,EAEQZ,KAAAA,6BAA6Ba,GAAa,IAM7C,OALHlD,EAAKH,OAAOnF,KAAKA,KAAKyD,MAAQ6B,EAAKO,OACnCP,EAAKH,OAAO7H,QAAQmL,KAAKC,YAAY,CAAE3L,KAAM,qBAC7CuI,EAAKH,OAAO7H,QAAQmL,KAAKC,YAAY,CACnC3L,KAAM,SACNH,OAAQX,EAAoBuM,KAC3B7K,QAAAC,SACL,CAAC,MAAA8B,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEO8G,CAAAA,EAAAA,KAAAA,4BAaG,OAXTjB,EAAKS,WAAW,aAChBT,EAAKJ,OAAO7H,QAAQmL,KAAKC,YAAY,CAAE3L,KAAM,cAC7CwI,EAAKJ,OAAOnF,KAAKA,KAAK2I,6BACpB,KACApD,EAAKJ,OAAO/H,QAAQwL,YAAc,GAIpCC,WAAW,WACTtD,EAAKJ,OAAOnF,KAAKA,KAAKyD,MAAQ8B,EAAKM,OACnCN,EAAKJ,OAAO7H,QAAQmL,KAAKC,YAAY,CAAE3L,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA8B,UAAA/B,QAAAgC,OAAAD,EAEOiF,CAAAA,EAAAA,KAAAA,QAAU,SAAClC,EAAiBrF,GAClCyF,QAAQ3D,MAAMuD,EAASrF,GACvBgI,EAAKJ,QAAQL,QAAQlC,EAASrF,EAChC,EAEQ0L,KAAAA,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAcxM,OAChB,SAMF,IADA,IAAIsJ,EAAS,EACJnJ,EAAI,EAAGA,EAAIqM,EAAcxM,OAAQG,IACxCmJ,GAAUkD,EAAcrM,GAAK,IAI/B,OAFAmJ,GAAUkD,EAAcxM,QAER,EAAI,EAAIsJ,EAAS,EAAI,EAAIA,CAC3C,EAEOmD,KAAAA,MAAQ,WAAA,OAAM5D,EAAKH,WAAW1E,cAAc,OAE5C0I,UAAY,SAAAC,GACjB9D,EAAKS,OADqBqD,EAANrD,MAEtB,EAEOsD,KAAAA,0BAA4B,WAKjC,OAJuBC,MAAvBhE,EAAKO,qBAALP,EAAKO,mBAAuB,IAAIlJ,WAC9B2I,EAAKF,MAAM7H,SAASgM,oBAEtBjE,EAAKF,MAAM7H,SAASiM,qBAAqBlE,EAAKO,oBACvCP,EAAKO,kBACd,EAEO4D,KAAAA,2BAA6B,WAKlC,aAJAnE,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAInJ,WAC/B2I,EAAKD,OAAO9H,SAASgM,oBAEvBjE,EAAKD,OAAO9H,SAASiM,qBAAqBlE,EAAKQ,qBACxCR,EAAKQ,mBACd,EAACpI,KAEMgM,eAAiB,WACtB,OAAOpE,EAAK0D,gBAAgB1D,EAAK+D,4BACnC,EAEOM,KAAAA,gBAAkB,WACvB,OAAOrE,EAAK0D,gBAAgB1D,EAAKmE,6BACnC,EA1RmB/L,KAAOwH,QAAPA,EACAxH,KAAUyH,WAAVA,EACDzH,KAAK0H,MAALA,EACA1H,KAAM2H,OAANA,EAEhB3H,KAAKwH,QAAQR,UAAU,CAAEjE,eAAgB0E,EAAW1E,iBAEpD/C,KAAKyH,WAAW3E,OAAOY,iBAAiB,UAAW,SAAAd,GACjDgF,EAAKa,QAAQ7F,EACf,GACA5C,KAAKyH,WAAW3E,OAAOY,iBAAiB,QAAS,SAAAd,GAC/CgF,EAAKW,aAAa,gBAClBX,EAAKT,QAAQ,eAAgBvE,EAC/B,GACA5C,KAAKyH,WAAW3E,OAAOY,iBAAiB,QAAS,WAC/CkE,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQN,cACf,GAEAlH,KAAK0H,MAAM5H,QAAQmL,KAAKiB,UAAYlM,KAAKsK,sBACzCtK,KAAK2H,OAAO7H,QAAQmL,KAAKiB,UAAYlM,KAAK6K,uBAC1C7K,KAAKuI,aAAa,YACpB,QAAChB,EA5DmB4E,aAAA,SAClB3E,GAAwE,IAExE,IAAM4E,EAAWC,EACZ9F,GAAAA,EACAQ,EACAS,GAGL4E,EAAY9E,eAAe,CAAEY,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAAxH,QAAAC,QAAAqB,EAE7B,WAAA,OAAAtB,QAAAC,QACYT,EAAMM,OA9BE,OA8ByBO,KAAA8L,SAAAA,GAAC,OAAhD5E,EAAK4E,EAA2CnM,QAAAC,QAC7ByC,EAAW5C,OAAOuH,IAAQhH,cAAA+L,GAAC,OAA9C9E,EAAU8E,EAAoCpM,QAAAC,QAC/BmC,EAAOtC,OAAOwH,EAAWvH,aAAWM,KAAAgM,SAAAA,GAEnD,OAAW,IAAAjF,EAAa6E,EAAa3E,EAAYC,EAFjDC,EAAM6E,EAE0D,EAAA,EAAA,EAClE,EAAS9K,SAAAA,GAAO,IAAA+K,EAAAC,EAEM,OADpBN,EAAY9E,eAAe,CAAEY,OAAQ,wBACrCuE,EAAAhF,IAAAgF,EAAYxK,QAAQ9B,QAAAC,QACdsM,OADcA,EACdhF,QAAAgF,EAAAA,EAAOzK,SAAOzB,KAAA,WAAA,IAAAmM,EAAAxM,OAAAA,QAAAC,QACduM,OADcA,EACdhF,QAAAgF,EAAAA,EAAQ1K,SAAOzB,KACrB,WAAA,MAAMkB,CAAM,EAAA,EACd,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,KAAAqF,CAAA"}
@@ -1,2 +1,2 @@
1
- function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(s);class i{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const o=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(a),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}});const r=e.createMediaStreamSource(n),u=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(o),o.connect(u),new i(e,o,u,n)}catch(t){var s,o;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(o=e)||o.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const o=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o);class u{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const a=new AudioWorkletNode(e,"audio-concat-processor");return a.connect(s),new u(e,n,s,a)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class l{static async create(t){let e=null;try{var n;const 
s=null!=(n=t.origin)?n:"wss://api.elevenlabs.io",a=t.signedUrl?t.signedUrl:s+"/v1/convai/conversation?agent_id="+t.agentId,i=["convai"];t.authorization&&i.push(`bearer.${t.authorization}`),e=new WebSocket(a,i);const o=await new Promise((t,n)=>{e.addEventListener("error",n),e.addEventListener("close",n),e.addEventListener("message",e=>{const n=JSON.parse(e.data);c(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),r=o.conversation_id,u=parseInt(o.agent_output_audio_format.replace("pcm_",""));return new l(e,r,u)}catch(t){var s;throw null==(s=e)||s.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}}const h={onConnect:()=>{},onDisconnect:()=>{},onError:()=>{},onDebug:()=>{},onMessage:()=>{},onStatusChange:()=>{},onModeChange:()=>{}};class p{static async startSession(e){const n=t({},h,e);n.onStatusChange({status:"connecting"});let s=null,a=null,o=null;try{return s=await i.create(16e3),a=await l.create(e),o=await u.create(a.sampleRate),new p(n,a,s,o)}catch(t){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=a)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(d=o)?void 0:d.close()),t}}constructor(t,s,a,i){var o=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===o.status&&(o.updateStatus("disconnecting"),o.connection.close(),await o.input.close(),await o.output.close(),o.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=t=>{try{const e=JSON.parse(t.data);if(!c(e))return;switch(e.type){case"interruption":e.interruption_event&&(this.lastInterruptTimestamp=e.interruption_event.event_id),this.fadeOutAudio();break;case"agent_response":this.options.onMessage({source:"ai",message:e.agent_response_event.agent_response});break;case"user_transcript":this.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":this.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":this.lastInterruptTimestamp<=e.audio_event.event_id&&(this.addAudioBase64Chunk(e.audio_event.audio_base_64),this.updateMode("speaking"));break;case"ping":this.connection.socket.send(JSON.stringify({type:"pong",event_id:e.ping_event.event_id}));break;default:this.options.onDebug(e)}}catch(e){return void this.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{const n=JSON.stringify({user_audio_chunk:e(t.data[0].buffer)});"connected"===this.status&&this.connection.socket.send(n)},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"}),o.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async 
function(){o.updateMode("listening"),o.output.worklet.port.postMessage({type:"interrupt"}),o.output.gain.gain.exponentialRampToValueAtTime(1e-4,o.output.context.currentTime+2),setTimeout(()=>{o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=a,this.output=i,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
1
+ function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(s);class a{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const i=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(o),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}});const r=e.createMediaStreamSource(n),l=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(i),i.connect(l),new a(e,i,l,n)}catch(t){var s,i;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(i=e)||i.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const o=new AudioWorkletNode(e,"audio-concat-processor");return o.connect(s),new l(e,n,s,o)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class u{static async create(t){let e=null;try{var n;const 
s=null!=(n=t.origin)?n:"wss://api.elevenlabs.io",o=t.signedUrl?t.signedUrl:s+"/v1/convai/conversation?agent_id="+t.agentId,a=["convai"];t.authorization&&a.push(`bearer.${t.authorization}`),e=new WebSocket(o,a);const i=await new Promise((n,s)=>{e.addEventListener("open",()=>{if(t.overrides){var n,s,o,a,i;const r={type:"conversation_initiation_client_data",conversation_initiation_client_data:{custom_llm_extra_body:t.overrides.customLlmExtraBody,agent:{prompt:null==(n=t.overrides.agent)?void 0:n.prompt,first_message:null==(s=t.overrides.agent)?void 0:s.firstMessage,language:null==(o=t.overrides.agent)?void 0:o.language},tts:{voice_id:null==(a=t.overrides.tts)?void 0:a.voiceId}}};null==(i=e)||i.send(JSON.stringify(r))}},{once:!0}),e.addEventListener("error",s),e.addEventListener("close",s),e.addEventListener("message",t=>{const e=JSON.parse(t.data);c(e)&&("conversation_initiation_metadata"===e.type?n(e.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),r=i.conversation_id,l=parseInt(i.agent_output_audio_format.replace("pcm_",""));return new u(e,r,l)}catch(t){var s;throw null==(s=e)||s.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}sendMessage(t){this.socket.send(JSON.stringify(t))}}const h={clientTools:{}},d={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{}};class p{static async startSession(e){const n=t({},h,d,e);n.onStatusChange({status:"connecting"});let s=null,o=null,i=null;try{return s=await a.create(16e3),o=await u.create(e),i=await l.create(o.sampleRate),new p(n,o,s,i)}catch(t){var r,c,f;throw n.onStatusChange({status:"disconnected"}),null==(r=o)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(f=i)?void 0:f.close()),t}}constructor(t,s,o,a){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await i.output.close(),i.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=async function(t){try{const e=JSON.parse(t.data);if(!c(e))return;switch(e.type){case"interruption":e.interruption_event&&(i.lastInterruptTimestamp=e.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:e.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(e.client_tool_call.tool_name)){try{const t=await i.options.clientTools[e.client_tool_call.tool_name](e.client_tool_call.parameters);i.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,response:t,is_error:!1})}catch(t){i.onError("Client tool execution 
failed with following error: "+(null==t?void 0:t.message),{clientToolName:e.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,response:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(e.client_tool_call);break}i.onError(`Client tool with name ${e.client_tool_call.tool_name} is not defined on client`,{clientToolName:e.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,response:`Client tool with name ${e.client_tool_call.tool_name} is not defined on client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=e.audio_event.event_id&&(i.addAudioBase64Chunk(e.audio_event.audio_base_64),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:e.ping_event.event_id});break;default:i.options.onDebug(e)}}catch(e){return void i.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:e(t.data[0].buffer)})},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=o,this.output=a,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
2
2
  //# sourceMappingURL=lib.modern.js.map