@11labs/client 0.0.4 → 0.0.5-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -11
- package/dist/index.d.ts +12 -3
- package/dist/lib.cjs +1 -1
- package/dist/lib.cjs.map +1 -1
- package/dist/lib.modern.js +1 -1
- package/dist/lib.modern.js.map +1 -1
- package/dist/lib.module.js +1 -1
- package/dist/lib.module.js.map +1 -1
- package/dist/lib.umd.js +1 -1
- package/dist/lib.umd.js.map +1 -1
- package/dist/utils/connection.d.ts +6 -1
- package/dist/utils/events.d.ts +7 -1
- package/dist/utils/input.d.ts +5 -1
- package/dist/utils/output.d.ts +2 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -34,11 +34,16 @@ First, initialize the Conversation instance:
 const conversation = await Conversation.startSession(options);
 ```
 
-This will kick off the websocket connection and start using microphone to communicate with the ElevenLabs Conversational AI agent. Consider explaining and allowing microphone access in your apps UI before the Conversation kicks off:
+This will kick off the websocket connection and start using the microphone to communicate with the ElevenLabs Conversational AI agent. Consider explaining and allowing microphone access in your app's UI before the Conversation kicks off. The microphone may also be blocked for the current page by default, resulting in the allow prompt not showing up at all. You should handle such a case in your application and display an appropriate message to the user:
 
 ```js
 // call after explaining to the user why the microphone access is needed
-
+// handle errors and show an appropriate message to the user
+try {
+  await navigator.mediaDevices.getUserMedia({ audio: true });
+} catch {
+  // handle error
+}
 ```
 
 #### Session configuration
@@ -107,6 +112,7 @@ The options passed to `startSession` can also be used to register optional callb
 - **onError** - handler called when an error is encountered.
 - **onStatusChange** - handler called whenever connection status changes. Can be `connected`, `connecting` and `disconnected` (initial).
 - **onModeChange** - handler called when a status changes, eg. agent switches from `speaking` to `listening`, or the other way around.
+- **onCanSendFeedbackChange** - handler called when sending feedback becomes available or unavailable.
 
 #### Client Tools
 
@@ -115,25 +121,25 @@ Client tools are a way to enabled agent to invoke client-side functionality. Thi
 Client tools definition is an object of functions, and needs to be identical with your configuration within the [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai), where you can name and describe different tools, as well as set up the parameters passed by the agent.
 
 ```ts
-const conversation = await Conversation.startSession({
+const conversation = await Conversation.startSession({
   clientTools: {
-    displayMessage: async (parameters: {text: string}) => {
+    displayMessage: async (parameters: { text: string }) => {
       alert(parameters.text);
-
+
       return "Message displayed";
-    }
-  }
+    },
+  },
 });
 ```
 
-In case the function returns a value, it will be passed back to the agent as a response.
+In case the function returns a value, it will be passed back to the agent as a response.
 Note that the tool needs to be explicitly set to be blocking conversation in ElevenLabs UI for the agent to await and react to the response, otherwise the agent assumes success and continues the conversation.
 
 #### Conversation overrides
 
 You may choose to override various settings of the conversation and set them dynamically based on other user interactions.
-We support overriding various settings.
-These settings are optional and can be used to customize the conversation experience.
+We support overriding various settings.
+These settings are optional and can be used to customize the conversation experience.
 The following settings are available:
 
 ```ts
@@ -147,12 +153,22 @@ const conversation = await Conversation.startSession({
       language: "en",
     },
     tts: {
-      voiceId: "custom voice id"
+      voiceId: "custom voice id",
     },
   },
 });
 ```
 
+#### Prefer Headphones for iOS Devices
+
+While this SDK leaves the choice of audio input/output device to the browser/system, iOS Safari seems to prefer the built-in speaker over headphones even when a Bluetooth device is in use. If you want to "force" the use of headphones on iOS devices when available, you can use the following option. Please keep in mind that this is not guaranteed, since this functionality is not provided by the browser. System audio should be the default choice.
+
+```ts
+const conversation = await Conversation.startSession({
+  preferHeadphonesForIosDevices: true,
+});
+```
+
 #### Return value
 
 `startSession` returns a `Conversation` instance that can be used to control the session. The method will throw an error if the session cannot be established. This can happen if the user denies microphone access, or if the websocket connection
@@ -167,6 +183,17 @@ Afterwards the conversation instance will be unusable and can be safely discarde
 await conversation.endSession();
 ```
 
+##### sendFeedback
+
+A method for sending binary feedback to the agent.
+The method accepts a boolean value, where `true` represents positive feedback and `false` negative feedback.
+Feedback is always correlated to the most recent agent response and can be sent only once per response.
+You can listen to `onCanSendFeedbackChange` to know if feedback can be sent at the given moment.
+
+```js
+conversation.sendFeedback(true);
+```
+
 ##### getId
 
 A method returning the conversation ID.
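The feedback additions in this README pair the new `onCanSendFeedbackChange` callback with the `sendFeedback` method. A minimal sketch of how they might be wired together in a UI, based on the README additions above; the `#thumbs-up` button element is purely illustrative and not part of the SDK:

```ts
import { Conversation } from "@11labs/client";

// Hypothetical feedback button; disabled until feedback can be sent.
const thumbsUpButton = document.querySelector<HTMLButtonElement>("#thumbs-up")!;
thumbsUpButton.disabled = true;

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  onCanSendFeedbackChange: ({ canSendFeedback }) => {
    // Feedback targets the latest agent response and can be sent once per response.
    thumbsUpButton.disabled = !canSendFeedback;
  },
});

thumbsUpButton.addEventListener("click", () => {
  // true = positive feedback, false = negative feedback.
  conversation.sendFeedback(true);
});
```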
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { Input } from "./utils/input";
+import { Input, InputConfig } from "./utils/input";
 import { Output } from "./utils/output";
 import { SessionConfig } from "./utils/connection";
 import { ClientToolCallEvent } from "./utils/events";
@@ -7,7 +7,7 @@ export type { SessionConfig } from "./utils/connection";
 export type Role = "user" | "ai";
 export type Mode = "speaking" | "listening";
 export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
-export type Options = SessionConfig & Callbacks & ClientToolsConfig;
+export type Options = SessionConfig & Callbacks & ClientToolsConfig & InputConfig;
 export type ClientToolsConfig = {
     clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
 };
@@ -28,6 +28,9 @@ export type Callbacks = {
     onStatusChange: (prop: {
         status: Status;
     }) => void;
+    onCanSendFeedbackChange: (prop: {
+        canSendFeedback: boolean;
+    }) => void;
     onUnhandledClientToolCall?: (params: ClientToolCallEvent["client_tool_call"]) => void;
 };
 export declare class Conversation {
@@ -35,17 +38,21 @@ export declare class Conversation {
     private readonly connection;
     readonly input: Input;
     readonly output: Output;
-    static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>): Promise<Conversation>;
+    static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig> & Partial<InputConfig>): Promise<Conversation>;
     private lastInterruptTimestamp;
     private mode;
     private status;
     private inputFrequencyData?;
     private outputFrequencyData?;
     private volume;
+    private currentEventId;
+    private lastFeedbackEventId;
+    private canSendFeedback;
     private constructor();
     endSession: () => Promise<void>;
     private updateMode;
     private updateStatus;
+    private updateCanSendFeedback;
     private onEvent;
     private onInputWorkletMessage;
     private onOutputWorkletMessage;
@@ -61,4 +68,6 @@ export declare class Conversation {
     getOutputByteFrequencyData: () => Uint8Array;
     getInputVolume: () => number;
     getOutputVolume: () => number;
+    sendFeedback: (like: boolean) => void;
 }
+export declare function postOverallFeedback(conversationId: string, like: boolean, origin?: string): Promise<Response>;
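Alongside the per-response `sendFeedback` method, the typings now export a standalone `postOverallFeedback` function; its implementation is visible in the rebuilt `lib.cjs` below as a `fetch` POST to `/v1/convai/conversations/<id>/feedback`. A minimal usage sketch, assuming the conversation ID is captured before the session ends:

```ts
import { Conversation, postOverallFeedback } from "@11labs/client";

const conversation = await Conversation.startSession({ agentId: "<your-agent-id>" });
const conversationId = conversation.getId();

// ...conversation happens...
await conversation.endSession();

// Posts { feedback: "like" } (or "dislike" when false) for the whole conversation.
const response = await postOverallFeedback(conversationId, true);
if (!response.ok) {
  console.warn("Failed to submit overall feedback:", response.status);
}
```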
package/dist/lib.cjs
CHANGED
@@ -1,2 +1,2 @@
-
function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var o in n)({}).hasOwnProperty.call(n,o)&&(t[o]=n[o])}return t},t.apply(null,arguments)}function e(t){for(var e=window.atob(t),n=e.length,o=new Uint8Array(n),r=0;r<n;r++)o[r]=e.charCodeAt(r);return o.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(n),r=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=o}return t.create=function(e){try{var n=null,r=null;return Promise.resolve(function(i,s){try{var a=function(){function i(){return Promise.resolve(n.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:e},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}}})).then(function(e){var o=n.createMediaStreamSource(r=e),i=new AudioWorkletNode(n,"raw-audio-processor");return o.connect(a),a.connect(i),new t(n,a,i,r)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(s?{sampleRate:e}:{})).createAnalyser(),u=function(){if(!s)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(i):i()}()}catch(t){return s(t)}return a&&a.then?a.then(void 0,s):a}(0,function(t){var e,o;throw null==(e=r)||e.getTracks().forEach(function(t){return t.stop()}),null==(o=n)||o.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(t){return t.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}(),i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(i),a=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=o}return t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=(n=new 
AudioContext({sampleRate:e})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(s)).then(function(){var e=new AudioWorkletNode(n,"audio-concat-processor");return e.connect(u),new t(n,a,u,e)}))}catch(t){return r(t)}var a,u;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}();function u(t){return!!t.type}var c=/*#__PURE__*/function(){function t(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=null!=(s=e.origin)?s:"wss://api.elevenlabs.io",c=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,l=["convai"],e.authorization&&l.push("bearer."+e.authorization),n=new WebSocket(c,l),Promise.resolve(new Promise(function(t,o){n.addEventListener("open",function(){var t,o,r,i,s,a={type:"conversation_initiation_client_data"};e.overrides&&(a.conversation_config_override={agent:{prompt:null==(o=e.overrides.agent)?void 0:o.prompt,first_message:null==(r=e.overrides.agent)?void 0:r.firstMessage,language:null==(i=e.overrides.agent)?void 0:i.language},tts:{voice_id:null==(s=e.overrides.tts)?void 0:s.voiceId}}),e.customLlmExtraBody&&(a.custom_llm_extra_body=e.customLlmExtraBody),null==(t=n)||t.send(JSON.stringify(a))},{once:!0}),n.addEventListener("error",o),n.addEventListener("close",o),n.addEventListener("message",function(e){var n=JSON.parse(e.data);u(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(e){var o=e.conversation_id,r=parseInt(e.agent_output_audio_format.replace("pcm_",""));return new t(n,o,r)}))}catch(t){return r(t)}var s,a,c,l;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}};var e=t.prototype;return e.close=function(){this.socket.close()},e.sendMessage=function(t){this.socket.send(JSON.stringify(t))},t}();function l(t,e){try{var n=t()}catch(t){return e(t)}return n&&n.then?n.then(void 0,e):n}function h(t,e,n){if(!t.s){if(n instanceof d){if(!n.s)return void(n.o=h.bind(null,t,e));1&e&&(e=n.s),n=n.v}if(n&&n.then)return void n.then(h.bind(null,t,e),h.bind(null,t,2));t.s=e,t.v=n;var o=t.o;o&&o(t)}}var f={clientTools:{}},d=/*#__PURE__*/function(){function t(){}return t.prototype.then=function(e,n){var o=new t,r=this.s;if(r){var i=1&r?e:n;if(i){try{h(o,1,i(this.v))}catch(t){h(o,2,t)}return o}return this}return this.o=function(t){try{var r=t.v;1&t.s?h(o,1,e?e(r):r):n?h(o,1,n(r)):h(o,2,r)}catch(t){h(o,2,t)}},o},t}(),p={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){}};exports.Conversation=/*#__PURE__*/function(){function n(t,n,o,r){var i=this,s=this,a=this,c=this,f=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==s.status?Promise.resolve():(s.updateStatus("disconnecting"),s.connection.close(),Promise.resolve(s.input.close()).then(function(){return Promise.resolve(s.output.close()).then(function(){s.updateStatus("disconnected")})}))}catch(t){return Promise.reject(t)}},this.updateMode=function(t){t!==i.mode&&(i.mode=t,i.options.onModeChange({mode:t}))},this.updateStatus=function(t){t!==i.status&&(i.status=t,i.options.onStatusChange({status:t}))},this.onEvent=function(t){try{return Promise.resolve(l(function(){var e,n=JSON.parse(t.data);if(u(n)){var o=function(t,e){var n,o=-1;t:{for(var r=0;r<e.length;r++){var i=e[r][0];if(i){var s=i();if(s&&s.then)break t;if(s===t){o=r;break}}else o=r}if(-1!==o){do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then){n=!0;break t}var c=e[o][2];o++}while(c&&!c());return u}}var l=new d,f=h.bind(null,l,2);return(n?u.then(p):s.then(function n(s){for(;;){if(s===t){o=r;break}if(++r===e.length){if(-1!==o)break;return void h(l,1,u)}if(i=e[r][0]){if((s=i())&&s.then)return void s.then(n).then(void 0,f)}else o=r}do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then)return void u.then(p).then(void 0,f);var c=e[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,f),l;function p(t){for(;;){var n=e[o][2];if(!n||n())break;o++;for(var r=e[o][1];!r;)o++,r=e[o][1];if((t=r())&&t.then)return void t.then(p).then(void 0,f)}h(l,1,t)}}(n.type,[[function(){return"interruption"},function(){return n.interruption_event&&(a.lastInterruptTimestamp=n.interruption_event.event_id),a.fadeOutAudio(),void(e=1)}],[function(){return"agent_response"},function(){return a.options.onMessage({source:"ai",message:n.agent_response_event.agent_response}),void(e=1)}],[function(){return"user_transcript"},function(){return a.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript}),void(e=1)}],[function(){return"internal_tentative_agent_response"},function(){return a.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response}),void(e=1)}],[function(){return"client_tool_call"},function(){var t=function(){if(a.options.onUnhandledClientToolCall)return a.options.onUnhandledClientToolCall(n.client_tool_call),void(e=1);a.onError("Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",is_error:!0}),e=1},o=function(){if(a.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){var t=function(){e=1},o=l(function(){return Promise.resolve(a.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters)).then(function(t){a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:t,is_error:!1})})},function(t){a.onError("Client tool execution failed with following error: "+(null==t?void 0:t.message),{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})});return o&&o.then?o.then(t):t()}}();return o&&o.then?o.then(t):t()},function(){return e||e}],[function(){return"audio"},function(){return 
a.lastInterruptTimestamp<=n.audio_event.event_id&&(a.addAudioBase64Chunk(n.audio_event.audio_base_64),a.updateMode("speaking")),void(e=1)}],[function(){return"ping"},function(){return a.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id}),void(e=1)}],[void 0,function(){return a.options.onDebug(n),void(e=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){a.onError("Failed to parse event data",{event:t})}))}catch(t){return Promise.reject(t)}},this.onInputWorkletMessage=function(t){var e,n;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(e=t.data[0].buffer,n=new Uint8Array(e),window.btoa(String.fromCharCode.apply(String,n)))})},this.onOutputWorkletMessage=function(t){var e=t.data;"process"===e.type&&i.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(t){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:e(t)}),Promise.resolve()}catch(t){return Promise.reject(t)}},this.fadeOutAudio=function(){try{return f.updateMode("listening"),f.output.worklet.port.postMessage({type:"interrupt"}),f.output.gain.gain.exponentialRampToValueAtTime(1e-4,f.output.context.currentTime+2),setTimeout(function(){f.output.gain.gain.value=f.volume,f.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(t){return Promise.reject(t)}},this.onError=function(t,e){console.error(t,e),i.options.onError(t,e)},this.calculateVolume=function(t){if(0===t.length)return 0;for(var e=0,n=0;n<t.length;n++)e+=t[n]/255;return(e/=t.length)<0?0:e>1?1:e},this.getId=function(){return i.connection.conversationId},this.setVolume=function(t){i.volume=t.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.options=t,this.connection=n,this.input=o,this.output=r,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(t){i.onEvent(t)}),this.connection.socket.addEventListener("error",function(t){i.updateStatus("disconnected"),i.onError("Socket error",t)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(e){try{var o=t({},f,p,e);o.onStatusChange({status:"connecting"});var i=null,s=null,u=null;return Promise.resolve(l(function(){return Promise.resolve(r.create(16e3)).then(function(t){return i=t,Promise.resolve(c.create(e)).then(function(t){return s=t,Promise.resolve(a.create(s.sampleRate)).then(function(t){return new n(o,s,i,u=t)})})})},function(t){var e,n;return o.onStatusChange({status:"disconnected"}),null==(e=s)||e.close(),Promise.resolve(null==(n=i)?void 0:n.close()).then(function(){var e;return 
Promise.resolve(null==(e=u)?void 0:e.close()).then(function(){throw t})})}))}catch(t){return Promise.reject(t)}},n}();
+
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},e.apply(null,arguments)}function n(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}var t=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(t),r=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}return e.create=function(n){var t=n.sampleRate,r=n.format,i=n.preferHeadphonesForIosDevices;try{var a=null,s=null;return Promise.resolve(function(n,u){try{var c=(l={sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}},Promise.resolve(navigator.mediaDevices.getUserMedia({audio:!0})).then(function(n){function u(){function n(){return Promise.resolve(a.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:l})).then(function(n){var o=a.createMediaStreamSource(s=n),i=new AudioWorkletNode(a,"raw-audio-processor");return i.port.postMessage({type:"setFormat",format:r,sampleRate:t}),o.connect(u),u.connect(i),new e(a,u,i,s)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(a=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),c=function(){if(!i)return Promise.resolve(a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return c&&c.then?c.then(n):n()}null==n||n.getTracks().forEach(function(e){return e.stop()});var c=function(){if((["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&i)return Promise.resolve(window.navigator.mediaDevices.enumerateDevices()).then(function(e){var n=e.find(function(e){return"audioinput"===e.kind&&["airpod","headphone","earphone"].find(function(n){return e.label.toLowerCase().includes(n)})});n&&(l.deviceId={ideal:n.deviceId})})}();return c&&c.then?c.then(u):u()}))}catch(e){return u(e)}var l;return c&&c.then?c.then(void 0,u):c}(0,function(e){var n,t;throw null==(n=s)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=a)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),i=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case 
"buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(i),s=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,i){try{var s=(u=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(u),u.connect(r.destination),Promise.resolve(r.audioWorklet.addModule(a)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),new e(r,u,c,n)}))}catch(e){return i(e)}var u,c;return s&&s.then?s.then(void 0,i):s}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,n,t,o){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=n,this.inputFormat=t,this.outputFormat=o}e.create=function(n){try{var t=null;return Promise.resolve(function(o,r){try{var i=(s=null!=(a=n.origin)?a:"wss://api.elevenlabs.io",c=n.signedUrl?n.signedUrl:s+"/v1/convai/conversation?agent_id="+n.agentId,d=["convai"],n.authorization&&d.push("bearer."+n.authorization),t=new WebSocket(c,d),Promise.resolve(new Promise(function(e,o){t.addEventListener("open",function(){var e,o,r,i,a,s={type:"conversation_initiation_client_data"};n.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=n.overrides.agent)?void 0:o.prompt,first_message:null==(r=n.overrides.agent)?void 0:r.firstMessage,language:null==(i=n.overrides.agent)?void 0:i.language},tts:{voice_id:null==(a=n.overrides.tts)?void 0:a.voiceId}}),n.customLlmExtraBody&&(s.custom_llm_extra_body=n.customLlmExtraBody),null==(e=t)||e.send(JSON.stringify(s))},{once:!0}),t.addEventListener("error",o),t.addEventListener("close",o),t.addEventListener("message",function(n){var t=JSON.parse(n.data);u(t)&&("conversation_initiation_metadata"===t.type?e(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(n){var 
o=n.conversation_id,r=n.agent_output_audio_format,i=n.user_input_audio_format,a=l(null!=i?i:"pcm_16000"),s=l(r);return new e(t,o,a,s)}))}catch(e){return r(e)}var a,s,c,d;return i&&i.then?i.then(void 0,r):i}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var n=e.prototype;return n.close=function(){this.socket.close()},n.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function l(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=parseInt(o);if(isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}function d(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var f={clientTools:{}};function p(e,n,t){if(!e.s){if(t instanceof v){if(!t.s)return void(t.o=p.bind(null,e,n));1&n&&(n=t.s),t=t.v}if(t&&t.then)return void t.then(p.bind(null,e,n),p.bind(null,e,2));e.s=n,e.v=t;var o=e.o;o&&o(e)}}var h={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},v=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(n,t){var o=new e,r=this.s;if(r){var i=1&r?n:t;if(i){try{p(o,1,i(this.v))}catch(e){p(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?p(o,1,n?n(r):r):t?p(o,1,t(r)):p(o,2,r)}catch(e){p(o,2,e)}},o},e}();exports.Conversation=/*#__PURE__*/function(){function t(e,t,o,r){var i=this,a=this,s=this,c=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=function(){try{return"connected"!==a.status?Promise.resolve():(a.updateStatus("disconnecting"),a.connection.close(),Promise.resolve(a.input.close()).then(function(){return Promise.resolve(a.output.close()).then(function(){a.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.updateCanSendFeedback=function(){var e=i.currentEventId!==i.lastFeedbackEventId;i.canSendFeedback!==e&&(i.canSendFeedback=e,i.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=function(e){try{return Promise.resolve(d(function(){var n,t=JSON.parse(e.data);if(u(t)){var o=function(e,n){var t,o=-1;e:{for(var r=0;r<n.length;r++){var i=n[r][0];if(i){var a=i();if(a&&a.then)break e;if(a===e){o=r;break}}else o=r}if(-1!==o){do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then){t=!0;break e}var c=n[o][2];o++}while(c&&!c());return u}}var l=new v,d=p.bind(null,l,2);return(t?u.then(f):a.then(function t(a){for(;;){if(a===e){o=r;break}if(++r===n.length){if(-1!==o)break;return void p(l,1,u)}if(i=n[r][0]){if((a=i())&&a.then)return void a.then(t).then(void 0,d)}else o=r}do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then)return void u.then(f).then(void 0,d);var c=n[o][2];o++}while(c&&!c());p(l,1,u)})).then(void 0,d),l;function f(e){for(;;){var t=n[o][2];if(!t||t())break;o++;for(var r=n[o][1];!r;)o++,r=n[o][1];if((e=r())&&e.then)return void e.then(f).then(void 0,d)}p(l,1,e)}}(t.type,[[function(){return"interruption"},function(){return 
t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio(),void(n=1)}],[function(){return"agent_response"},function(){return s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response}),void(n=1)}],[function(){return"user_transcript"},function(){return s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript}),void(n=1)}],[function(){return"internal_tentative_agent_response"},function(){return s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response}),void(n=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(s.options.onUnhandledClientToolCall)return s.options.onUnhandledClientToolCall(t.client_tool_call),void(n=1);s.onError("Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",is_error:!0}),n=1},o=function(){if(s.options.clientTools.hasOwnProperty(t.client_tool_call.tool_name)){var e=function(){n=1},o=d(function(){return Promise.resolve(s.options.clientTools[t.client_tool_call.tool_name](t.client_tool_call.parameters)).then(function(e){s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:e,is_error:!1})})},function(e){s.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return n||n}],[function(){return"audio"},function(){return s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.currentEventId=t.audio_event.event_id,s.updateCanSendFeedback(),s.updateMode("speaking")),void(n=1)}],[function(){return"ping"},function(){return s.connection.sendMessage({type:"pong",event_id:t.ping_event.event_id}),void(n=1)}],[void 0,function(){return s.options.onDebug(t),void(n=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){s.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var n,t;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(n=e.data[0].buffer,t=new Uint8Array(n),window.btoa(String.fromCharCode.apply(String,t)))})},this.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&i.updateMode(n.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return 
Promise.reject(e)}},this.onError=function(e,n){console.error(e,n),i.options.onError(e,n)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.sendFeedback=function(e){i.canSendFeedback?(i.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:i.currentEventId}),i.lastFeedbackEventId=i.currentEventId,i.updateCanSendFeedback()):console.warn(0===i.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=t,this.input=o,this.output=r,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return t.startSession=function(n){try{var o=e({},f,h,n);o.onStatusChange({status:"connecting"}),o.onCanSendFeedbackChange({canSendFeedback:!1});var i=null,a=null,u=null;return Promise.resolve(d(function(){return Promise.resolve(c.create(n)).then(function(c){return a=c,Promise.resolve(Promise.all([r.create(e({},a.inputFormat,{preferHeadphonesForIosDevices:n.preferHeadphonesForIosDevices})),s.create(a.outputFormat)])).then(function(e){return new t(o,a,i=e[0],u=e[1])})})},function(e){var n,t;return o.onStatusChange({status:"disconnected"}),null==(n=a)||n.close(),Promise.resolve(null==(t=i)?void 0:t.close()).then(function(){var n;return Promise.resolve(null==(n=u)?void 0:n.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},t}(),exports.postOverallFeedback=function(e,n,t){return void 0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})};
//# sourceMappingURL=lib.cjs.map
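Besides feedback, the rebuilt bundle adds µ-law audio support: the connection now reads both `agent_output_audio_format` and `user_input_audio_format` from the conversation metadata and parses them with a small format helper instead of assuming PCM output only. An unminified sketch of that helper as it appears in the new `lib.cjs` above (names are reconstructed, not from the published source):

```ts
// Parses strings like "pcm_16000" or "ulaw_8000" into a format/sample-rate pair,
// mirroring the parse helper visible in the minified bundle.
type FormatConfig = { format: "pcm" | "ulaw"; sampleRate: number };

function parseFormat(formatString: string): FormatConfig {
  const [format, sampleRatePart] = formatString.split("_");
  if (format !== "pcm" && format !== "ulaw") {
    throw new Error(`Invalid format: ${formatString}`);
  }
  const sampleRate = parseInt(sampleRatePart, 10);
  if (isNaN(sampleRate)) {
    throw new Error(`Invalid sample rate: ${sampleRatePart}`);
  }
  return { format, sampleRate };
}
```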
package/dist/lib.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.cjs","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n noiseSuppression: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await 
this.context.close();\n }\n}\n","import { Language } from \"./connection\";\n\nexport type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AgentAudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: {\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n ping_ms?: number;\n };\n};\nexport type ClientToolCallEvent = {\n type: \"client_tool_call\";\n client_tool_call: {\n tool_name: string;\n tool_call_id: string;\n parameters: any;\n expects_response: boolean;\n };\n};\n\n// TODO correction missing\nexport type IncomingSocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AgentAudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent\n | ClientToolCallEvent;\n\nexport type PongEvent = {\n type: \"pong\";\n event_id: number;\n};\nexport type UserAudioEvent = {\n user_audio_chunk: string;\n};\nexport type ClientToolResultEvent = {\n type: \"client_tool_result\";\n tool_call_id: string;\n result: any;\n is_error: boolean;\n};\nexport type InitiationClientDataEvent = {\n type: \"conversation_initiation_client_data\";\n conversation_config_override?: {\n agent?: {\n prompt?: {\n prompt?: string;\n };\n first_message?: string;\n language?: Language;\n };\n tts?: {\n voice_id?: string;\n };\n };\n custom_llm_extra_body?: any;\n};\nexport type OutgoingSocketEvent =\n | PongEvent\n | UserAudioEvent\n | InitiationClientDataEvent\n | ClientToolResultEvent;\n\nexport function isValidSocketEvent(event: any): event is IncomingSocketEvent {\n return !!event.type;\n}\n","import {\n InitiationClientDataEvent,\n ConfigEvent,\n isValidSocketEvent,\n OutgoingSocketEvent,\n} from \"./events\";\n\nconst MAIN_PROTOCOL = \"convai\";\n\nexport type Language =\n | \"en\"\n | \"ja\"\n | \"zh\"\n | \"de\"\n | \"hi\"\n | \"fr\"\n | \"ko\"\n | \"pt\"\n | \"it\"\n | \"es\"\n | \"id\"\n | \"nl\"\n | \"tr\"\n | \"pl\"\n | \"sv\"\n | \"bg\"\n | \"ro\"\n | \"ar\"\n | \"cs\"\n | \"el\"\n | \"fi\"\n | \"ms\"\n | \"da\"\n | \"ta\"\n | \"uk\"\n | \"ru\"\n | \"hu\"\n | \"no\"\n | \"vi\";\nexport type SessionConfig = {\n origin?: string;\n authorization?: string;\n overrides?: {\n agent?: {\n prompt?: {\n prompt?: string;\n };\n firstMessage?: string;\n language?: Language;\n };\n tts?: {\n voiceId?: string;\n };\n };\n customLlmExtraBody?: any;\n} & (\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined }\n);\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin = config.origin ?? WSS_API_ORIGIN;\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + WSS_API_PATHNAME + config.agentId;\n\n const protocols = [MAIN_PROTOCOL];\n if (config.authorization) {\n protocols.push(`bearer.${config.authorization}`);\n }\n socket = new WebSocket(url, protocols);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\n \"open\",\n () => {\n const overridesEvent: InitiationClientDataEvent = {\n type: \"conversation_initiation_client_data\",\n };\n\n if (config.overrides) {\n overridesEvent.conversation_config_override = {\n agent: {\n prompt: config.overrides.agent?.prompt,\n first_message: config.overrides.agent?.firstMessage,\n language: config.overrides.agent?.language,\n },\n tts: {\n voice_id: config.overrides.tts?.voiceId,\n },\n };\n }\n\n if (config.customLlmExtraBody) {\n overridesEvent.custom_llm_extra_body = config.customLlmExtraBody;\n }\n\n socket?.send(JSON.stringify(overridesEvent));\n },\n { once: true }\n );\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n\n public sendMessage(message: OutgoingSocketEvent) {\n this.socket.send(JSON.stringify(message));\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport {\n ClientToolCallEvent,\n isValidSocketEvent,\n PingEvent,\n} from \"./utils/events\";\n\nexport type { IncomingSocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks & ClientToolsConfig;\nexport type ClientToolsConfig = {\n clientTools: Record<\n string,\n (\n parameters: any\n ) => Promise<string | number | void> | string | number | void\n >;\n};\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onDisconnect: () => void;\n onError: (message: string, context?: any) => void;\n onMessage: (props: { message: string; source: Role }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onUnhandledClientToolCall?: (\n params: ClientToolCallEvent[\"client_tool_call\"]\n ) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 
16000;\n\nconst defaultClientTools = { clientTools: {} };\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDebug: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onMessage: () => {},\n onModeChange: () => {},\n onStatusChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultClientTools,\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = async (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n 
response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"client_tool_call\": {\n if (\n this.options.clientTools.hasOwnProperty(\n parsedEvent.client_tool_call.tool_name\n )\n ) {\n try {\n const result = await this.options.clientTools[\n parsedEvent.client_tool_call.tool_name\n ](parsedEvent.client_tool_call.parameters) ?? \"Client tool execution successful.\"; // default client-tool call response\n\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result: result,\n is_error: false,\n });\n } catch (e) {\n this.onError(\n \"Client tool execution failed with following error: \" +\n (e as Error)?.message,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result: \"Client tool execution failed: \" + (e as Error)?.message,\n is_error: true,\n });\n }\n\n break;\n }\n\n if (this.options.onUnhandledClientToolCall) {\n this.options.onUnhandledClientToolCall(\n parsedEvent.client_tool_call\n );\n\n break;\n }\n\n this.onError(\n `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result: `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n is_error: true,\n });\n\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.sendMessage({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n });\n // parsedEvent.ping_event.ping_ms can be used on client side, for example\n // to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n if (this.status === \"connected\") {\n this.connection.sendMessage({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n 
};\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","noiseSuppression","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","prototype","audioConcatProcessor","Output","gain","createGain","destination","_proto","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_config$origin","url","signedUrl","agentId","protocols","authorization","push","WebSocket","addEventListener","_socket","_config$overrides$age","_config$overrides$age2","_config$overrides$age3","_config$overrides$tts","overridesEvent","overrides","conversation_config_override","agent","prompt","first_message","firstMessage","language","tts","voice_id","voiceId","customLlmExtraBody","custom_llm_extra_body","send","JSON","stringify","once","message","parse","data","conversation_initiation_metadata_event","console","warn","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket2","sendMessage","_settle","pact","state","value","s","_Pact","o","bind","observer","defaultClientTools","clientTools","result","onFulfilled","onRejected","callback","v","_this","defaultCallbacks","onConnect","onDebug","onDisconnect","onError","onMessage","onModeChange","onStatusChange","Conversation","options","connection","input","output","_this2","_this3","_this4","_this5","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","_interrupt","parsedEvent","_temp5","_switch","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","_temp4","onUnhandledClientToolCall","client_tool_call","tool_name","clientToolName","tool_call_id","is_error","_temp3","hasOwnProperty","parameters","audio_event","addAudioBase64Chunk","audio_base_64","ping_event","onInputWorkletMessage","b","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","_ref","finished","chunk","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","_ref2","getInputByteFrequencyData","_this2$inputFrequency","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAAK,WAwChB,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAAwBC,KAHxBJ,aAAA,EAAAI,KACAH,cAAA,EAAAG,KACAF,aAAA,EAAAE,KACAD,iBAHA,EAAAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EAC
AE,KAAWD,YAAXA,CACf,CAKF,OALGJ,EA5CgBM,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAAA,WAEvCC,SAAAA,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,GAC3BE,iBAAkB,CAAEF,OAAO,OAE7BL,KAAAQ,SAAAA,GAEF,IAAMC,EAASrB,EAAQsB,wBARvBnB,EAAWiB,GASLlB,EAAU,IAAIqB,iBAAiBvB,EAAS,uBAK9C,OAHAqB,EAAOG,QAAQvB,GACfA,EAASuB,QAAQtB,OAENH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAAA,EAAA,CA1B1D,IAAMsB,EACJZ,UAAUC,aAAaY,0BAA0BpB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO2C,aACnBF,EAA+B,CAAEnB,WAAAA,GAAe,CAAE,IAE3BsB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4B,OAAAlB,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAA,WAAA,EAAA,CADpD,GACoD,OAAAiB,GAAAA,EAAAjB,KAAAiB,EAAAjB,KAAAH,GAAAA,GAmB1D,6DA9B2CqB,CAAA,EA8BlCC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA7B,IAAA6B,EAAaE,YAAYC,QAAQ,SAAAC,GAAS,OAAAA,EAAMC,MAAM,GAC/C,OAAPJ,EAAAjC,IAAAiC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAAhC,QAAAiC,OAAAD,EAAA,CAAA,EAAAxC,EAAA0C,UASYH,MAAA,WAAK,IAC4C,OAA5DlC,KAAKD,YAAY+B,YAAYC,QAAQ,SAAAC,GAAS,OAAAA,EAAMC,MAAM,GAAE9B,QAAAC,QAA5DJ,KACWJ,QAAQsC,SAAO1B,KAAA,WAAA,EAC5B,CAAC,MAAA2B,GAAA,OAAAhC,QAAAiC,OAAAD,EAAA,CAAA,EAAAxC,CAAA,CAlDe,GCLZN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI2B,aAAa,CAAErB,WAAAA,KACJsB,kBACnBgB,EAAO5C,EAAQ6C,cAChBrB,QAAQvB,GACbA,EAASuB,QAAQxB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIqB,iBAAiBvB,EAAS,0BAG9C,OAFAE,EAAQsB,QAAQoB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCd,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAjC,IAAAiC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAAhC,QAAAiC,OAAAD,EAAAQ,CAAAA,EAAAJ,EAAAF,UASYH,MAAK,WAAA,IACN/B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQsC,SAAO1B,kBAC5B,CAAC,MAAA2B,GAAA,OAAAhC,QAAAiC,OAAAD,KAAAI,CAAA,CATD,GC6Ec,SAAAK,EAAmBC,GACjC,QAASA,EAAMtD,IACjB,CC9FA,IAwDauD,eAiFX,WAAA,SAAAA,EACkBC,EACAC,EACA9C,GAFA6C,KAAAA,YACAC,EAAAA,KAAAA,oBACA9C,EAAAA,KAAAA,kBAFAF,KAAM+C,OAANA,EACA/C,KAAcgD,eAAdA,EACAhD,KAAUE,WAAVA,CACf,CAAC4C,EApFgB7C,OAAM,SAACgD,OACzB,IAAIF,EAA2B,KAAK,OAAA5C,QAAAC,iCAG5B8C,SAAMC,EAAGF,EAAOC,QAAMC,EARX,0BASXC,EAAMH,EAAOI,UACfJ,EAAOI,UACPH,EAVe,oCAUaD,EAAOK,QAEjCC,EAAY,CAlEF,UAmEZN,EAAOO,eACTD,EAAUE,KAAI,UAAWR,EAAOO,eAElCT,EAAS,IAAIW,UAAUN,EAAKG,GAAWpD,QAAAC,QACN,IAAID,QAEnC,SAACC,EAASgC,GACVW,EAAQY,iBACN,OACA,eAAKC,EAKmBC,EAAAC,EAAAC,EAAAC,EAJhBC,EAA4C,CAChD1E,KAAM,uCAGJ0D,EAAOiB,YACTD,EAAeE,6BAA+B,CAC5CC,MAAO,CACLC,OAA8B,OAAxBR,EAAEZ,EAAOiB,UAAUE,YAAK,EAAtBP,EAAwBQ,OAChCC,cAAeR,OAAFA,EAAEb,EAAOiB,UAAUE,YAAjBN,EAAAA,EAAwBS,aACvCC,SAAgC,OAAxBT,EAAEd,EAAOiB,UAAUE,YAAK,EAAtBL,EAAwBS,UAEpCC,IAAK,CACHC,SAA8B,OAAtBV,EAAEf,EAAOiB,UAAUO,UAAG,EAApBT,EAAsBW,WAKlC1B,EAAO2B,qBACTX,EAAeY,sBAAwB5B,EAAO2B,oBAG1C,OAANhB,EAAAb,IAAAa,EAAQkB,KAAKC,KAAKC,UAAUf,GAC9B,EACA,CAAEgB,MAAM,IAEVlC,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBAAiB,QAASvB,GAClCW,EAAQY,iBACN,UACA,SAACd,GACC,IAAMqC,EAAUH,KAAKI,MAAMtC,EAAMuC,MAE5BxC,EAAmBsC,KAIH,qCAAjBA,EAAQ3F,KACVa,EAAQ8E,EAAQG,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEN,MAAM,GAEZ,IAAEzE,KApDIgF,SAAAA,GAsDN,IAAMxC,EAAiBwC,EAAmBC,gBACpCvF,EAAawF,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAO,IAAI9C,EAAWC,EAAQC,EAAgB9C,EAAY,yBAtExD,IAAAiD,EACID,EACAE,EAIAG,sCAR4B7B,CAEhC,EAuEH,SAAQC,GAAOkE,IAAAA,EAEd,MADAA,OAAAA,EAAA9C,IAAA
8C,EAAQ3D,QACFP,CACR,GACF,CAAC,MAAAQ,UAAAhC,QAAAiC,OAAAD,EAAA,CAAA,EAAA,IAAAQ,EAAAG,EAAAT,iBAAAM,EAQMT,MAAA,WACLlC,KAAK+C,OAAOb,OACd,EAACS,EAEMmD,YAAA,SAAYZ,GACjBlF,KAAK+C,OAAO+B,KAAKC,KAAKC,UAAUE,GAClC,EAACpC,CAAA,CAZD,mGChEciD,EAAEC,EAAKC,EAAGC,GACpB,IAAAF,EAAAG,EAAA,CACA,GAAAD,aAAYE,EAAO,CACnB,IAAAF,EAAAC,EAQI,YADAD,EAAAG,EAAAN,EAAkBO,KAAc,KAAAN,EAAAC,IANrC,EAADA,IACDA,EAAAC,EAAAC,GAGOD,KAMR,OAC0BA,EAAA1F,mBACGA,KAAAuF,EAAAO,YAAYL,GAAAF,EAAAO,KAAA,KAAAN,EAAA,IAIvCA,EAAAG,EAAAF,MAEIC,EACF,IAAAK,EAAKP,EAAOK,EACdE,KACIP,EAEF,CACF,CA/DJ,IAAMQ,EAAqB,CAAEC,YAAa,IA5CjCL,0BACT,SAAAA,IAAiB,QACjBA,EAAO/D,UAAE7B,KAA2B,cACpC,IAEEkG,EAAA,IAAAN,EAqCIH,EAAAjG,KAAAmG,EAEN,GAAMF,GACN,MAAoC,IAAAU,EAAAC,EAClC,GAAAC,EAAW,CACX,IACAd,EAAAW,EAAiB,EAAEG,EAAG7G,KAAA8G,GACtB,CAAA,MAAS3E,GACT4D,EAASW,EAAO,EAAGvE,EACnB,CACA,OAAAuE,CACA,CAEF,WAwCqB,QACD1G,KAAAqG,EAAA,SAAAU,GACA,IAzCX,IAAMb,EAAOa,EAAAD,IAGlBC,EAAMZ,EACJJ,EAAAW,EAAqB,EAAAC,EAAAA,EAAAT,GAAAA,GAClBU,EACHb,EAAAW,EAAU,EAAAE,EAAAV,MAGDQ,aAGPvE,KACAuE,EAAwB,EAAAvE,EAE5B,WA1BE6E,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,QAAS,WAAK,EACdC,aAAc,WAAQ,EACtBC,QAAS,WAAK,EACdC,UAAW,aACXC,aAAc,WAAK,EACnBC,eAAgB,2DAyChB,SAAAC,EACmBC,EACAC,EACDC,EACAC,GAAcC,IAAAA,OAAAd,EAsB1B/G,KAAI8H,EAmCA9H,KAAI+H,EAoJZ/H,KAAIgI,EAUJhI,UA1NiByH,aAAA,EAAAzH,KACA0H,gBACDC,EAAAA,KAAAA,kBACAC,YAAA,EAAA5H,KAXViI,uBAAiC,EACjCC,KAAAA,KAAa,YAAWlI,KACxBmI,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAArI,KACnBsI,OAAiB,EA2BlBC,KAAAA,WAAwB,WAAA,IAC7B,MAAoB,cAAhBxB,EAAKoB,OAAwBhI,QAAAC,WACjC2G,EAAKyB,aAAa,iBAElBzB,EAAKW,WAAWxF,QAAQ/B,QAAAC,QAClB2G,EAAKY,MAAMzF,SAAO1B,KAAAL,WAAAA,OAAAA,QAAAC,QAClB2G,EAAKa,OAAO1F,SAAO1B,KAEzBuG,WAAAA,EAAKyB,aAAa,eAAgB,EAAA,GACpC,CAAC,MAAArG,GAAAhC,OAAAA,QAAAiC,OAAAD,EAEOsG,CAAAA,EAAAA,KAAAA,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQH,aAAa,CAAEY,KAAAA,IAEhC,OAEQM,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQF,eAAe,CAAEY,OAAAA,IAElC,OAEQO,QAAO,SAAU7F,GAAuB,IAAA,OAAA1C,QAAAC,QAAAsB,aAC1CiH,IAAAA,EACIC,EAAc7D,KAAKI,MAAMtC,EAAMuC,MAErC,GAAKxC,EAAmBgG,GAAxB,CAEC,IAAAC,uzBAAAC,CAEOF,EAAYrJ,wBACb,cAAc,EAAA,WAOnB,OANMqJ,EAAYG,qBACdjB,EAAKG,uBACHW,EAAYG,mBAAmBC,UAEnClB,EAAKmB,oBAAeN,EAEtB,EAAC,qBAEI,gBAAgB,EAAA,WAMrB,OALEb,EAAKL,QAAQJ,UAAU,CACrBpG,OAAQ,KACRiE,QAAS0D,EAAYM,qBAAqBC,sBACzCR,EAEL,EAAC,GAEI,CAAA,WAAA,MAAA,iBAAiB,aAMtB,OALEb,EAAKL,QAAQJ,UAAU,CACrBpG,OAAQ,OACRiE,QAAS0D,EAAYQ,yBAAyBC,uBAC7CV,EAEL,EAAC,qBAEI,mCAAmC,EAAA,kBACtCb,EAAKL,QAAQP,QAAQ,CACnB3H,KAAM,2BACN+J,SACEV,EAAYW,wCACTC,gCACJb,IAEJ,GAAA,CAAA,WAAA,MAEI,kBAAkB,EAAE,WAAA,IAAAc,EAAAA,WAoCvB,GAAI3B,EAAKL,QAAQiC,0BAGb,OAFF5B,EAAKL,QAAQiC,0BACXd,EAAYe,uBACZhB,EAAA,GAKJb,EAAKV,QAAO,yBACewB,EAAYe,iBAAiBC,UACtD,4BAAA,CACEC,eAAgBjB,EAAYe,iBAAiBC,YAGjD9B,EAAKJ,WAAW5B,YAAY,CAC1BvG,KAAM,qBACNuK,aAAclB,EAAYe,iBAAiBG,aAC3CpD,OAAiCkC,yBAAAA,EAAYe,iBAAiBC,UAAS,4BACvEG,UAAU,IACTpB,KAAAqB,EAAA,WAAA,GArDDlC,EAAKL,QAAQhB,YAAYwD,eACvBrB,EAAYe,iBAAiBC,WAC9BvJ,CAAAA,IAAAA,aAAAsI,EAAAlH,CAAAA,EAAAA,EAAAC,EAAA,WAEGvB,OAAAA,QAAAC,QACmB0H,EAAKL,QAAQhB,YAChCmC,EAAYe,iBAAiBC,WAC7BhB,EAAYe,iBAAiBO,aAAW1J,KAFpCkG,SAAAA,GAINoB,EAAKJ,WAAW5B,YAAY,CAC1BvG,KAAM,qBACNuK,aAAclB,EAAYe,iBAAiBG,aAC3CpD,OAAQA,EACRqD,UAAU,GACT,EACL,EAAC,SAAQ5H,GACP2F,EAAKV,QACH,uDACc,MAAXjF,OAAW,EAAXA,EAAa+C,SAChB,CACE2E,eAAgBjB,EAAYe,iBAAiBC,YAGjD9B,EAAKJ,WAAW5B,YAAY,CAC1BvG,KAAM,qBACNuK,aAAclB,EAAYe,iBAAiBG,aAC3CpD,OAAQ,kCAA+C,MAAXvE,OAAW,EAAXA,EAAa+C,SACzD6E,UAAU,GAEd,GAAC,OAAAtI,GAAAA,EAAAjB,KAAAiB,EAAAjB,KAAAH,GAAAA,GAAA,CAAA,CAwBA,GAxBA,OAAA2J,GAAAA,EAAAxJ,KAAAwJ,EAAAxJ,KAAAiJ,GAAAA,GA2BJ,EAAAd,WAAAA,OAAAA,GAAAA,CAAA,GAAA,CAAA,WAAA,MAEI,
OAAO,EAAA,kBAERb,EAAKG,wBAA0BW,EAAYuB,YAAYnB,WAEvDlB,EAAKsC,oBAAoBxB,EAAYuB,YAAYE,eACjDvC,EAAKW,WAAW,kBACjBE,IAEF,GAAA,CAAA,WAAA,MAEI,MAAM,EAAA,WAIN,OAHHb,EAAKJ,WAAW5B,YAAY,CAC1BvG,KAAM,OACNyJ,SAAWJ,EAA0B0B,WAAWtB,gBAC/CL,EAAA,EAIJ,GAAA,MAAA,EAAA,kBAICb,EAAKL,QAAQP,QAAQ0B,QAAaD,IAEnC,KAAA,OAAAE,GAAAA,EAAArI,KAAAqI,EAAArI,KAEL,WAAA,QAFK,CA1HH,CA4HF,aACEsH,EAAKV,QAAQ,6BAA8B,CAAEvE,MAAAA,GAE/C,GACF,CAAC,MAAAV,GAAAhC,OAAAA,QAAAiC,OAAAD,EAEOoI,CAAAA,EAAAA,KAAAA,sBAAwB,SAAC1H,GAC/B,IPzRgC2H,EAC5BpL,EO8RgB,cAAhByI,EAAKM,QACPN,EAAKH,WAAW5B,YAAY,CAC1B2E,kBPjS4BD,EOyRR3H,EAAMuC,KAAK,GAQuBhG,OPhStDA,EAAS,IAAIH,WAAWuL,GAEX5L,OAAO8L,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBvL,MOmStD,EAACY,KAEO8K,uBAAyB,SAAAC,GAAiC,IAA9B3F,EAAI2F,EAAJ3F,KAChB,YAAdA,EAAK7F,MACPsI,EAAKY,WAAWrD,EAAK4F,SAAW,YAAc,WAElD,EAEQZ,KAAAA,6BAA6Ba,GAAa,IAM7C,OALHlD,EAAKH,OAAOpF,KAAKA,KAAK0D,MAAQ6B,EAAKO,OACnCP,EAAKH,OAAO9H,QAAQoL,KAAKC,YAAY,CAAE5L,KAAM,qBAC7CwI,EAAKH,OAAO9H,QAAQoL,KAAKC,YAAY,CACnC5L,KAAM,SACNH,OAAQX,EAAoBwM,KAC3B9K,QAAAC,SACL,CAAC,MAAA+B,GAAAhC,OAAAA,QAAAiC,OAAAD,EAEO8G,CAAAA,EAAAA,KAAAA,4BAaG,OAXTjB,EAAKS,WAAW,aAChBT,EAAKJ,OAAO9H,QAAQoL,KAAKC,YAAY,CAAE5L,KAAM,cAC7CyI,EAAKJ,OAAOpF,KAAKA,KAAK4I,6BACpB,KACApD,EAAKJ,OAAOhI,QAAQyL,YAAc,GAIpCC,WAAW,WACTtD,EAAKJ,OAAOpF,KAAKA,KAAK0D,MAAQ8B,EAAKM,OACnCN,EAAKJ,OAAO9H,QAAQoL,KAAKC,YAAY,CAAE5L,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA+B,UAAAhC,QAAAiC,OAAAD,EAEOiF,CAAAA,EAAAA,KAAAA,QAAU,SAAClC,EAAiBtF,GAClC0F,QAAQ3D,MAAMuD,EAAStF,GACvBiI,EAAKJ,QAAQL,QAAQlC,EAAStF,EAChC,EAEQ2L,KAAAA,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAczM,OAChB,SAMF,IADA,IAAIuJ,EAAS,EACJpJ,EAAI,EAAGA,EAAIsM,EAAczM,OAAQG,IACxCoJ,GAAUkD,EAActM,GAAK,IAI/B,OAFAoJ,GAAUkD,EAAczM,QAER,EAAI,EAAIuJ,EAAS,EAAI,EAAIA,CAC3C,EAEOmD,KAAAA,MAAQ,WAAA,OAAM5D,EAAKH,WAAW1E,cAAc,OAE5C0I,UAAY,SAAAC,GACjB9D,EAAKS,OADqBqD,EAANrD,MAEtB,EAEOsD,KAAAA,0BAA4B,WAKjC,OAJuBC,MAAvBhE,EAAKO,qBAALP,EAAKO,mBAAuB,IAAInJ,WAC9B4I,EAAKF,MAAM9H,SAASiM,oBAEtBjE,EAAKF,MAAM9H,SAASkM,qBAAqBlE,EAAKO,oBACvCP,EAAKO,kBACd,EAEO4D,KAAAA,2BAA6B,WAKlC,aAJAnE,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAIpJ,WAC/B4I,EAAKD,OAAO/H,SAASiM,oBAEvBjE,EAAKD,OAAO/H,SAASkM,qBAAqBlE,EAAKQ,qBACxCR,EAAKQ,mBACd,EAACrI,KAEMiM,eAAiB,WACtB,OAAOpE,EAAK0D,gBAAgB1D,EAAK+D,4BACnC,EAEOM,KAAAA,gBAAkB,WACvB,OAAOrE,EAAK0D,gBAAgB1D,EAAKmE,6BACnC,EAzRmBhM,KAAOyH,QAAPA,EACAzH,KAAU0H,WAAVA,EACD1H,KAAK2H,MAALA,EACA3H,KAAM4H,OAANA,EAEhB5H,KAAKyH,QAAQR,UAAU,CAAEjE,eAAgB0E,EAAW1E,iBAEpDhD,KAAK0H,WAAW3E,OAAOY,iBAAiB,UAAW,SAAAd,GACjDgF,EAAKa,QAAQ7F,EACf,GACA7C,KAAK0H,WAAW3E,OAAOY,iBAAiB,QAAS,SAAAd,GAC/CgF,EAAKW,aAAa,gBAClBX,EAAKT,QAAQ,eAAgBvE,EAC/B,GACA7C,KAAK0H,WAAW3E,OAAOY,iBAAiB,QAAS,WAC/CkE,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQN,cACf,GAEAnH,KAAK2H,MAAM7H,QAAQoL,KAAKiB,UAAYnM,KAAKuK,sBACzCvK,KAAK4H,OAAO9H,QAAQoL,KAAKiB,UAAYnM,KAAK8K,uBAC1C9K,KAAKwI,aAAa,YACpB,QAAChB,EA5DmB4E,aAAA,SAClB3E,GAAwE,IAExE,IAAM4E,EAAWC,EACZ9F,GAAAA,EACAQ,EACAS,GAGL4E,EAAY9E,eAAe,CAAEY,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAAzH,QAAAC,QAAAsB,EAE7B,WAAA,OAAAvB,QAAAC,QACYT,EAAMM,OA9BE,OA8ByBO,KAAA+L,SAAAA,GAAC,OAAhD5E,EAAK4E,EAA2CpM,QAAAC,QAC7B0C,EAAW7C,OAAOwH,IAAQjH,cAAAgM,GAAC,OAA9C9E,EAAU8E,EAAoCrM,QAAAC,QAC/BmC,EAAOtC,OAAOyH,EAAWxH,aAAWM,KAAAiM,SAAAA,GAEnD,OAAW,IAAAjF,EAAa6E,EAAa3E,EAAYC,EAFjDC,EAAM6E,EAE0D,EAAA,EAAA,EAClE,EAAS9K,SAAAA,GAAO,IAAA+K,EAAAC,EAEM,OADpBN,EAAY9E,eAAe,CAAEY,OAAQ,wBACrCuE,EAAAhF,IAAAgF,EAAYxK,QAAQ/B,QAAAC,QACduM,OADcA,EACdhF,QAAAgF,EAAAA,EAAOzK,SAAO1B,KAAA,WAAA,IAAAoM,EAAAzM,OAAAA,QAAAC,QACdwM,OADcA,EACdhF,QAAAgF,EAAAA,EAAQ1K,SAAO1B,KACrB,WAAA,MAAMmB,CAAM,EAAA,EACd,GACF,CAAC,MAAAQ,UAAAhC,QAAAiC,OAAAD,KAAAqF,CAAA"
}
|
1
|
+
{"version":3,"file":"lib.cjs","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","/*\n * ulaw encoding logic taken from the wavefile library\n * https://github.com/rochars/wavefile/blob/master/lib/codecs/mulaw.js\n */\n\nconst blob = new Blob(\n [\n // language=JavaScript\n `\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === \"ulaw\"\n ? 
new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? sample * 32768 : sample * 32767;\n if (this.format === \"ulaw\") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\nimport { FormatConfig } from \"./connection\";\n\nexport type InputConfig = {\n preferHeadphonesForIosDevices?: boolean;\n};\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nfunction isIosDevice() {\n return (\n [\n \"iPad Simulator\",\n \"iPhone Simulator\",\n \"iPod Simulator\",\n \"iPad\",\n \"iPhone\",\n \"iPod\",\n ].includes(navigator.platform) ||\n // iPad on iOS 13 detection\n (navigator.userAgent.includes(\"Mac\") && \"ontouchend\" in document)\n );\n}\n\nexport class Input {\n public static async create({\n sampleRate,\n format,\n preferHeadphonesForIosDevices,\n }: FormatConfig & InputConfig): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const options: MediaTrackConstraints = {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n noiseSuppression: { ideal: true },\n };\n\n // some browsers won't allow calling getSupportedConstraints or enumerateDevices\n // before getting approval for microphone access\n const preliminaryInputStream = await navigator.mediaDevices.getUserMedia({\n audio: true,\n });\n preliminaryInputStream?.getTracks().forEach(track => track.stop());\n\n if (isIosDevice() && preferHeadphonesForIosDevices) {\n const availableDevices =\n await window.navigator.mediaDevices.enumerateDevices();\n const idealDevice = availableDevices.find(\n d =>\n // cautious to include \"bluetooth\" in the search\n // as might trigger bluetooth speakers\n d.kind === \"audioinput\" &&\n [\"airpod\", \"headphone\", \"earphone\"].find(keyword =>\n d.label.toLowerCase().includes(keyword)\n )\n );\n if (idealDevice) {\n options.deviceId = { ideal: idealDevice.deviceId };\n }\n }\n\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: options,\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n worklet.port.postMessage({ type: \"setFormat\", format, sampleRate });\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","/*\n * ulaw decoding logic taken from the wavefile library\n * https://github.com/rochars/wavefile/blob/master/lib/codecs/mulaw.js\n */\n\nconst blob = new Blob(\n [\n // language=JavaScript\n `\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"setFormat\":\n this.format = data.format;\n break;\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === \"ulaw\"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === \"ulaw\") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\nimport { FormatConfig } from \"./connection\";\n\nexport class Output {\n public static async create({\n sampleRate,\n format,\n }: FormatConfig): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.port.postMessage({ type: \"setFormat\", format });\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","import { Language } from \"./connection\";\n\nexport type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AgentAudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: {\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n user_input_audio_format?: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n ping_ms?: number;\n };\n};\nexport type ClientToolCallEvent = {\n type: \"client_tool_call\";\n client_tool_call: {\n tool_name: string;\n tool_call_id: string;\n parameters: any;\n expects_response: boolean;\n };\n};\n\n// TODO 
correction missing\nexport type IncomingSocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AgentAudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent\n | ClientToolCallEvent;\n\nexport type PongEvent = {\n type: \"pong\";\n event_id: number;\n};\nexport type UserAudioEvent = {\n user_audio_chunk: string;\n};\nexport type UserFeedbackEvent = {\n type: \"feedback\";\n score: \"like\" | \"dislike\";\n event_id: number;\n};\nexport type ClientToolResultEvent = {\n type: \"client_tool_result\";\n tool_call_id: string;\n result: any;\n is_error: boolean;\n};\nexport type InitiationClientDataEvent = {\n type: \"conversation_initiation_client_data\";\n conversation_config_override?: {\n agent?: {\n prompt?: {\n prompt?: string;\n };\n first_message?: string;\n language?: Language;\n };\n tts?: {\n voice_id?: string;\n };\n };\n custom_llm_extra_body?: any;\n};\nexport type OutgoingSocketEvent =\n | PongEvent\n | UserAudioEvent\n | InitiationClientDataEvent\n | UserFeedbackEvent\n | ClientToolResultEvent;\n\nexport function isValidSocketEvent(event: any): event is IncomingSocketEvent {\n return !!event.type;\n}\n","import {\n InitiationClientDataEvent,\n ConfigEvent,\n isValidSocketEvent,\n OutgoingSocketEvent,\n} from \"./events\";\n\nconst MAIN_PROTOCOL = \"convai\";\n\nexport type Language =\n | \"en\"\n | \"ja\"\n | \"zh\"\n | \"de\"\n | \"hi\"\n | \"fr\"\n | \"ko\"\n | \"pt\"\n | \"it\"\n | \"es\"\n | \"id\"\n | \"nl\"\n | \"tr\"\n | \"pl\"\n | \"sv\"\n | \"bg\"\n | \"ro\"\n | \"ar\"\n | \"cs\"\n | \"el\"\n | \"fi\"\n | \"ms\"\n | \"da\"\n | \"ta\"\n | \"uk\"\n | \"ru\"\n | \"hu\"\n | \"no\"\n | \"vi\";\nexport type SessionConfig = {\n origin?: string;\n authorization?: string;\n overrides?: {\n agent?: {\n prompt?: {\n prompt?: string;\n };\n firstMessage?: string;\n language?: Language;\n };\n tts?: {\n voiceId?: string;\n };\n };\n customLlmExtraBody?: any;\n} & (\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined }\n);\nexport type FormatConfig = {\n format: \"pcm\" | \"ulaw\";\n sampleRate: number;\n};\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin = config.origin ?? WSS_API_ORIGIN;\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + WSS_API_PATHNAME + config.agentId;\n\n const protocols = [MAIN_PROTOCOL];\n if (config.authorization) {\n protocols.push(`bearer.${config.authorization}`);\n }\n socket = new WebSocket(url, protocols);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\n \"open\",\n () => {\n const overridesEvent: InitiationClientDataEvent = {\n type: \"conversation_initiation_client_data\",\n };\n\n if (config.overrides) {\n overridesEvent.conversation_config_override = {\n agent: {\n prompt: config.overrides.agent?.prompt,\n first_message: config.overrides.agent?.firstMessage,\n language: config.overrides.agent?.language,\n },\n tts: {\n voice_id: config.overrides.tts?.voiceId,\n },\n };\n }\n\n if (config.customLlmExtraBody) {\n overridesEvent.custom_llm_extra_body = config.customLlmExtraBody;\n }\n\n socket?.send(JSON.stringify(overridesEvent));\n },\n { once: true }\n );\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const {\n conversation_id,\n agent_output_audio_format,\n user_input_audio_format,\n } = conversationConfig;\n\n const inputFormat = parseFormat(user_input_audio_format ?? \"pcm_16000\");\n const outputFormat = parseFormat(agent_output_audio_format);\n\n return new Connection(socket, conversation_id, inputFormat, outputFormat);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly inputFormat: FormatConfig,\n public readonly outputFormat: FormatConfig\n ) {}\n\n public close() {\n this.socket.close();\n }\n\n public sendMessage(message: OutgoingSocketEvent) {\n this.socket.send(JSON.stringify(message));\n }\n}\n\nfunction parseFormat(format: string): FormatConfig {\n const [formatPart, sampleRatePart] = format.split(\"_\");\n if (![\"pcm\", \"ulaw\"].includes(formatPart)) {\n throw new Error(`Invalid format: ${format}`);\n }\n\n const sampleRate = parseInt(sampleRatePart);\n if (isNaN(sampleRate)) {\n throw new Error(`Invalid sample rate: ${sampleRatePart}`);\n }\n\n return {\n format: formatPart as FormatConfig[\"format\"],\n sampleRate,\n };\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input, InputConfig } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport {\n ClientToolCallEvent,\n isValidSocketEvent,\n PingEvent,\n} from \"./utils/events\";\n\nexport type { IncomingSocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig &\n Callbacks &\n ClientToolsConfig &\n InputConfig;\nexport type ClientToolsConfig = {\n clientTools: Record<\n string,\n (\n 
parameters: any\n ) => Promise<string | number | void> | string | number | void\n >;\n};\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onDisconnect: () => void;\n onError: (message: string, context?: any) => void;\n onMessage: (props: { message: string; source: Role }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onCanSendFeedbackChange: (prop: { canSendFeedback: boolean }) => void;\n onUnhandledClientToolCall?: (\n params: ClientToolCallEvent[\"client_tool_call\"]\n ) => void;\n};\n\nconst defaultClientTools = { clientTools: {} };\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDebug: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onMessage: () => {},\n onModeChange: () => {},\n onStatusChange: () => {},\n onCanSendFeedbackChange: () => {},\n};\n\nconst HTTPS_API_ORIGIN = \"https://api.elevenlabs.io\";\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig &\n Partial<Callbacks> &\n Partial<ClientToolsConfig> &\n Partial<InputConfig>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultClientTools,\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n fullOptions.onCanSendFeedbackChange({ canSendFeedback: false });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n connection = await Connection.create(options);\n [input, output] = await Promise.all([\n Input.create({\n ...connection.inputFormat,\n preferHeadphonesForIosDevices: options.preferHeadphonesForIosDevices,\n }),\n Output.create(connection.outputFormat),\n ]);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n private currentEventId: number = 1;\n private lastFeedbackEventId: number = 1;\n private canSendFeedback: boolean = false;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: 
Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private updateCanSendFeedback = () => {\n const canSendFeedback = this.currentEventId !== this.lastFeedbackEventId;\n if (this.canSendFeedback !== canSendFeedback) {\n this.canSendFeedback = canSendFeedback;\n this.options.onCanSendFeedbackChange({ canSendFeedback });\n }\n };\n\n private onEvent = async (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"client_tool_call\": {\n if (\n this.options.clientTools.hasOwnProperty(\n parsedEvent.client_tool_call.tool_name\n )\n ) {\n try {\n const result =\n (await this.options.clientTools[\n parsedEvent.client_tool_call.tool_name\n ](parsedEvent.client_tool_call.parameters)) ??\n \"Client tool execution successful.\"; // default client-tool call response\n\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result: result,\n is_error: false,\n });\n } catch (e) {\n this.onError(\n \"Client tool execution failed with following error: \" +\n (e as Error)?.message,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result:\n \"Client tool execution failed: \" + (e as Error)?.message,\n is_error: true,\n });\n }\n\n break;\n }\n\n if (this.options.onUnhandledClientToolCall) {\n this.options.onUnhandledClientToolCall(\n parsedEvent.client_tool_call\n );\n\n break;\n }\n\n this.onError(\n `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n {\n clientToolName: parsedEvent.client_tool_call.tool_name,\n }\n );\n this.connection.sendMessage({\n type: \"client_tool_result\",\n tool_call_id: parsedEvent.client_tool_call.tool_call_id,\n result: `Client tool with name ${parsedEvent.client_tool_call.tool_name} is not defined on client`,\n is_error: true,\n });\n\n break;\n }\n\n case \"audio\": {\n if (this.lastInterruptTimestamp <= parsedEvent.audio_event.event_id) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.currentEventId = parsedEvent.audio_event.event_id;\n this.updateCanSendFeedback();\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.sendMessage({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n });\n // parsedEvent.ping_event.ping_ms can be used on client side, for example\n // to warn if ping 
is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n if (this.status === \"connected\") {\n this.connection.sendMessage({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? \"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n\n public sendFeedback = (like: boolean) => {\n if (!this.canSendFeedback) {\n console.warn(\n this.lastFeedbackEventId === 0\n ? 
\"Cannot send feedback: the conversation has not started yet.\"\n : \"Cannot send feedback: feedback has already been sent for the current response.\"\n );\n return;\n }\n\n this.connection.sendMessage({\n type: \"feedback\",\n score: like ? \"like\" : \"dislike\",\n event_id: this.currentEventId,\n });\n this.lastFeedbackEventId = this.currentEventId;\n this.updateCanSendFeedback();\n };\n}\n\nexport function postOverallFeedback(\n conversationId: string,\n like: boolean,\n origin: string = HTTPS_API_ORIGIN\n) {\n return fetch(`${origin}/v1/convai/conversations/${conversationId}/feedback`, {\n method: \"POST\",\n body: JSON.stringify({\n feedback: like ? \"like\" : \"dislike\",\n }),\n headers: {\n \"Content-Type\": \"application/json\",\n },\n });\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","_ref","sampleRate","format","preferHeadphonesForIosDevices","Promise","resolve","options","ideal","echoCancellation","noiseSuppression","navigator","mediaDevices","getUserMedia","audio","then","preliminaryInputStream","_temp4","_temp2","audioWorklet","addModule","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","port","postMessage","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","getTracks","forEach","track","stop","_temp3","isIosDevice","includes","platform","userAgent","document","enumerateDevices","availableDevices","idealDevice","find","d","kind","keyword","label","toLowerCase","deviceId","_catch","error","_inputStream","_context","close","e","reject","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","inputFormat","outputFormat","config","origin","_config$origin","url","signedUrl","agentId","protocols","authorization","push","WebSocket","addEventListener","_socket","_config$overrides$age","_config$overrides$age2","_config$overrides$age3","_config$overrides$tts","overridesEvent","overrides","conversation_config_override","agent","prompt","first_message","firstMessage","language","tts","voice_id","voiceId","customLlmExtraBody","custom_llm_extra_body","send","JSON","stringify","once","message","parse","data","conversation_initiation_metadata_event","console","warn","conversationConfig","conversation_id","agent_output_audio_format","user_input_audio_format","parseFormat","_socket2","_proto","sendMessage","_format$split","split","formatPart","sampleRatePart","Error","parseInt","isNaN","defaultClientTools","clientTools","pact","state","value","s","_Pact","o","_settle","bind","v","observer","defaultCallbacks","onConnect","onDebug","onDisconnect","onError","onMessage","onModeChange","onStatusChange","onCanSendFeedbackChange","result","callback","onFulfilled","onRejected","_this","Conversation","connection","input","output","_this2","_this3","_this4","_this5","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","currentEventId","lastFeedbackEventId","canSendFeedback","endSession","updateStatus","updateMode","updateCanSendFeedback","onEvent","_interrupt","parsedEvent","_temp5","_switch","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_e
vent","tentative_agent_response","onUnhandledClientToolCall","client_tool_call","tool_name","clientToolName","tool_call_id","is_error","hasOwnProperty","parameters","audio_event","addAudioBase64Chunk","audio_base_64","ping_event","onInputWorkletMessage","b","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","finished","chunk","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","_ref2","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","_this2$outputFrequenc","getInputVolume","getOutputVolume","sendFeedback","like","score","onmessage","startSession","fullOptions","_extends","_Connection$create","all","_Promise$all","_connection","_input","_output","fetch","method","body","feedback","headers"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCVA,IAAMC,EAAO,IAAIC,KACf,CA8GC,07HACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC/FxCM,eAAK,WAuEhB,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAAwBC,KAHxBJ,aACAC,EAAAA,KAAAA,qBACAC,aAAA,EAAAE,KACAD,iBAHA,EAAAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,QAACJ,EA3EgBM,OAAM,SAAAC,GACxB,IAAAC,EAAUD,EAAVC,WACAC,EAAMF,EAANE,OACAC,EAA6BH,EAA7BG,8BAC2B,IAC3B,IAAIT,EAA+B,KAC/BG,EAAkC,KAAK,OAAAO,QAAAC,iCAGnCC,EAAiC,CACrCL,WAAY,CAAEM,MAAON,GACrBO,iBAAkB,CAAED,OAAO,GAC3BE,iBAAkB,CAAEF,OAAO,IAC3BH,QAAAC,QAImCK,UAAUC,aAAaC,aAAa,CACvEC,OAAO,KACPC,KAFIC,SAAAA,YAAsBC,IAAA,SAAAC,IAAAb,OAAAA,QAAAC,QAgCtBX,EAAQwB,aAAaC,UAAU7B,IAAkBwB,KAAA,WAAA,OAAAV,QAAAC,QAEnCK,UAAUC,aAAaC,aAAa,CACtDC,MAAOP,KACPQ,KAAA,SAAAM,GAEF,IAAMC,EAAS3B,EAAQ4B,wBAJvBzB,EAAWuB,GAKLxB,EAAU,IAAI2B,iBAAiB7B,EAAS,uBAM9C,OALAE,EAAQ4B,KAAKC,YAAY,CAAEpC,KAAM,YAAaa,OAAAA,EAAQD,WAAAA,IAEtDoB,EAAOK,QAAQ/B,GACfA,EAAS+B,QAAQ9B,GAEV,IAAIH,EAAMC,EAASC,EAAUC,EAASC,EAAa,KAvB1D,IAAM8B,EACJjB,UAAUC,aAAaiB,0BAA0B3B,WAK7CN,GAHND,EAAU,IAAIhB,OAAOmD,aACnBF,EAA+B,CAAE1B,WAAAA,GAAe,CAAE,IAE3B6B,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BvB,OAAAA,QAAAC,QACzBX,EAAQwB,aAAaC,UAjEjC,sGAiE4DL,KAAA,aAAA,CADpD,GACoD,OAAAiB,GAAAA,EAAAjB,KAAAiB,EAAAjB,KAAAG,GAAAA,GAAA,CA3BxDF,MAAAA,GAAAA,EAAwBiB,YAAYC,QAAQ,SAAAC,GAAS,OAAAA,EAAMC,MAAM,GAAE,IAAAC,EAE/DC,WAAAA,IApCN,CACE,iBACA,mBACA,iBACA,OACA,SACA,QACAC,SAAS5B,UAAU6B,WAEpB7B,UAAU8B,UAAUF,SAAS,QAAU,eAAgBG,WA2BjCtC,EAA6BC,OAAAA,QAAAC,QAExC3B,OAAOgC,UAAUC,aAAa+B,oBAAkB5B,KADlD6B,SAAAA,GAEN,IAAMC,EAAcD,EAAiBE,KACnC,SAAAC,SAGa,eAAXA,EAAEC,MACF,CAAC,SAAU,YAAa,YAAYF,KAAK,SAAAG,GACvC,OAAAF,EAAEG,MAAMC,cAAcZ,SAASU,EAAQ,EACxC,GAEDJ,IACFtC,EAAQ6C,SAAW,CAAE5C,MAAOqC,EAAYO,UAAW,EAAA,CAbnDd,GAamD,OAAAD,GAAAA,EAAAtB,KAAAsB,EAAAtB,KAAAE,GAAAA,GA4BzD,yBAzD2C,IAGnCV,sCAHmC8C,CAAA,WAyDlCC,GAAOC,IAAAA,EAAAC,EAGd,MAFAD,OAAAA,EAAAzD,IAAAyD,EAAatB,YAAYC,QAAQ,SAAAC,GAAS,OAAAA,EAAMC,MAAM,GACtDoB,OAAAA,EAAA7D,IAAA6D,EAASC,QACHH,CACR,GACF,CAAC,MAAAI,GAAA,OAAArD,QAAAsD,OAAAD,EAAA,CAAA,EAAAhE,EAAAkE,UASYH,MAAA,eACiD,OAA5D1D,KAAKD,YAAYmC,YAAYC,QAAQ,SAAAC,GAAS,OAAAA,EAAMC,MAAM,GAAE/B,QAAAC,QAA5DP,KACWJ,QAAQ8D,SAAO1C,KAAA,aAC5B,CAAC,MAAA2C,UAAArD,QAAAsD,OAAAD,KAAAhE,CAAA,CAjFe,GCpBZN,EAAO,IAAIC,KACf,CA0FC,swFACD,CAAEC,KAAM,2BAGGuE,EAAuBrE,IAAIC,gBAAgBL,GCjG3C0E,eAwBX,WAAA,SAAAA,EACkBnE,EACAC,EACAmE,EACAlE,GAAyBE,KAHzBJ,aAAA,EAAAI,KACAH,cACAmE,EAAAA,KAAAA,iBACAlE,aAAA,EAHAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIgE,KAAJA,EACAhE,KAAOF,QAAPA,CACf,QAACiE,EA5BgB9D,OAAA,SAAMC,GACxB,IAAAC,EAAUD,EAAVC,WACAC,EAAMF,EAANE,OACa,IACb,IAAIR,EAA+B,KA
AK,OAAAU,QAAAC,iCAGhCV,GADND,EAAU,IAAImC,aAAa,CAAE5B,WAAAA,KACJ6B,kBACnBgC,EAAOpE,EAAQqE,cAChBrC,QAAQ/B,GACbA,EAAS+B,QAAQhC,EAAQsE,aAAa5D,QAAAC,QAChCX,EAAQwB,aAAaC,UAAUyC,IAAqB9C,KAAA,WAC1D,IAAMlB,EAAU,IAAI2B,iBAAiB7B,EAAS,0BAI9C,OAHAE,EAAQ4B,KAAKC,YAAY,CAAEpC,KAAM,YAAaa,OAAAA,IAC9CN,EAAQ8B,QAAQoC,GAET,IAAID,EAAOnE,EAASC,EAAUmE,EAAMlE,EAAS,yBAZd,IAGhCD,EACAmE,sCAJgCV,CAAA,WAa/BC,GAAOE,IAAAA,EAEd,MADO,OAAPA,EAAA7D,IAAA6D,EAASC,QACHH,CACR,GACF,CAAC,MAAAI,GAAA,OAAArD,QAAAsD,OAAAD,EAAA,CAAA,EAAAI,EAAAF,UASYH,MAAK,WAAA,WACNpD,QAAAC,QAAJP,KAAKJ,QAAQ8D,SAAO1C,KAC5B,WAAA,EAAA,CAAC,MAAA2C,UAAArD,QAAAsD,OAAAD,KAAAI,CAAA,CATD,GC+Ec,SAAAI,EAAmBC,GACjC,QAASA,EAAM7E,IACjB,CCrGA,IA4Da8E,eAqFX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAA0BzE,KAH1BsE,YACAC,EAAAA,KAAAA,oBACAC,EAAAA,KAAAA,wBACAC,kBAAA,EAHAzE,KAAMsE,OAANA,EACAtE,KAAcuE,eAAdA,EACAvE,KAAWwE,YAAXA,EACAxE,KAAYyE,aAAZA,CACf,CAACJ,EAzFgBpE,OAAM,SAACyE,GAAqB,IAC9C,IAAIJ,EAA2B,KAAK,OAAAhE,QAAAC,iCAG5BoE,EAAsB,OAAhBC,EAAGF,EAAOC,QAAMC,EARX,0BASXC,EAAMH,EAAOI,UACfJ,EAAOI,UACPH,EAVe,oCAUaD,EAAOK,QAEjCC,EAAY,CAtEF,UAuEZN,EAAOO,eACTD,EAAUE,KAAeR,UAAAA,EAAOO,eAElCX,EAAS,IAAIa,UAAUN,EAAKG,GAAW1E,QAAAC,QACN,IAAID,QAEnC,SAACC,EAASqD,GACVU,EAAQc,iBACN,OACA,eAAKC,EAKmBC,EAAAC,EAAAC,EAAAC,EAJhBC,EAA4C,CAChDnG,KAAM,uCAGJmF,EAAOiB,YACTD,EAAeE,6BAA+B,CAC5CC,MAAO,CACLC,OAAQR,OAAFA,EAAEZ,EAAOiB,UAAUE,YAAjBP,EAAAA,EAAwBQ,OAChCC,cAAeR,OAAFA,EAAEb,EAAOiB,UAAUE,YAAjBN,EAAAA,EAAwBS,aACvCC,gBAAQT,EAAEd,EAAOiB,UAAUE,cAAjBL,EAAwBS,UAEpCC,IAAK,CACHC,SAAUV,OAAFA,EAAEf,EAAOiB,UAAUO,UAAjBT,EAAAA,EAAsBW,WAKlC1B,EAAO2B,qBACTX,EAAeY,sBAAwB5B,EAAO2B,2BAGhDhB,EAAAf,IAAAe,EAAQkB,KAAKC,KAAKC,UAAUf,GAC9B,EACA,CAAEgB,MAAM,IAEVpC,EAAQc,iBAAiB,QAASxB,GAClCU,EAAQc,iBAAiB,QAASxB,GAClCU,EAAQc,iBACN,UACA,SAAChB,GACC,IAAMuC,EAAUH,KAAKI,MAAMxC,EAAMyC,MAE5B1C,EAAmBwC,KAIH,qCAAjBA,EAAQpH,KACVgB,EAAQoG,EAAQG,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEN,MAAM,GAEZ,IAAE1F,cApDIiG,GAsDN,IACEC,EAGED,EAHFC,gBACAC,EAEEF,EAFFE,0BACAC,EACEH,EADFG,wBAGI5C,EAAc6C,EAAYD,MAAAA,EAAAA,EAA2B,aACrD3C,EAAe4C,EAAYF,GAEjC,WAAW9C,EAAWC,EAAQ4C,EAAiB1C,EAAaC,EAAc,6BA1ExEG,EACID,EACAE,EAIAG,sCAR4B1B,GA6E3BC,SAAAA,GAAO,IAAA+D,EAEd,aADAA,EAAAhD,IAAAgD,EAAQ5D,QACFH,CACR,GACF,CAAC,MAAAI,GAAA,OAAArD,QAAAsD,OAAAD,EAAA,CAAA,EAAA,IAAA4D,EAAAlD,EAAAR,iBAAA0D,EASM7D,MAAA,WACL1D,KAAKsE,OAAOZ,OACd,EAAC6D,EAEMC,YAAA,SAAYb,GACjB3G,KAAKsE,OAAOiC,KAAKC,KAAKC,UAAUE,GAClC,EAACtC,CAAA,CAbD,GAgBF,SAASgD,EAAYjH,GACnB,IAAAqH,EAAqCrH,EAAOsH,MAAM,KAA3CC,EAAUF,EAAA,GAAEG,EAAcH,KACjC,IAAK,CAAC,MAAO,QAAQjF,SAASmF,GAC5B,UAAUE,yBAAyBzH,GAGrC,IAAMD,EAAa2H,SAASF,GAC5B,GAAIG,MAAM5H,GACR,MAAM,IAAI0H,MAAK,wBAAyBD,GAG1C,MAAO,CACLxH,OAAQuH,EACRxH,WAAAA,EAEJ,wFCxIA,IAAM6H,EAAqB,CAAEC,YAAa,eAwChCC,EAAAC,EAAAC,GACF,IAAAF,EAAAG,EAAA,CACD,GAAAD,aAAEE,EAAA,OAEID,EAMP,YADAD,EAAAG,EAAAC,EAAYC,KAAO,KAAEP,EAACC,IAJvB,EAADA,MAASC,EAAAC,KAEPD,EAAUM,KAOgBN,GAAAA,EAAApH,mBAClBA,OAAoByH,KAAC,KAAAP,EAAAC,GAAAK,EAAAC,KAAA,KAAAP,EAAA,IAGzBA,EAAAG,EAAAF,IACMO,EAAAN,MACQO,EAAAT,EAAaK,EACRI,KACJT,IA7DzB,IAAMU,EAA8B,CAClCC,UAAW,WAAK,EAChBC,QAAS,aACTC,aAAc,WAAQ,EACtBC,QAAS,WAAK,EACdC,UAAW,aACXC,aAAc,WAAQ,EACtBC,eAAgB,WAAK,EACrBC,wBAAyB,WAAK,GAvDvBd,0BACT,SAAAA,IACAA,QAAAA,EAAOzE,UAAE7C,KAA2B,cACpC,IAEEqI,EAAA,IAAAf,WA0CF,GAAMH,GACJ,IAAAmB,EAAmB,EAAHnB,EAAGoB,EAAAC,EACnB,GAAAF,EAAc,CACd,IACAd,EAASa,EAAO,EAACC,EAAAtJ,KAAA0I,GACjB,CAAA,MAAS/E,GACT6E,EAAAa,EAAiB,EAAE1F,EACnB,CACA,OAAA0F,CACA,CAEI,OAENrJ,IAmDqB,CA/BjB,OAgCiBA,KAAAuI,EAAA,SAAAkB,GACD,IACA,IAAArB,EAAAqB,EAAAf,EArDE,EAAbe,EAAOpB,EAMZG,EAAAa,MAA6BE,EAAAnB,GAAAA,GACxBoB,EACHhB,EAAAa,EAAmB,EAAAG,EAAApB,MAEnBiB,EAAA,EAAAjB,SAGSzE,KAEP0F,EA
AsB,EAAA1F,KAI1B0F,0CA8BF,WAAA,SAAAK,EACmBlJ,EACAmJ,EACDC,EACAC,OAAcC,EAAA9J,KAAAyJ,EAsB1BzJ,KAAI+J,EA2CA/J,KAAIgK,EAuJZhK,KAAIiK,EAUJjK,UArOiBQ,aAAA,EAAAR,KACA2J,gBACDC,EAAAA,KAAAA,kBACAC,YAAA,EAAA7J,KAdVkK,uBAAiC,EACjCC,KAAAA,KAAa,YAAWnK,KACxBoK,OAAiB,kBACjBC,wBAAkB,EAAArK,KAClBsK,yBACAC,EAAAA,KAAAA,OAAiB,EAACvK,KAClBwK,eAAyB,OACzBC,oBAA8B,EAACzK,KAC/B0K,iBAA2B,OA2B5BC,WAAU,WAAA,IACf,MAAoB,cAAhBlB,EAAKW,OAAwB9J,QAAAC,WACjCkJ,EAAKmB,aAAa,iBAElBnB,EAAKE,WAAWjG,QAAQpD,QAAAC,QAClBkJ,EAAKG,MAAMlG,SAAO1C,KAAA,WAAA,OAAAV,QAAAC,QAClBkJ,EAAKI,OAAOnG,SAAO1C,KAEzByI,WAAAA,EAAKmB,aAAa,eAAgB,KACpC,CAAC,MAAAjH,GAAArD,OAAAA,QAAAsD,OAAAD,EAEOkH,CAAAA,EAAAA,KAAAA,WAAa,SAACV,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKtJ,QAAQ0I,aAAa,CAAEiB,KAAAA,IAEhC,EAACnK,KAEO4K,aAAe,SAACR,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKtJ,QAAQ2I,eAAe,CAAEiB,OAAAA,IAElC,EAEQU,KAAAA,sBAAwB,WAC9B,IAAMJ,EAAkBZ,EAAKU,iBAAmBV,EAAKW,oBACjDX,EAAKY,kBAAoBA,IAC3BZ,EAAKY,gBAAkBA,EACvBZ,EAAKtJ,QAAQ4I,wBAAwB,CAAEsB,gBAAAA,IAE3C,OAEQK,QAAO,SAAU3G,GAAuB,IAAA,OAAA9D,QAAAC,QAAA+C,EAAA,WAC1C0H,IAAAA,EACIC,EAAczE,KAAKI,MAAMxC,EAAMyC,MAErC,GAAK1C,EAAmB8G,GAAxB,CAEC,IAAAC,uzBAAAC,CAEOF,EAAY1L,KAAI,CAAA,CAAA,WAAA,MACjB,cAAc,EAAE,kBACf0L,EAAYG,qBACdrB,EAAKG,uBACHe,EAAYG,mBAAmBC,UAEnCtB,EAAKuB,oBAAeN,IAErB,GAAA,CAAA,WAAA,MAEI,gBAAgB,EAAE,WAIlB,OAHHjB,EAAKvJ,QAAQyI,UAAU,CACrB1H,OAAQ,KACRoF,QAASsE,EAAYM,qBAAqBC,sBACzCR,EAAA,EAEJ,GAEI,CAAA,WAAA,MAAA,iBAAiB,EAAE,WAMxB,OALEjB,EAAKvJ,QAAQyI,UAAU,CACrB1H,OAAQ,OACRoF,QAASsE,EAAYQ,yBAAyBC,uBAC7CV,EAEL,EAAC,qBAEI,mCAAmC,EAAA,kBACtCjB,EAAKvJ,QAAQsI,QAAQ,CACnBvJ,KAAM,2BACNoM,SACEV,EAAYW,wCACTC,gCACJb,IAEJ,GAAA,CAAA,WAAA,MAEI,kBAAkB,EAAE,WAAA,IAAA9J,EAAAA,WAuCvB,GAAI6I,EAAKvJ,QAAQsL,0BAMjB,OALE/B,EAAKvJ,QAAQsL,0BACXb,EAAYc,uBACZf,EAGJ,GAEAjB,EAAKf,iCACsBiC,EAAYc,iBAAiBC,UACtD,4BAAA,CACEC,eAAgBhB,EAAYc,iBAAiBC,YAGjDjC,EAAKJ,WAAWnC,YAAY,CAC1BjI,KAAM,qBACN2M,aAAcjB,EAAYc,iBAAiBG,aAC3C7C,OAAiC4B,yBAAAA,EAAYc,iBAAiBC,UAAoC,4BAClGG,UAAU,IACTnB,EAAA,CAAA,EAAA1I,EAxDDyH,WAAAA,GAAAA,EAAKvJ,QAAQyH,YAAYmE,eACvBnB,EAAYc,iBAAiBC,WAC9B7K,CAAAA,IAAAA,aAAA6J,EAAA,CAAA,EAAA/I,EAAAqB,aAEGhD,OAAAA,QAAAC,QAEOwJ,EAAKvJ,QAAQyH,YAClBgD,EAAYc,iBAAiBC,WAC7Bf,EAAYc,iBAAiBM,aAAWrL,KAHtCqI,SAAAA,GAMNU,EAAKJ,WAAWnC,YAAY,CAC1BjI,KAAM,qBACN2M,aAAcjB,EAAYc,iBAAiBG,aAC3C7C,OAAQA,EACR8C,UAAU,GACT,EACL,EAASxI,SAAAA,GACPoG,EAAKf,QACH,uDACGrF,MAAAA,OAAAA,EAAAA,EAAagD,SAChB,CACEsF,eAAgBhB,EAAYc,iBAAiBC,YAGjDjC,EAAKJ,WAAWnC,YAAY,CAC1BjI,KAAM,qBACN2M,aAAcjB,EAAYc,iBAAiBG,aAC3C7C,OACE,kCAAoC1F,MAAAA,OAAAA,EAAAA,EAAagD,SACnDwF,UAAU,GAEd,UAAClK,GAAAA,EAAAjB,KAAAiB,EAAAjB,KAAAG,GAAAA,GAAA,CAAA,CAhCD4I,GAgCC,OAAAzH,GAAAA,EAAAtB,KAAAsB,EAAAtB,KAAAE,GAAAA,GA2BJ,EAAA,WAAA,OAAA8J,GAAAA,CAAA,GAEI,CAAA,WAAA,MAAA,OAAO,oBACNjB,EAAKG,wBAA0Be,EAAYqB,YAAYjB,WACzDtB,EAAKwC,oBAAoBtB,EAAYqB,YAAYE,eACjDzC,EAAKS,eAAiBS,EAAYqB,YAAYjB,SAC9CtB,EAAKe,wBACLf,EAAKc,WAAW,kBACjBG,IAEF,GAAA,CAAA,WAAA,MAEI,MAAM,EAAE,WAQb,OAPEjB,EAAKJ,WAAWnC,YAAY,CAC1BjI,KAAM,OACN8L,SAAWJ,EAA0BwB,WAAWpB,gBAC/CL,EAIL,EAAC,sBAMD,OAFEjB,EAAKvJ,QAAQsI,QAAQmC,QAAaD,EAEpC,EAAC,YAAAE,GAAAA,EAAAlK,KAAAkK,EAAAlK,KAEL,WAAA,SA/HE,CA+HF,aACE+I,EAAKf,QAAQ,6BAA8B,CAAE5E,MAAAA,GAE/C,GACF,CAAC,MAAAT,GAAA,OAAArD,QAAAsD,OAAAD,EAAA,CAAA,EAAA3D,KAEO0M,sBAAwB,SAACtI,GAC/B,IPrTgCuI,EAC5BvN,EO0TgB,cAAhB0K,EAAKM,QACPN,EAAKH,WAAWnC,YAAY,CAC1BoF,kBP7T4BD,EOqTRvI,EAAMyC,KAAK,GAQuBzH,OP5TtDA,EAAS,IAAIH,WAAW0N,GAEX/N,OAAOiO,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuB1N,MO+TtD,EAEQ6N,KAAAA,uBAAyB,SAAA/M,GAAG,IAAA2G,EAAI3G,EAAJ2G,KAChB,YAAdA,EAAKtH,MACPuK,EAAKe,WAAWhE,EAAKqG,SAAW,YAAc,WAElD,EAAClN,KAEOuM,oBAA6BY,SAAAA,GAAiB,IAMjD,O
ALHnD,EAAKH,OAAO7F,KAAKA,KAAKoE,MAAQ4B,EAAKO,OACnCP,EAAKH,OAAO/J,QAAQ4B,KAAKC,YAAY,CAAEpC,KAAM,qBAC7CyK,EAAKH,OAAO/J,QAAQ4B,KAAKC,YAAY,CACnCpC,KAAM,SACNH,OAAQX,EAAoB0O,KAC3B7M,QAAAC,SACL,CAAC,MAAAoD,GAAArD,OAAAA,QAAAsD,OAAAD,EAAA,CAAA,EAAA3D,KAEOsL,aAA0B,WAAA,IAavB,OAXTrB,EAAKY,WAAW,aAChBZ,EAAKJ,OAAO/J,QAAQ4B,KAAKC,YAAY,CAAEpC,KAAM,cAC7C0K,EAAKJ,OAAO7F,KAAKA,KAAKoJ,6BACpB,KACAnD,EAAKJ,OAAOjK,QAAQyN,YAAc,GAIpCC,WAAW,WACTrD,EAAKJ,OAAO7F,KAAKA,KAAKoE,MAAQ6B,EAAKM,OACnCN,EAAKJ,OAAO/J,QAAQ4B,KAAKC,YAAY,CAAEpC,KAAM,oBAC/C,EAAG,KAAMe,QAAAC,SACX,CAAC,MAAAoD,GAAArD,OAAAA,QAAAsD,OAAAD,EAEOqF,CAAAA,EAAAA,KAAAA,QAAU,SAACrC,EAAiB/G,GAClCmH,QAAQxD,MAAMoD,EAAS/G,GACvBkK,EAAKtJ,QAAQwI,QAAQrC,EAAS/G,EAChC,EAEQ2N,KAAAA,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAczO,OAChB,OAAO,EAMT,IADA,IAAIwL,EAAS,EACJrL,EAAI,EAAGA,EAAIsO,EAAczO,OAAQG,IACxCqL,GAAUiD,EAActO,GAAK,IAI/B,OAFAqL,GAAUiD,EAAczO,QAER,EAAI,EAAIwL,EAAS,EAAI,EAAIA,CAC3C,OAEOkD,MAAQ,WAAM,OAAA3D,EAAKH,WAAWpF,cAAc,EAAAvE,KAE5C0N,UAAY,SAAAC,GACjB7D,EAAKS,OADqBoD,EAANpD,MAEtB,EAACvK,KAEM4N,0BAA4B,WAKjC,aAJA9D,EAAKO,qBAALP,EAAKO,mBAAuB,IAAIpL,WAC9B6K,EAAKF,MAAM/J,SAASgO,oBAEtB/D,EAAKF,MAAM/J,SAASiO,qBAAqBhE,EAAKO,oBACvCP,EAAKO,kBACd,EAACrK,KAEM+N,2BAA6B,WAKlC,OAJwBC,MAAxBlE,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAIrL,WAC/B6K,EAAKD,OAAOhK,SAASgO,oBAEvB/D,EAAKD,OAAOhK,SAASiO,qBAAqBhE,EAAKQ,qBACxCR,EAAKQ,mBACd,EAEO2D,KAAAA,eAAiB,WACtB,OAAOnE,EAAKyD,gBAAgBzD,EAAK8D,4BACnC,EAAC5N,KAEMkO,gBAAkB,WACvB,OAAOpE,EAAKyD,gBAAgBzD,EAAKiE,6BACnC,OAEOI,aAAe,SAACC,GAChBtE,EAAKY,iBASVZ,EAAKH,WAAWnC,YAAY,CAC1BjI,KAAM,WACN8O,MAAOD,EAAO,OAAS,UACvB/C,SAAUvB,EAAKU,iBAEjBV,EAAKW,oBAAsBX,EAAKU,eAChCV,EAAKgB,yBAdH/D,QAAQC,KACuB,IAA7B8C,EAAKW,oBACD,8DACA,iFAYV,EAvTmBzK,KAAOQ,QAAPA,EACAR,KAAU2J,WAAVA,EACD3J,KAAK4J,MAALA,EACA5J,KAAM6J,OAANA,EAEhB7J,KAAKQ,QAAQqI,UAAU,CAAEtE,eAAgBoF,EAAWpF,iBAEpDvE,KAAK2J,WAAWrF,OAAOc,iBAAiB,UAAW,SAAAhB,GACjD0F,EAAKiB,QAAQ3G,EACf,GACApE,KAAK2J,WAAWrF,OAAOc,iBAAiB,QAAS,SAAAhB,GAC/C0F,EAAKc,aAAa,gBAClBd,EAAKd,QAAQ,eAAgB5E,EAC/B,GACApE,KAAK2J,WAAWrF,OAAOc,iBAAiB,QAAS,WAC/C0E,EAAKc,aAAa,gBAClBd,EAAKtJ,QAAQuI,cACf,GAEA/I,KAAK4J,MAAM9J,QAAQ4B,KAAK4M,UAAYtO,KAAK0M,sBACzC1M,KAAK6J,OAAO/J,QAAQ4B,KAAK4M,UAAYtO,KAAKiN,uBAC1CjN,KAAK4K,aAAa,YACpB,CAnCClB,OAmCAA,EAxEmB6E,sBAClB/N,GAGsB,IAEtB,IAAMgO,EAAWC,KACZzG,EACAY,EACApI,GAGLgO,EAAYrF,eAAe,CAAEiB,OAAQ,eACrCoE,EAAYpF,wBAAwB,CAAEsB,iBAAiB,IAEvD,IAAId,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAAvJ,QAAAC,QAAA+C,aAE7BhD,OAAAA,QAAAC,QACiB8D,EAAWpE,OAAOO,IAAQQ,KAAA,SAAA0N,GAAC,OAA9C/E,EAAU+E,EAAoCpO,QAAAC,QACtBD,QAAQqO,IAAI,CAClChP,EAAMM,OAAMwO,KACP9E,EAAWnF,YACdnE,CAAAA,8BAA+BG,EAAQH,iCAEzC0D,EAAO9D,OAAO0J,EAAWlF,iBACzBzD,KAAA,SAAA4N,GAEF,WAAWlF,EAAa8E,EAAa7E,EARpCC,EAAKgF,KAAE/E,EAAM+E,EAQd,GAAgE,IAClE,EAAC,SAAQrL,OAAOsL,EAAAC,EAEM,OADpBN,EAAYrF,eAAe,CAAEiB,OAAQ,iBAC3B,OAAVyE,EAAAlF,IAAAkF,EAAYnL,QAAQpD,QAAAC,QACT,OADSuO,EACdlF,QAAK,EAALkF,EAAOpL,SAAO1C,KAAA+N,WAAAA,IAAAA,EAAAzO,OAAAA,QAAAC,eAAAwO,EACdlF,UAAAkF,EAAQrL,SAAO1C,KAAA,WACrB,MAAMuC,CAAM,IACd,GACF,CAAC,MAAAI,GAAArD,OAAAA,QAAAsD,OAAAD,EAAA+F,CAAAA,EAAAA,CAAA,CAYD,+BA2TI,SACJnF,EACA6J,EACAzJ,GAEA,gBAFAA,IAAAA,EAlXuB,6BAoXhBqK,MAASrK,EAAkCJ,4BAAAA,cAA2B,CAC3E0K,OAAQ,OACRC,KAAM1I,KAAKC,UAAU,CACnB0I,SAAUf,EAAO,OAAS,YAE5BgB,QAAS,CACP,eAAgB,qBAGtB"}
package/dist/lib.modern.js
CHANGED
@@ -1,2 +1,2 @@
-
function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(s);class a{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const i=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(o),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}}});const r=e.createMediaStreamSource(n),l=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(i),i.connect(l),new a(e,i,l,n)}catch(t){var s,i;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(i=e)||i.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const o=new AudioWorkletNode(e,"audio-concat-processor");return o.connect(s),new l(e,n,s,o)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class u{static async create(t){let e=null;try{var n;const 
s=null!=(n=t.origin)?n:"wss://api.elevenlabs.io",o=t.signedUrl?t.signedUrl:s+"/v1/convai/conversation?agent_id="+t.agentId,a=["convai"];t.authorization&&a.push(`bearer.${t.authorization}`),e=new WebSocket(o,a);const i=await new Promise((n,s)=>{e.addEventListener("open",()=>{var n;const s={type:"conversation_initiation_client_data"};var o,a,i,r;t.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=t.overrides.agent)?void 0:o.prompt,first_message:null==(a=t.overrides.agent)?void 0:a.firstMessage,language:null==(i=t.overrides.agent)?void 0:i.language},tts:{voice_id:null==(r=t.overrides.tts)?void 0:r.voiceId}}),t.customLlmExtraBody&&(s.custom_llm_extra_body=t.customLlmExtraBody),null==(n=e)||n.send(JSON.stringify(s))},{once:!0}),e.addEventListener("error",s),e.addEventListener("close",s),e.addEventListener("message",t=>{const e=JSON.parse(t.data);c(e)&&("conversation_initiation_metadata"===e.type?n(e.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),r=i.conversation_id,l=parseInt(i.agent_output_audio_format.replace("pcm_",""));return new u(e,r,l)}catch(t){var s;throw null==(s=e)||s.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}sendMessage(t){this.socket.send(JSON.stringify(t))}}const h={clientTools:{}},d={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{}};class p{static async startSession(e){const n=t({},h,d,e);n.onStatusChange({status:"connecting"});let s=null,o=null,i=null;try{return s=await a.create(16e3),o=await u.create(e),i=await l.create(o.sampleRate),new p(n,o,s,i)}catch(t){var r,c,f;throw n.onStatusChange({status:"disconnected"}),null==(r=o)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(f=i)?void 0:f.close()),t}}constructor(t,s,o,a){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await i.output.close(),i.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=async function(t){try{const n=JSON.parse(t.data);if(!c(n))return;switch(n.type){case"interruption":n.interruption_event&&(i.lastInterruptTimestamp=n.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:n.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){try{var e;const t=null!=(e=await i.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters))?e:"Client tool execution 
successful.";i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:t,is_error:!1})}catch(t){i.onError("Client tool execution failed with following error: "+(null==t?void 0:t.message),{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(n.client_tool_call);break}i.onError(`Client tool with name ${n.client_tool_call.tool_name} is not defined on client`,{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:`Client tool with name ${n.client_tool_call.tool_name} is not defined on client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=n.audio_event.event_id&&(i.addAudioBase64Chunk(n.audio_event.audio_base_64),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id});break;default:i.options.onDebug(n)}}catch(e){return void i.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:e(t.data[0].buffer)})},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=o,this.output=a,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
+
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)({}).hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},e.apply(null,arguments)}function t(e){const t=new Uint8Array(e);return window.btoa(String.fromCharCode(...t))}function n(e){const t=window.atob(e),n=t.length,a=new Uint8Array(n);for(let e=0;e<n;e++)a[e]=t.charCodeAt(e);return a.buffer}const a=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(a);class o{static async create({sampleRate:e,format:t,preferHeadphonesForIosDevices:n}){let a=null,i=null;try{const r={sampleRate:{ideal:e},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}},l=await navigator.mediaDevices.getUserMedia({audio:!0});if(null==l||l.getTracks().forEach(e=>e.stop()),(["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&n){const e=(await window.navigator.mediaDevices.enumerateDevices()).find(e=>"audioinput"===e.kind&&["airpod","headphone","earphone"].find(t=>e.label.toLowerCase().includes(t)));e&&(r.deviceId={ideal:e.deviceId})}const c=navigator.mediaDevices.getSupportedConstraints().sampleRate;a=new window.AudioContext(c?{sampleRate:e}:{});const u=a.createAnalyser();c||await a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await a.audioWorklet.addModule(s),i=await navigator.mediaDevices.getUserMedia({audio:r});const d=a.createMediaStreamSource(i),p=new AudioWorkletNode(a,"raw-audio-processor");return p.port.postMessage({type:"setFormat",format:t,sampleRate:e}),d.connect(u),u.connect(p),new o(a,u,p,i)}catch(e){var r,l;throw null==(r=i)||r.getTracks().forEach(e=>e.stop()),null==(l=a)||l.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=a}async close(){this.inputStream.getTracks().forEach(e=>e.stop()),await this.context.close()}}const i=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create({sampleRate:e,format:t}){let n=null;try{n=new AudioContext({sampleRate:e});const a=n.createAnalyser(),s=n.createGain();s.connect(a),a.connect(n.destination),await n.audioWorklet.addModule(r);const o=new AudioWorkletNode(n,"audio-concat-processor");return o.port.postMessage({type:"setFormat",format:t}),o.connect(s),new l(n,a,s,o)}catch(e){var a;throw null==(a=n)||a.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=a}async close(){await this.context.close()}}function c(e){return!!e.type}class u{static async create(e){let t=null;try{var n;const a=null!=(n=e.origin)?n:"wss://api.elevenlabs.io",s=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,o=["convai"];e.authorization&&o.push(`bearer.${e.authorization}`),t=new WebSocket(s,o);const i=await new Promise((n,a)=>{t.addEventListener("open",()=>{var n;const a={type:"conversation_initiation_client_data"};var s,o,i,r;e.overrides&&(a.conversation_config_override={agent:{prompt:null==(s=e.overrides.agent)?void 0:s.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(i=e.overrides.agent)?void 0:i.language},tts:{voice_id:null==(r=e.overrides.tts)?void 0:r.voiceId}}),e.customLlmExtraBody&&(a.custom_llm_extra_body=e.customLlmExtraBody),null==(n=t)||n.send(JSON.stringify(a))},{once:!0}),t.addEventListener("error",a),t.addEventListener("close",a),t.addEventListener("message",e=>{const t=JSON.parse(e.data);c(t)&&("conversation_initiation_metadata"===t.type?n(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),{conversation_id:r,agent_output_audio_format:l,user_input_audio_format:p}=i,h=d(null!=p?p:"pcm_16000"),m=d(l);return new u(t,r,h,m)}catch(e){var a;throw null==(a=t)||a.close(),e}}constructor(e,t,n,a){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=t,this.inputFormat=n,this.outputFormat=a}close(){this.socket.close()}sendMessage(e){this.socket.send(JSON.stringify(e))}}function d(e){const[t,n]=e.split("_");if(!["pcm","ulaw"].includes(t))throw new Error(`Invalid format: ${e}`);const a=parseInt(n);if(isNaN(a))throw new Error(`Invalid sample rate: ${n}`);return{format:t,sampleRate:a}}const 
p={clientTools:{}},h={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{},onCanSendFeedbackChange:()=>{}};class m{static async startSession(t){const n=e({},p,h,t);n.onStatusChange({status:"connecting"}),n.onCanSendFeedbackChange({canSendFeedback:!1});let a=null,s=null,i=null;try{return s=await u.create(t),[a,i]=await Promise.all([o.create(e({},s.inputFormat,{preferHeadphonesForIosDevices:t.preferHeadphonesForIosDevices})),l.create(s.outputFormat)]),new m(n,s,a,i)}catch(e){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=s)||r.close(),await(null==(c=a)?void 0:c.close()),await(null==(d=i)?void 0:d.close()),e}}constructor(e,a,s,o){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await i.output.close(),i.updateStatus("disconnected"))},this.updateMode=e=>{e!==this.mode&&(this.mode=e,this.options.onModeChange({mode:e}))},this.updateStatus=e=>{e!==this.status&&(this.status=e,this.options.onStatusChange({status:e}))},this.updateCanSendFeedback=()=>{const e=this.currentEventId!==this.lastFeedbackEventId;this.canSendFeedback!==e&&(this.canSendFeedback=e,this.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=async function(e){try{const n=JSON.parse(e.data);if(!c(n))return;switch(n.type){case"interruption":n.interruption_event&&(i.lastInterruptTimestamp=n.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:n.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){try{var t;const e=null!=(t=await i.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters))?t:"Client tool execution successful.";i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:e,is_error:!1})}catch(e){i.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(n.client_tool_call);break}i.onError(`Client tool with name ${n.client_tool_call.tool_name} is not defined on client`,{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:`Client tool with name ${n.client_tool_call.tool_name} is not defined on 
client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=n.audio_event.event_id&&(i.addAudioBase64Chunk(n.audio_event.audio_base_64),i.currentEventId=n.audio_event.event_id,i.updateCanSendFeedback(),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id});break;default:i.options.onDebug(n)}}catch(t){return void i.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=e=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:t(e.data[0].buffer)})},this.onOutputWorkletMessage=({data:e})=>{"process"===e.type&&this.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(e){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(e)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(e,t)=>{console.error(e,t),this.options.onError(e,t)},this.calculateVolume=e=>{if(0===e.length)return 0;let t=0;for(let n=0;n<e.length;n++)t+=e[n]/255;return t/=e.length,t<0?0:t>1?1:t},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:e})=>{this.volume=e},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.sendFeedback=e=>{this.canSendFeedback?(this.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:this.currentEventId}),this.lastFeedbackEventId=this.currentEventId,this.updateCanSendFeedback()):console.warn(0===this.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=a,this.input=s,this.output=o,this.options.onConnect({conversationId:a.conversationId}),this.connection.socket.addEventListener("message",e=>{this.onEvent(e)}),this.connection.socket.addEventListener("error",e=>{this.updateStatus("disconnected"),this.onError("Socket error",e)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}function f(e,t,n="https://api.elevenlabs.io"){return fetch(`${n}/v1/convai/conversations/${e}/feedback`,{method:"POST",body:JSON.stringify({feedback:t?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}export{m as Conversation,f as postOverallFeedback};
//# sourceMappingURL=lib.modern.js.map
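Reading the minified `lib.modern.js` above, the new bundle registers a default `onCanSendFeedbackChange` callback, adds a `sendFeedback` method to the `Conversation` instance, and exports a new `postOverallFeedback` helper that issues `POST /v1/convai/conversations/{conversationId}/feedback`. A minimal sketch of how these additions might be used, assuming the option and method names visible in the bundle (the agent id and variable names below are illustrative, not part of the package):

```ts
// Sketch inferred from the minified bundle above; the agent id and variable
// names are illustrative, not part of the package.
import { Conversation, postOverallFeedback } from "@11labs/client";

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  // new option surfaced in this bundle: prefer headphone-class inputs on iOS
  preferHeadphonesForIosDevices: true,
  // called whenever per-response feedback becomes available or unavailable
  onCanSendFeedbackChange: ({ canSendFeedback }) => {
    console.log("can send feedback:", canSendFeedback);
  },
});

// Rate the latest agent response: true => "like", false => "dislike".
conversation.sendFeedback(true);

// Overall conversation feedback goes through the new standalone helper,
// which POSTs to /v1/convai/conversations/{conversationId}/feedback.
await postOverallFeedback(conversation.getId(), true);
```

Per the bundled logic, `sendFeedback` is only transmitted while feedback is available for the latest agent response; otherwise the SDK logs a warning instead of sending.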