@11labs/client 0.1.4 → 0.2.0

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (55)
  1. package/README.md +90 -14
  2. package/dist/BaseConversation.d.ts +76 -0
  3. package/dist/TextConversation.d.ts +4 -0
  4. package/dist/VoiceConversation.d.ts +27 -0
  5. package/dist/dist/BaseConversation.d.ts +80 -0
  6. package/dist/dist/TextConversation.d.ts +4 -0
  7. package/dist/dist/VoiceConversation.d.ts +27 -0
  8. package/dist/dist/index.d.ts +12 -0
  9. package/dist/dist/lib.cjs +2 -0
  10. package/dist/dist/lib.cjs.map +1 -0
  11. package/dist/dist/lib.modern.js +2 -0
  12. package/dist/dist/lib.modern.js.map +1 -0
  13. package/dist/dist/lib.module.js +2 -0
  14. package/dist/dist/lib.module.js.map +1 -0
  15. package/dist/dist/lib.umd.js +2 -0
  16. package/dist/dist/lib.umd.js.map +1 -0
  17. package/dist/dist/utils/BaseConnection.d.ts +99 -0
  18. package/dist/dist/utils/ConnectionFactory.d.ts +2 -0
  19. package/dist/dist/utils/WebRTCConnection.d.ts +23 -0
  20. package/dist/dist/utils/WebSocketConnection.d.ts +13 -0
  21. package/dist/dist/utils/applyDelay.d.ts +2 -0
  22. package/dist/dist/utils/audio.d.ts +2 -0
  23. package/dist/dist/utils/audioConcatProcessor.d.ts +1 -0
  24. package/dist/dist/utils/compatibility.d.ts +2 -0
  25. package/dist/dist/utils/createWorkletModuleLoader.d.ts +1 -0
  26. package/dist/dist/utils/events.d.ts +125 -0
  27. package/dist/dist/utils/input.d.ts +14 -0
  28. package/dist/dist/utils/output.d.ts +10 -0
  29. package/dist/dist/utils/overrides.d.ts +4 -0
  30. package/dist/dist/utils/postOverallFeedback.d.ts +1 -0
  31. package/dist/dist/utils/rawAudioProcessor.d.ts +1 -0
  32. package/dist/dist/version.d.ts +1 -0
  33. package/dist/index.d.ts +9 -79
  34. package/dist/lib.cjs +1 -1
  35. package/dist/lib.cjs.map +1 -1
  36. package/dist/lib.modern.js +1 -1
  37. package/dist/lib.modern.js.map +1 -1
  38. package/dist/lib.module.js +1 -1
  39. package/dist/lib.module.js.map +1 -1
  40. package/dist/lib.umd.js +1 -1
  41. package/dist/lib.umd.js.map +1 -1
  42. package/dist/utils/BaseConnection.d.ts +94 -0
  43. package/dist/utils/ConnectionFactory.d.ts +2 -0
  44. package/dist/utils/WebRTCConnection.d.ts +19 -0
  45. package/dist/utils/WebSocketConnection.d.ts +12 -0
  46. package/dist/utils/applyDelay.d.ts +2 -0
  47. package/dist/utils/events.d.ts +13 -3
  48. package/dist/utils/input.d.ts +1 -1
  49. package/dist/utils/output.d.ts +1 -1
  50. package/dist/utils/overrides.d.ts +4 -0
  51. package/dist/utils/postOverallFeedback.d.ts +1 -0
  52. package/dist/version.d.ts +1 -0
  53. package/package.json +17 -12
  54. package/LICENSE +0 -21
  55. package/dist/utils/connection.d.ts +0 -65
package/README.md CHANGED
@@ -13,11 +13,11 @@ An SDK library for using ElevenLabs in browser based applications. If you're loo
  Install the package in your project through package manager.

  ```shell
- npm install @11labs/client
+ npm install @elevenlabs/client
  # or
- yarn add @11labs/client
+ yarn add @elevenlabs/client
  # or
- pnpm install @11labs/client
+ pnpm install @elevenlabs/client
  ```

  ## Usage
@@ -26,6 +26,10 @@ This library is primarily meant for development in vanilla JavaScript projects,
  It is recommended to check whether your specific framework has it's own library.
  However, you can use this library in any JavaScript-based project.

+ ### Connection types
+
+ A conversation can be started via one of two connection types: WebSockets (the default) or WebRTC.
+
  ### Initialize conversation

  First, initialize the Conversation instance:
@@ -48,38 +52,38 @@ try {

  #### Session configuration

- The options passed to `startSession` specifiy how the session is established. There are two ways to start a session:
+ The options passed to `startSession` specifiy how the session is established. There are three ways to start a session:

- ##### Using Agent ID
+ ##### Public agents
+
+ Agents that don't require any authentication can be used to start a conversation by using the agent ID and the connection type. The agent ID can be acquired through the [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai).

- Agent ID can be acquired through [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai).
  For public agents, you can use the ID directly:

  ```js
  const conversation = await Conversation.startSession({
    agentId: "<your-agent-id>",
+   connectionType: "webrtc", // 'websocket' is also accepted
  });
  ```

- ##### Using a signed URL
+ ##### Private agents

- If the conversation requires authorization, you will need to add a dedicated endpoint to your server that
- will request a signed url using the [ElevenLabs API](https://elevenlabs.io/docs/introduction) and pass it back to the client.
+ If the conversation requires authorization, you will need to add a dedicated endpoint to your server that will either request a signed url (if using the WebSockets connection type) or a conversation token (if using WebRTC) using the [ElevenLabs API](https://elevenlabs.io/docs/introduction) and pass it back to the client.

- Here's an example of how it could be set up:
+ Here's an example for a WebSocket connection:

  ```js
  // Node.js server

  app.get("/signed-url", yourAuthMiddleware, async (req, res) => {
    const response = await fetch(
-     `https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id=${process.env.AGENT_ID}`,
+     `https://api.elevenlabs.io/v1/convai/conversation/get-signed-url?agent_id=${process.env.AGENT_ID}`,
      {
-       method: "GET",
        headers: {
          // Requesting a signed url requires your ElevenLabs API key
          // Do NOT expose your API key to the client!
-         "xi-api-key": process.env.XI_API_KEY,
+         "xi-api-key": process.env.ELEVENLABS_API_KEY,
        },
      }
    );
@@ -99,7 +103,50 @@ app.get("/signed-url", yourAuthMiddleware, async (req, res) => {
  const response = await fetch("/signed-url", yourAuthHeaders);
  const signedUrl = await response.text();

- const conversation = await Conversation.startSession({ signedUrl });
+ const conversation = await Conversation.startSession({
+   signedUrl,
+   connectionType: "websocket",
+ });
+ ```
+
+ Here's an example for WebRTC:
+
+ ```js
+ // Node.js server
+
+ app.get("/conversation-token", yourAuthMiddleware, async (req, res) => {
+   const response = await fetch(
+     `https://api.elevenlabs.io/v1/convai/conversation/token?agent_id=${process.env.AGENT_ID}`,
+     {
+       headers: {
+         // Requesting a conversation token requires your ElevenLabs API key
+         // Do NOT expose your API key to the client!
+         'xi-api-key': process.env.ELEVENLABS_API_KEY,
+       }
+     }
+   );
+
+   if (!response.ok) {
+     return res.status(500).send("Failed to get conversation token");
+   }
+
+   const body = await response.json();
+   res.send(body.token);
+ });
+ ```
+
+ Once you have the token, providing it to `startSession` will initiate the conversation using WebRTC.
+
+ ```js
+ // Client
+
+ const response = await fetch("/conversation-token", yourAuthHeaders);
+ const conversationToken = await response.text();
+
+ const conversation = await Conversation.startSession({
+   conversationToken,
+   connectionType: "webrtc",
+ });
  ```

  #### Optional callbacks
@@ -155,10 +202,39 @@ const conversation = await Conversation.startSession({
      tts: {
        voiceId: "custom voice id",
      },
+     conversation: {
+       textOnly: true,
+     },
    },
  });
  ```

+ #### User identification
+
+ You can optionally pass a user ID to identify the user in the conversation. This can be your own customer identifier. This will be included in the conversation initiation data sent to the server:
+
+ Tracking this ID can be helpful for filtering conversations, tracking analytics on a user level, etc.
+
+ ```ts
+ const conversation = await Conversation.startSession({
+   agentId: "<your-agent-id>",
+   userId: "user-123", // Optional user identifier
+   connectionType: "webrtc",
+ });
+ ```
+
+ #### Text only
+
+ If your agent is configured to run in text-only mode, i.e. it does not send or receive audio messages,
+ you can use this flag to use a lighter version of the conversation. In that case, the
+ user will not be asked for microphone permissions and no audio context will be created.
+
+ ```ts
+ const conversation = await Conversation.startSession({
+   textOnly: true,
+ });
+ ```
+
  #### Prefer Headphones for iOS Devices

  While this SDK leaves the choice of audio input/output device to the browser/system, iOS Safari seem to prefer the built-in speaker over headphones even when bluetooth device is in use. If you want to "force" the use of headphones on iOS devices when available, you can use the following option. Please, keep in mind that this is not guaranteed, since this functionality is not provided by the browser. System audio should be the default choice.
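Editor's note: pulling the new README pieces together, a session start for a public agent now looks roughly like this. This is a minimal sketch using only the options and callbacks documented above; the agent ID is a placeholder and the snippet assumes an async context (e.g. an ESM module with top-level await).

```ts
import { Conversation } from "@elevenlabs/client";

// Sketch: public agent over WebRTC, wiring up the documented callbacks.
const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>", // placeholder, from the ElevenLabs UI
  connectionType: "webrtc", // or "websocket" (the default)
  userId: "user-123", // optional customer identifier
  onConnect: ({ conversationId }) => console.log("connected:", conversationId),
  onMessage: ({ source, message }) => console.log(source, message),
  onModeChange: ({ mode }) => console.log("agent is", mode),
  onStatusChange: ({ status }) => console.log("status:", status),
});

// Later: close the connection and release audio resources.
await conversation.endSession();
```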
package/dist/BaseConversation.d.ts ADDED
@@ -0,0 +1,76 @@
+ import type { BaseConnection, OnDisconnectCallback, SessionConfig } from "./utils/BaseConnection";
+ import type { AgentAudioEvent, AgentResponseEvent, ClientToolCallEvent, InternalTentativeAgentResponseEvent, InterruptionEvent, UserTranscriptionEvent } from "./utils/events";
+ import type { InputConfig } from "./utils/input";
+ export type Role = "user" | "ai";
+ export type Mode = "speaking" | "listening";
+ export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
+ export type Options = SessionConfig & Callbacks & ClientToolsConfig & InputConfig;
+ export type PartialOptions = SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig> & Partial<InputConfig>;
+ export type ClientToolsConfig = {
+     clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
+ };
+ export type Callbacks = {
+     onConnect: (props: {
+         conversationId: string;
+     }) => void;
+     onDebug: (props: any) => void;
+     onDisconnect: OnDisconnectCallback;
+     onError: (message: string, context?: any) => void;
+     onMessage: (props: {
+         message: string;
+         source: Role;
+     }) => void;
+     onAudio: (base64Audio: string) => void;
+     onModeChange: (prop: {
+         mode: Mode;
+     }) => void;
+     onStatusChange: (prop: {
+         status: Status;
+     }) => void;
+     onCanSendFeedbackChange: (prop: {
+         canSendFeedback: boolean;
+     }) => void;
+     onUnhandledClientToolCall?: (params: ClientToolCallEvent["client_tool_call"]) => void;
+ };
+ export declare class BaseConversation {
+     protected readonly options: Options;
+     protected readonly connection: BaseConnection;
+     protected lastInterruptTimestamp: number;
+     protected mode: Mode;
+     protected status: Status;
+     protected volume: number;
+     protected currentEventId: number;
+     protected lastFeedbackEventId: number;
+     protected canSendFeedback: boolean;
+     protected static getFullOptions(partialOptions: PartialOptions): Options;
+     protected constructor(options: Options, connection: BaseConnection);
+     endSession(): Promise<void>;
+     private endSessionWithDetails;
+     protected handleEndSession(): Promise<void>;
+     protected updateMode(mode: Mode): void;
+     protected updateStatus(status: Status): void;
+     protected updateCanSendFeedback(): void;
+     protected handleInterruption(event: InterruptionEvent): void;
+     protected handleAgentResponse(event: AgentResponseEvent): void;
+     protected handleUserTranscript(event: UserTranscriptionEvent): void;
+     protected handleTentativeAgentResponse(event: InternalTentativeAgentResponseEvent): void;
+     protected handleClientToolCall(event: ClientToolCallEvent): Promise<void>;
+     protected handleAudio(event: AgentAudioEvent): void;
+     private onMessage;
+     private onError;
+     getId(): string;
+     isOpen(): boolean;
+     setVolume: ({ volume }: {
+         volume: number;
+     }) => void;
+     setMicMuted(isMuted: boolean): void;
+     getInputByteFrequencyData(): Uint8Array;
+     getOutputByteFrequencyData(): Uint8Array;
+     getInputVolume(): number;
+     getOutputVolume(): number;
+     sendFeedback(like: boolean): void;
+     sendContextualUpdate(text: string): void;
+     sendUserMessage(text: string): void;
+     sendUserActivity(): void;
+     sendMCPToolApprovalResult(toolCallId: string, isApproved: boolean): void;
+ }
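Editor's note: the declaration above also shows the messaging and feedback surface shared by all conversation kinds. A hedged sketch of how those members compose, tracking `onCanSendFeedbackChange` yourself (the agent ID and messages are placeholders):

```ts
import { Conversation } from "@elevenlabs/client";

let canSendFeedback = false;

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  connectionType: "websocket",
  onCanSendFeedbackChange: (p) => {
    canSendFeedback = p.canSendFeedback;
  },
});

conversation.sendUserMessage("What are your opening hours?"); // text input
conversation.sendUserActivity(); // e.g. fire while the user is typing
conversation.sendContextualUpdate("User switched to the pricing page");

// Feedback is gated per agent response; only send when the SDK allows it.
if (canSendFeedback) {
  conversation.sendFeedback(true); // true = like, false = dislike
}
```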
package/dist/TextConversation.d.ts ADDED
@@ -0,0 +1,4 @@
+ import { BaseConversation, type PartialOptions } from "./BaseConversation";
+ export declare class TextConversation extends BaseConversation {
+     static startSession(options: PartialOptions): Promise<TextConversation>;
+ }
package/dist/VoiceConversation.d.ts ADDED
@@ -0,0 +1,27 @@
+ import { Input } from "./utils/input";
+ import { Output } from "./utils/output";
+ import type { BaseConnection } from "./utils/BaseConnection";
+ import type { AgentAudioEvent, InterruptionEvent } from "./utils/events";
+ import { BaseConversation, type Options, type PartialOptions } from "./BaseConversation";
+ export declare class VoiceConversation extends BaseConversation {
+     readonly input: Input;
+     readonly output: Output;
+     wakeLock: WakeLockSentinel | null;
+     static startSession(options: PartialOptions): Promise<VoiceConversation>;
+     private inputFrequencyData?;
+     private outputFrequencyData?;
+     protected constructor(options: Options, connection: BaseConnection, input: Input, output: Output, wakeLock: WakeLockSentinel | null);
+     protected handleEndSession(): Promise<void>;
+     protected handleInterruption(event: InterruptionEvent): void;
+     protected handleAudio(event: AgentAudioEvent): void;
+     private onInputWorkletMessage;
+     private onOutputWorkletMessage;
+     private addAudioBase64Chunk;
+     private fadeOutAudio;
+     private calculateVolume;
+     setMicMuted(isMuted: boolean): void;
+     getInputByteFrequencyData(): Uint8Array;
+     getOutputByteFrequencyData(): Uint8Array;
+     getInputVolume(): number;
+     getOutputVolume(): number;
+ }
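Editor's note: `VoiceConversation` adds the audio plumbing: microphone mute plus analyser-backed level helpers. A sketch of a simple level meter built on those getters, assuming a browser environment with top-level await (rendering is left abstract):

```ts
import { Conversation } from "@elevenlabs/client";

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  connectionType: "webrtc",
});

// Mute or unmute the local microphone without ending the session.
conversation.setMicMuted(true);
conversation.setMicMuted(false);

// Drive a simple level meter from the analyser-backed helpers.
function renderLevels(): void {
  const inputVolume = conversation.getInputVolume(); // normalized 0..1
  const outputVolume = conversation.getOutputVolume(); // normalized 0..1
  const spectrum = conversation.getOutputByteFrequencyData(); // byte frequency bins
  console.debug(inputVolume, outputVolume, spectrum.length); // ...or draw in your UI
  requestAnimationFrame(renderLevels);
}
requestAnimationFrame(renderLevels);
```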
package/dist/dist/BaseConversation.d.ts ADDED
@@ -0,0 +1,80 @@
+ import type { BaseConnection, OnDisconnectCallback, SessionConfig } from "./utils/BaseConnection";
+ import type { AgentAudioEvent, AgentResponseEvent, ClientToolCallEvent, InternalTentativeAgentResponseEvent, InterruptionEvent, UserTranscriptionEvent, VadScoreEvent } from "./utils/events";
+ import type { InputConfig } from "./utils/input";
+ export type Role = "user" | "ai";
+ export type Mode = "speaking" | "listening";
+ export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
+ export type Options = SessionConfig & Callbacks & ClientToolsConfig & InputConfig;
+ export type PartialOptions = SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig> & Partial<InputConfig>;
+ export type ClientToolsConfig = {
+     clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
+ };
+ export type Callbacks = {
+     onConnect: (props: {
+         conversationId: string;
+     }) => void;
+     onDebug: (props: any) => void;
+     onDisconnect: OnDisconnectCallback;
+     onError: (message: string, context?: any) => void;
+     onMessage: (props: {
+         message: string;
+         source: Role;
+     }) => void;
+     onAudio?: (base64Audio: string) => void;
+     onModeChange: (prop: {
+         mode: Mode;
+     }) => void;
+     onStatusChange: (prop: {
+         status: Status;
+     }) => void;
+     onCanSendFeedbackChange: (prop: {
+         canSendFeedback: boolean;
+     }) => void;
+     onUnhandledClientToolCall?: (params: ClientToolCallEvent["client_tool_call"]) => void;
+     onVadScore?: (props: {
+         vadScore: number;
+     }) => void;
+ };
+ export declare class BaseConversation {
+     protected readonly options: Options;
+     protected readonly connection: BaseConnection;
+     protected lastInterruptTimestamp: number;
+     protected mode: Mode;
+     protected status: Status;
+     protected volume: number;
+     protected currentEventId: number;
+     protected lastFeedbackEventId: number;
+     protected canSendFeedback: boolean;
+     protected static getFullOptions(partialOptions: PartialOptions): Options;
+     protected constructor(options: Options, connection: BaseConnection);
+     endSession(): Promise<void>;
+     private endSessionWithDetails;
+     protected handleEndSession(): Promise<void>;
+     protected updateMode(mode: Mode): void;
+     protected updateStatus(status: Status): void;
+     protected updateCanSendFeedback(): void;
+     protected handleInterruption(event: InterruptionEvent): void;
+     protected handleAgentResponse(event: AgentResponseEvent): void;
+     protected handleUserTranscript(event: UserTranscriptionEvent): void;
+     protected handleTentativeAgentResponse(event: InternalTentativeAgentResponseEvent): void;
+     protected handleVadScore(event: VadScoreEvent): void;
+     protected handleClientToolCall(event: ClientToolCallEvent): Promise<void>;
+     protected handleAudio(event: AgentAudioEvent): void;
+     private onMessage;
+     private onError;
+     getId(): string;
+     isOpen(): boolean;
+     setVolume: ({ volume }: {
+         volume: number;
+     }) => void;
+     setMicMuted(isMuted: boolean): void;
+     getInputByteFrequencyData(): Uint8Array<ArrayBuffer>;
+     getOutputByteFrequencyData(): Uint8Array<ArrayBuffer>;
+     getInputVolume(): number;
+     getOutputVolume(): number;
+     sendFeedback(like: boolean): void;
+     sendContextualUpdate(text: string): void;
+     sendUserMessage(text: string): void;
+     sendUserActivity(): void;
+     sendMCPToolApprovalResult(toolCallId: string, isApproved: boolean): void;
+ }
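Editor's note: this nested-`dist` copy of the declaration differs from the one above in two visible ways: `onAudio` becomes optional, and an optional `onVadScore` callback appears (with a matching `handleVadScore` handler). A sketch of subscribing to it; the score's exact range is not documented here, so treat it as server-defined:

```ts
import { Conversation } from "@elevenlabs/client";

const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",
  connectionType: "websocket",
  // New optional callback: voice-activity-detection scores from the server.
  onVadScore: ({ vadScore }) => {
    console.log("VAD score:", vadScore);
  },
  // onAudio is optional in this build; omit it if you don't need raw chunks.
  onAudio: (base64Audio) => {
    console.debug("agent audio chunk, base64 length:", base64Audio.length);
  },
});
```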
package/dist/dist/TextConversation.d.ts ADDED
@@ -0,0 +1,4 @@
+ import { BaseConversation, type PartialOptions } from "./BaseConversation";
+ export declare class TextConversation extends BaseConversation {
+     static startSession(options: PartialOptions): Promise<TextConversation>;
+ }
package/dist/dist/VoiceConversation.d.ts ADDED
@@ -0,0 +1,27 @@
+ import { Input } from "./utils/input";
+ import { Output } from "./utils/output";
+ import type { BaseConnection } from "./utils/BaseConnection";
+ import type { AgentAudioEvent, InterruptionEvent } from "./utils/events";
+ import { BaseConversation, type Options, type PartialOptions } from "./BaseConversation";
+ export declare class VoiceConversation extends BaseConversation {
+     readonly input: Input;
+     readonly output: Output;
+     wakeLock: WakeLockSentinel | null;
+     static startSession(options: PartialOptions): Promise<VoiceConversation>;
+     private inputFrequencyData?;
+     private outputFrequencyData?;
+     protected constructor(options: Options, connection: BaseConnection, input: Input, output: Output, wakeLock: WakeLockSentinel | null);
+     protected handleEndSession(): Promise<void>;
+     protected handleInterruption(event: InterruptionEvent): void;
+     protected handleAudio(event: AgentAudioEvent): void;
+     private onInputWorkletMessage;
+     private onOutputWorkletMessage;
+     private addAudioBase64Chunk;
+     private fadeOutAudio;
+     private calculateVolume;
+     setMicMuted(isMuted: boolean): void;
+     getInputByteFrequencyData(): Uint8Array<ArrayBufferLike>;
+     getOutputByteFrequencyData(): Uint8Array<ArrayBufferLike>;
+     getInputVolume(): number;
+     getOutputVolume(): number;
+ }
package/dist/dist/index.d.ts ADDED
@@ -0,0 +1,12 @@
+ import { BaseConversation, type PartialOptions } from "./BaseConversation";
+ export type { Mode, Role, Options, PartialOptions, ClientToolsConfig, Callbacks, Status, } from "./BaseConversation";
+ export type { InputConfig } from "./utils/input";
+ export type { IncomingSocketEvent, VadScoreEvent } from "./utils/events";
+ export type { SessionConfig, BaseSessionConfig, DisconnectionDetails, Language, ConnectionType, } from "./utils/BaseConnection";
+ export { createConnection } from "./utils/ConnectionFactory";
+ export { WebSocketConnection } from "./utils/WebSocketConnection";
+ export { WebRTCConnection } from "./utils/WebRTCConnection";
+ export { postOverallFeedback } from "./utils/postOverallFeedback";
+ export declare class Conversation extends BaseConversation {
+     static startSession(options: PartialOptions): Promise<Conversation>;
+ }
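Editor's note: the rebuilt entry point re-exports the connection layer and a standalone feedback helper. A sketch of both, with the `postOverallFeedback` signature inferred from the bundled source below (conversation ID, a like/dislike boolean, and an optional API origin defaulting to `https://api.elevenlabs.io`); the IDs are placeholders:

```ts
import { createConnection, postOverallFeedback } from "@elevenlabs/client";

// Rate a whole conversation after it ends: true = like, false = dislike.
await postOverallFeedback("<conversation-id>", true);

// The factory behind startSession is exported too; it takes the same
// session config and resolves to a WebSocket- or WebRTC-backed connection.
const connection = await createConnection({
  agentId: "<your-agent-id>",
  connectionType: "websocket",
});
connection.close();
```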
package/dist/dist/lib.cjs ADDED
@@ -0,0 +1,2 @@
+ var e=require("livekit-client");function n(){return n=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},n.apply(null,arguments)}function t(e,n){e.prototype=Object.create(n.prototype),e.prototype.constructor=e,o(e,n)}function o(e,n){return o=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(e,n){return e.__proto__=n,e},o(e,n)}var r=new Uint8Array(0),i=/*#__PURE__*/function(){function e(e,n){var t=this,o=this,r=this;this.options=void 0,this.connection=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=0,this.canSendFeedback=!1,this.endSessionWithDetails=function(e){try{return"connected"!==o.status&&"connecting"!==o.status?Promise.resolve():(o.updateStatus("disconnecting"),Promise.resolve(o.handleEndSession()).then(function(){o.updateStatus("disconnected"),o.options.onDisconnect(e)}))}catch(e){return Promise.reject(e)}},this.onMessage=function(e){try{switch(e.type){case"interruption":return r.handleInterruption(e),Promise.resolve();case"agent_response":return r.handleAgentResponse(e),Promise.resolve();case"user_transcript":return r.handleUserTranscript(e),Promise.resolve();case"internal_tentative_agent_response":return r.handleTentativeAgentResponse(e),Promise.resolve();case"client_tool_call":return Promise.resolve(r.handleClientToolCall(e)).then(function(){});case"audio":return r.handleAudio(e),Promise.resolve();case"vad_score":return r.handleVadScore(e),Promise.resolve();case"ping":return r.connection.sendMessage({type:"pong",event_id:e.ping_event.event_id}),Promise.resolve();default:return r.options.onDebug(e),Promise.resolve()}}catch(e){return Promise.reject(e)}},this.setVolume=function(e){t.volume=e.volume},this.options=e,this.connection=n,this.options.onConnect({conversationId:n.conversationId}),this.connection.onMessage(this.onMessage),this.connection.onDisconnect(this.endSessionWithDetails),this.connection.onModeChange(function(e){return t.updateMode(e)}),this.updateStatus("connected")}e.getFullOptions=function(e){return n({clientTools:{},onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onAudio:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},e)};var t=e.prototype;return t.endSession=function(){return this.endSessionWithDetails({reason:"user"})},t.handleEndSession=function(){try{return this.connection.close(),Promise.resolve()}catch(e){return Promise.reject(e)}},t.updateMode=function(e){e!==this.mode&&(this.mode=e,this.options.onModeChange({mode:e}))},t.updateStatus=function(e){e!==this.status&&(this.status=e,this.options.onStatusChange({status:e}))},t.updateCanSendFeedback=function(){var 
e=this.currentEventId!==this.lastFeedbackEventId;this.canSendFeedback!==e&&(this.canSendFeedback=e,this.options.onCanSendFeedbackChange({canSendFeedback:e}))},t.handleInterruption=function(e){e.interruption_event&&(this.lastInterruptTimestamp=e.interruption_event.event_id)},t.handleAgentResponse=function(e){this.options.onMessage({source:"ai",message:e.agent_response_event.agent_response})},t.handleUserTranscript=function(e){this.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript})},t.handleTentativeAgentResponse=function(e){this.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response})},t.handleVadScore=function(e){this.options.onVadScore&&this.options.onVadScore({vadScore:e.vad_score_event.vad_score})},t.handleClientToolCall=function(e){try{var n=this;return Promise.resolve(function(){if(Object.prototype.hasOwnProperty.call(n.options.clientTools,e.client_tool_call.tool_name)){var t=function(t,o){try{var r=Promise.resolve(n.options.clientTools[e.client_tool_call.tool_name](e.client_tool_call.parameters)).then(function(t){var o="object"==typeof t?JSON.stringify(t):String(t);n.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,result:o,is_error:!1})})}catch(e){return o(e)}return r&&r.then?r.then(void 0,o):r}(0,function(t){n.onError("Client tool execution failed with following error: "+(null==t?void 0:t.message),{clientToolName:e.client_tool_call.tool_name}),n.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})});if(t&&t.then)return t.then(function(){})}else{if(n.options.onUnhandledClientToolCall)return void n.options.onUnhandledClientToolCall(e.client_tool_call);n.onError("Client tool with name "+e.client_tool_call.tool_name+" is not defined on client",{clientToolName:e.client_tool_call.tool_name}),n.connection.sendMessage({type:"client_tool_result",tool_call_id:e.client_tool_call.tool_call_id,result:"Client tool with name "+e.client_tool_call.tool_name+" is not defined on client",is_error:!0})}}())}catch(e){return Promise.reject(e)}},t.handleAudio=function(e){},t.onError=function(e,n){console.error(e,n),this.options.onError(e,n)},t.getId=function(){return this.connection.conversationId},t.isOpen=function(){return"connected"===this.status},t.setMicMuted=function(e){this.connection.setMicMuted(e)},t.getInputByteFrequencyData=function(){return r},t.getOutputByteFrequencyData=function(){return r},t.getInputVolume=function(){return 0},t.getOutputVolume=function(){return 0},t.sendFeedback=function(e){this.canSendFeedback?(this.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:this.currentEventId}),this.lastFeedbackEventId=this.currentEventId,this.updateCanSendFeedback()):console.warn(0===this.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},t.sendContextualUpdate=function(e){this.connection.sendMessage({type:"contextual_update",text:e})},t.sendUserMessage=function(e){this.connection.sendMessage({type:"user_message",text:e})},t.sendUserActivity=function(){this.connection.sendMessage({type:"user_activity"})},t.sendMCPToolApprovalResult=function(e,n){this.connection.sendMessage({type:"mcp_tool_approval_result",tool_call_id:e,is_approved:n})},e}(),s=/*#__PURE__*/function(){function e(e){void 
0===e&&(e={}),this.queue=[],this.disconnectionDetails=null,this.onDisconnectCallback=null,this.onMessageCallback=null,this.onModeChangeCallback=null,this.onDebug=void 0,this.onDebug=e.onDebug}var n=e.prototype;return n.debug=function(e){this.onDebug&&this.onDebug(e)},n.onMessage=function(e){this.onMessageCallback=e;var n=this.queue;this.queue=[],n.length>0&&queueMicrotask(function(){n.forEach(e)})},n.onDisconnect=function(e){this.onDisconnectCallback=e;var n=this.disconnectionDetails;n&&queueMicrotask(function(){e(n)})},n.onModeChange=function(e){this.onModeChangeCallback=e},n.updateMode=function(e){var n;null==(n=this.onModeChangeCallback)||n.call(this,e)},n.disconnect=function(e){var n;this.disconnectionDetails||(this.disconnectionDetails=e,null==(n=this.onDisconnectCallback)||n.call(this,e))},n.handleMessage=function(e){this.onMessageCallback?this.onMessageCallback(e):this.queue.push(e)},e}();function a(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=Number.parseInt(o);if(Number.isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}var c="0.5.0";function u(e){return!!e.type}var l="conversation_initiation_client_data";function d(e){var n,t,o,r,i,s,a={type:l};return e.overrides&&(a.conversation_config_override={agent:{prompt:null==(t=e.overrides.agent)?void 0:t.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(r=e.overrides.agent)?void 0:r.language},tts:{voice_id:null==(i=e.overrides.tts)?void 0:i.voiceId},conversation:{text_only:null==(s=e.overrides.conversation)?void 0:s.textOnly}}),e.customLlmExtraBody&&(a.custom_llm_extra_body=e.customLlmExtraBody),e.dynamicVariables&&(a.dynamic_variables=e.dynamicVariables),e.userId&&(a.user_id=e.userId),null!=(n=e.overrides)&&n.client&&(a.source_info={source:e.overrides.client.source,version:e.overrides.client.version}),a}var h=/*#__PURE__*/function(e){function n(n,t,o,r){var i;return(i=e.call(this)||this).socket=void 0,i.conversationId=void 0,i.inputFormat=void 0,i.outputFormat=void 0,i.socket=n,i.conversationId=t,i.inputFormat=o,i.outputFormat=r,i.socket.addEventListener("error",function(e){setTimeout(function(){return i.disconnect({reason:"error",message:"The connection was closed due to a socket error.",context:e})},0)}),i.socket.addEventListener("close",function(e){i.disconnect(1e3===e.code?{reason:"agent",context:e}:{reason:"error",message:e.reason||"The connection was closed by the server.",context:e})}),i.socket.addEventListener("message",function(e){try{var n=JSON.parse(e.data);if(!u(n))return;i.handleMessage(n)}catch(e){}}),i}t(n,e),n.create=function(e){try{var t=null;return Promise.resolve(function(o,r){try{var i=function(){var o,r,i,s,l=null!=(o=e.origin)?o:"wss://api.elevenlabs.io",h=(null==(r=e.overrides)||null==(r=r.client)?void 0:r.version)||c,f=(null==(i=e.overrides)||null==(i=i.client)?void 0:i.source)||"js_sdk";if(e.signedUrl){var p=e.signedUrl.includes("?")?"&":"?";s=""+e.signedUrl+p+"source="+f+"&version="+h}else s=l+"/v1/convai/conversation?agent_id="+e.agentId+"&source="+f+"&version="+h;var v=["convai"];return e.authorization&&v.push("bearer."+e.authorization),t=new WebSocket(s,v),Promise.resolve(new Promise(function(n,o){t.addEventListener("open",function(){var n,o=d(e);null==(n=t)||n.send(JSON.stringify(o))},{once:!0}),t.addEventListener("error",function(e){setTimeout(function(){return o(e)},0)}),t.addEventListener("close",o),t.addEventListener("message",function(e){var 
t=JSON.parse(e.data);u(t)&&("conversation_initiation_metadata"===t.type?n(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(e){var o=e.conversation_id,r=e.agent_output_audio_format,i=e.user_input_audio_format,s=a(null!=i?i:"pcm_16000"),c=a(r);return new n(t,o,s,c)})}()}catch(e){return r(e)}return i&&i.then?i.then(void 0,r):i}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var o=n.prototype;return o.close=function(){this.socket.close()},o.sendMessage=function(e){this.socket.send(JSON.stringify(e))},o.setMicMuted=function(e){try{return console.warn("WebSocket connection setMicMuted called with "+e+", but this is handled by VoiceConversation"),Promise.resolve()}catch(e){return Promise.reject(e)}},n}(s);function f(e){var n=new Uint8Array(e);return window.btoa(String.fromCharCode.apply(String,n))}function p(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}function v(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var m=new Map;function g(e,n){return function(t){try{var o,r=function(r){return o?r:v(function(){var o="data:application/javascript;base64,"+btoa(n);return Promise.resolve(t.addModule(o)).then(function(){m.set(e,o)})},function(){throw new Error("Failed to load the "+e+" worklet module. Make sure the browser supports AudioWorklets.")})},i=m.get(e);if(i)return Promise.resolve(t.addModule(i));var s=new Blob([n],{type:"application/javascript"}),a=URL.createObjectURL(s),c=v(function(){return Promise.resolve(t.addModule(a)).then(function(){m.set(e,a),o=1})},function(){URL.revokeObjectURL(a)});return Promise.resolve(c&&c.then?c.then(r):r(c))}catch(e){return Promise.reject(e)}}}var y=g("raw-audio-processor",'\nconst BIAS = 0x84;\nconst CLIP = 32635;\nconst encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n];\n\nfunction encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n}\n\nclass RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.isMuted = false;\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n this.format = data.format;\n\n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n }\n break;\n case "setMuted":\n this.isMuted = data.isMuted;\n break;\n }\n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input 
= inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = this.isMuted \n ? new Float32Array(this.buffer.length)\n : new Float32Array(this.buffer);\n\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n}\nregisterProcessor("raw-audio-processor", RawAudioProcessor);\n');function _(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var b=/*#__PURE__*/function(n){function o(e,t,o,r,i){var s;return void 0===i&&(i={}),(s=n.call(this,i)||this).conversationId=void 0,s.inputFormat=void 0,s.outputFormat=void 0,s.room=void 0,s.isConnected=!1,s.audioEventId=1,s.audioCaptureContext=null,s.room=e,s.conversationId=t,s.inputFormat=o,s.outputFormat=r,s.setupRoomEventListeners(),s}t(o,n),o.create=function(n){try{var t,r=function(r){var i=new e.Room;return _(function(){var r="room_"+Date.now(),s=a("pcm_48000"),c=a("pcm_48000"),u=new o(i,r,s,c,n);return Promise.resolve(i.connect(n.livekitUrl||"wss://livekit.rtc.elevenlabs.io",t)).then(function(){return Promise.resolve(new Promise(function(n){if(u.isConnected)n();else{var t=function(){i.off(e.RoomEvent.Connected,t),n()};i.on(e.RoomEvent.Connected,t)}})).then(function(){var e;return i.name&&(u.conversationId=(null==(e=i.name.match(/(conv_[a-zA-Z0-9]+)/))?void 0:e[0])||i.name),Promise.resolve(i.localParticipant.setMicrophoneEnabled(!0)).then(function(){var e=d(n);return u.debug({type:l,message:e}),Promise.resolve(u.sendMessage(e)).then(function(){return u})})})})},function(e){return Promise.resolve(i.disconnect()).then(function(){throw e})})},i=function(){if(!("conversationToken"in n)||!n.conversationToken)return function(){if("agentId"in n&&n.agentId)return _(function(){var e,o,r,i=(null==(e=n.overrides)||null==(e=e.client)?void 0:e.version)||c,s=(null==(o=n.overrides)||null==(o=o.client)?void 0:o.source)||"js_sdk",a=function(e){return e.replace(/^wss:\/\//,"https://")}(null!=(r=n.origin)?r:"https://api.elevenlabs.io");return Promise.resolve(fetch(a+"/v1/convai/conversation/token?agent_id="+n.agentId+"&source="+s+"&version="+i)).then(function(e){if(!e.ok)throw new Error("ElevenLabs API returned "+e.status+" "+e.statusText);return Promise.resolve(e.json()).then(function(e){if(!(t=e.token))throw new Error("No conversation token received from 
API")})})},function(e){var t=e instanceof Error?e.message:String(e);throw e instanceof Error&&e.message.includes("401")&&(t="Your agent has authentication enabled, but no signed URL or conversation token was provided."),new Error("Failed to fetch conversation token for agent "+n.agentId+": "+t)});throw new Error("Either conversationToken or agentId is required for WebRTC connection")}();t=n.conversationToken}();return Promise.resolve(i&&i.then?i.then(r):r())}catch(e){return Promise.reject(e)}};var r=o.prototype;return r.setupRoomEventListeners=function(){var n=this,t=this,o=this,r=this;this.room.on(e.RoomEvent.Connected,function(){try{return t.isConnected=!0,console.info("WebRTC room connected"),Promise.resolve()}catch(e){return Promise.reject(e)}}),this.room.on(e.RoomEvent.Disconnected,function(e){n.isConnected=!1,n.disconnect({reason:"agent",context:new CloseEvent("close",{reason:null==e?void 0:e.toString()})})}),this.room.on(e.RoomEvent.ConnectionStateChanged,function(t){t===e.ConnectionState.Disconnected&&(n.isConnected=!1,n.disconnect({reason:"error",message:"LiveKit connection state changed to "+t,context:new Event("connection_state_changed")}))}),this.room.on(e.RoomEvent.DataReceived,function(e,t){try{var o=JSON.parse((new TextDecoder).decode(e));if("audio"===o.type)return;u(o)?n.handleMessage(o):console.warn("Invalid socket event received:",o)}catch(n){console.warn("Failed to parse incoming data message:",n),console.warn("Raw payload:",(new TextDecoder).decode(e))}}),this.room.on(e.RoomEvent.TrackSubscribed,function(n,t,r){try{var i=function(){if(n.kind===e.Track.Kind.Audio&&r.identity.includes("agent")){var t=n,i=t.attach();return i.autoplay=!0,i.controls=!1,i.style.display="none",document.body.appendChild(i),Promise.resolve(o.setupAudioCapture(t)).then(function(){})}}();return Promise.resolve(i&&i.then?i.then(function(){}):void 0)}catch(e){return Promise.reject(e)}}),this.room.on(e.RoomEvent.ActiveSpeakersChanged,function(e){try{return e.length>0?e[0].identity.includes("agent")&&r.updateMode("speaking"):r.updateMode("listening"),Promise.resolve()}catch(e){return Promise.reject(e)}})},r.close=function(){if(this.isConnected){try{this.room.localParticipant.audioTrackPublications.forEach(function(e){e.track&&e.track.stop()})}catch(e){console.warn("Error stopping local tracks:",e)}this.audioCaptureContext&&(this.audioCaptureContext.close().catch(function(e){console.warn("Error closing audio capture context:",e)}),this.audioCaptureContext=null),this.room.disconnect()}},r.sendMessage=function(e){try{var n=this;if(!n.isConnected||!n.room.localParticipant)return console.warn("Cannot send message: room not connected or no local participant"),Promise.resolve();if("user_audio_chunk"in e)return Promise.resolve();var t=_(function(){var t=(new TextEncoder).encode(JSON.stringify(e));return Promise.resolve(n.room.localParticipant.publishData(t,{reliable:!0})).then(function(){})},function(t){n.debug({type:"send_message_error",message:{message:e,error:t}}),console.error("Failed to send message via WebRTC:",t)});return Promise.resolve(t&&t.then?t.then(function(){}):void 0)}catch(e){return Promise.reject(e)}},r.getRoom=function(){return this.room},r.setMicMuted=function(n){try{var t=this;if(!t.isConnected||!t.room.localParticipant)return console.warn("Cannot set microphone muted: room not connected or no local participant"),Promise.resolve();var o=t.room.localParticipant.getTrackPublication(e.Track.Source.Microphone);return Promise.resolve(null!=o&&o.track?_(function(){var 
e=n?Promise.resolve(o.track.mute()).then(function(){}):Promise.resolve(o.track.unmute()).then(function(){});if(e&&e.then)return e.then(function(){})},function(){return Promise.resolve(t.room.localParticipant.setMicrophoneEnabled(!n)).then(function(){})}):Promise.resolve(t.room.localParticipant.setMicrophoneEnabled(!n)).then(function(){}))}catch(e){return Promise.reject(e)}},r.setupAudioCapture=function(e){try{var n=this,t=_(function(){var t=new AudioContext;n.audioCaptureContext=t;var o=new MediaStream([e.mediaStreamTrack]),r=t.createMediaStreamSource(o);return Promise.resolve(y(t.audioWorklet)).then(function(){var e=new AudioWorkletNode(t,"raw-audio-processor");e.port.postMessage({type:"setFormat",format:n.outputFormat.format,sampleRate:n.outputFormat.sampleRate}),e.port.onmessage=function(e){var t=e.data;if(t[1]>.01){var o=f(t[0].buffer),r=n.audioEventId++;n.handleMessage({type:"audio",audio_event:{audio_base_64:o,event_id:r}})}},r.connect(e)})},function(e){console.warn("Failed to set up audio capture:",e)});return Promise.resolve(t&&t.then?t.then(function(){}):void 0)}catch(e){return Promise.reject(e)}},o}(s),k=function(e){try{var n=function(e){return e.connectionType?e.connectionType:"conversationToken"in e&&e.conversationToken?"webrtc":"websocket"}(e);switch(n){case"websocket":return Promise.resolve(h.create(e));case"webrtc":return Promise.resolve(b.create(e));default:throw new Error("Unknown connection type: "+n)}}catch(e){return Promise.reject(e)}};function w(){return["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document}var P=function(e){void 0===e&&(e={default:0,android:3e3});try{var n,t=e.default;if(/android/i.test(navigator.userAgent))t=null!=(n=e.android)?n:t;else if(w()){var o;t=null!=(o=e.ios)?o:t}var r=function(){if(t>0)return Promise.resolve(new Promise(function(e){return setTimeout(e,t)})).then(function(){})}();return Promise.resolve(r&&r.then?r.then(function(){}):void 0)}catch(e){return Promise.reject(e)}},C=/*#__PURE__*/function(e){function n(){return e.apply(this,arguments)||this}return t(n,e),n.startSession=function(e){try{var t=i.getFullOptions(e);t.onStatusChange({status:"connecting"}),t.onCanSendFeedbackChange({canSendFeedback:!1});var o=null;return Promise.resolve(function(r,i){try{var s=Promise.resolve(P(t.connectionDelay)).then(function(){return Promise.resolve(k(e)).then(function(e){return new n(t,o=e)})})}catch(e){return i(e)}return s&&s.then?s.then(void 0,i):s}(0,function(e){var n;throw t.onStatusChange({status:"disconnected"}),null==(n=o)||n.close(),e}))}catch(e){return Promise.reject(e)}},n}(i),M=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}e.create=function(t){var o=t.sampleRate,r=t.format,i=t.preferHeadphonesForIosDevices;try{var s=null,a=null;return Promise.resolve(function(t,c){try{var u=function(){function t(){function t(){return Promise.resolve(y(s.audioWorklet)).then(function(){var t=n({voiceIsolation:!0},c);return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:t})).then(function(n){var t=s.createMediaStreamSource(a=n),i=new AudioWorkletNode(s,"raw-audio-processor");return i.port.postMessage({type:"setFormat",format:r,sampleRate:o}),t.connect(u),u.connect(i),Promise.resolve(s.resume()).then(function(){return new e(s,u,i,a)})})})}var 
i=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(s=new window.AudioContext(i?{sampleRate:o}:{})).createAnalyser(),l=function(){if(!i)return Promise.resolve(s.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return l&&l.then?l.then(t):t()}var c={sampleRate:{ideal:o},echoCancellation:!0,noiseSuppression:!0},u=function(){if(w()&&i)return Promise.resolve(window.navigator.mediaDevices.enumerateDevices()).then(function(e){var n=e.find(function(e){return"audioinput"===e.kind&&["airpod","headphone","earphone"].find(function(n){return e.label.toLowerCase().includes(n)})});n&&(c.deviceId={ideal:n.deviceId})})}();return u&&u.then?u.then(t):t()}()}catch(e){return c(e)}return u&&u.then?u.then(void 0,c):u}(0,function(e){var n,t;throw null==(n=a)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=s)||t.close(),e}))}catch(e){return Promise.reject(e)}};var t=e.prototype;return t.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},t.setMuted=function(e){this.worklet.port.postMessage({type:"setMuted",isMuted:e})},e}(),S=g("audio-concat-processor",'\nconst decodeTable = [0,132,396,924,1980,4092,8316,16764];\n\nexport function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n}\n\nclass AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n}\n\nregisterProcessor("audio-concat-processor", AudioConcatProcessor);\n'),F=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,i){try{var s=(a=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(a),a.connect(r.destination),Promise.resolve(S(r.audioWorklet)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),Promise.resolve(r.resume()).then(function(){return new e(r,a,c,n)})}))}catch(e){return i(e)}var a,c;return s&&s.then?s.then(void 0,i):s}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function I(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var x=/*#__PURE__*/function(e){function o(n,t,o,r,i){var s;return(s=e.call(this,n,t)||this).input=void 0,s.output=void 0,s.wakeLock=void 0,s.inputFrequencyData=void 0,s.outputFrequencyData=void 0,s.onInputWorkletMessage=function(e){"connected"===s.status&&s.connection.sendMessage({user_audio_chunk:f(e.data[0].buffer)})},s.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&s.updateMode(n.finished?"listening":"speaking")},s.addAudioBase64Chunk=function(e){s.output.gain.gain.value=s.volume,s.output.worklet.port.postMessage({type:"clearInterrupted"}),s.output.worklet.port.postMessage({type:"buffer",buffer:p(e)})},s.fadeOutAudio=function(){s.updateMode("listening"),s.output.worklet.port.postMessage({type:"interrupt"}),s.output.gain.gain.exponentialRampToValueAtTime(1e-4,s.output.context.currentTime+2),setTimeout(function(){s.output.gain.gain.value=s.volume,s.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},s.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},s.input=o,s.output=r,s.wakeLock=i,s.input.worklet.port.onmessage=s.onInputWorkletMessage,s.output.worklet.port.onmessage=s.onOutputWorkletMessage,s}t(o,e),o.startSession=function(e){try{var t=function(){return I(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:!0})).then(function(t){return 
u=t,Promise.resolve(P(r.connectionDelay)).then(function(){return Promise.resolve(k(e)).then(function(t){return a=t,Promise.resolve(Promise.all([M.create(n({},a.inputFormat,{preferHeadphonesForIosDevices:e.preferHeadphonesForIosDevices})),F.create(a.outputFormat)])).then(function(e){var n;return s=e[0],c=e[1],null==(n=u)||n.getTracks().forEach(function(e){return e.stop()}),u=null,new o(r,a,s,c,l)})})})})},function(e){var n,t,o;return r.onStatusChange({status:"disconnected"}),null==(n=u)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=a)||t.close(),Promise.resolve(null==(o=s)?void 0:o.close()).then(function(){var n;return Promise.resolve(null==(n=c)?void 0:n.close()).then(function(){function n(){throw e}var t=I(function(){var e;return Promise.resolve(null==(e=l)?void 0:e.release()).then(function(){l=null})},function(){});return t&&t.then?t.then(n):n()})})})},r=i.getFullOptions(e);r.onStatusChange({status:"connecting"}),r.onCanSendFeedbackChange({canSendFeedback:!1});var s=null,a=null,c=null,u=null,l=null,d=function(n){if(null==(n=e.useWakeLock)||n){var t=I(function(){return Promise.resolve(navigator.wakeLock.request("screen")).then(function(e){l=e})},function(){});if(t&&t.then)return t.then(function(){})}}();return Promise.resolve(d&&d.then?d.then(t):t())}catch(e){return Promise.reject(e)}};var r=o.prototype;return r.handleEndSession=function(){try{var n=this;return Promise.resolve(e.prototype.handleEndSession.call(n)).then(function(){function e(){return Promise.resolve(n.input.close()).then(function(){return Promise.resolve(n.output.close()).then(function(){})})}var t=I(function(){var e;return Promise.resolve(null==(e=n.wakeLock)?void 0:e.release()).then(function(){n.wakeLock=null})},function(){});return t&&t.then?t.then(e):e()})}catch(e){return Promise.reject(e)}},r.handleInterruption=function(n){e.prototype.handleInterruption.call(this,n),this.fadeOutAudio()},r.handleAudio=function(e){var n,t;this.lastInterruptTimestamp<=e.audio_event.event_id&&(null==(n=(t=this.options).onAudio)||n.call(t,e.audio_event.audio_base_64),this.connection instanceof b||this.addAudioBase64Chunk(e.audio_event.audio_base_64),this.currentEventId=e.audio_event.event_id,this.updateCanSendFeedback(),this.updateMode("speaking"))},r.setMicMuted=function(e){this.connection instanceof b?this.connection.setMicMuted(e):this.input.setMuted(e)},r.getInputByteFrequencyData=function(){return null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData},r.getOutputByteFrequencyData=function(){return null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData},r.getInputVolume=function(){return this.calculateVolume(this.getInputByteFrequencyData())},r.getOutputVolume=function(){return this.calculateVolume(this.getOutputByteFrequencyData())},o}(i);exports.Conversation=/*#__PURE__*/function(e){function n(){return e.apply(this,arguments)||this}return t(n,e),n.startSession=function(e){return e.textOnly?C.startSession(e):x.startSession(e)},n}(i),exports.WebRTCConnection=b,exports.WebSocketConnection=h,exports.createConnection=k,exports.postOverallFeedback=function(e,n,t){return void 
0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})};
+ //# sourceMappingURL=lib.cjs.map
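Editor's note: one detail worth pulling out of the minified bundle above: when no explicit `connectionType` is passed, the connection factory infers it from the presence of a `conversationToken`. Reconstructed logic for illustration, not an official reference:

```ts
type ConnectionType = "websocket" | "webrtc";

// Reconstruction of the selection step inside the bundled createConnection:
// an explicit connectionType wins; otherwise a conversationToken implies
// WebRTC, and everything else falls back to the WebSocket default.
function resolveConnectionType(config: {
  connectionType?: ConnectionType;
  conversationToken?: string;
}): ConnectionType {
  if (config.connectionType) return config.connectionType;
  return config.conversationToken ? "webrtc" : "websocket";
}
```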