@11labs/client 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +160 -0
- package/dist/index.d.ts +59 -0
- package/dist/lib.cjs +2 -0
- package/dist/lib.cjs.map +1 -0
- package/dist/lib.modern.js +2 -0
- package/dist/lib.modern.js.map +1 -0
- package/dist/lib.module.js +2 -0
- package/dist/lib.module.js.map +1 -0
- package/dist/lib.umd.js +2 -0
- package/dist/lib.umd.js.map +1 -0
- package/dist/utils/audio.d.ts +2 -0
- package/dist/utils/audioConcatProcessor.d.ts +1 -0
- package/dist/utils/connection.d.ts +15 -0
- package/dist/utils/events.d.ts +47 -0
- package/dist/utils/input.d.ts +9 -0
- package/dist/utils/output.d.ts +9 -0
- package/dist/utils/rawAudioProcessor.d.ts +1 -0
- package/package.json +47 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
MIT License
|
2
|
+
|
3
|
+
Copyright (c) 2024 ElevenLabs
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
13
|
+
copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
@@ -0,0 +1,160 @@
|
|
1
|
+
# ElevenLabs JavaScript Client Library
|
2
|
+
|
3
|
+
> Note that Conversational AI is in alpha. This library, as well as the REST API spec, is subject to change.
|
4
|
+
|
5
|
+

|
6
|
+
[](https://discord.gg/elevenlabs)
|
7
|
+
[](https://twitter.com/elevenlabsio)
|
8
|
+
|
9
|
+
## Installation
|
10
|
+
|
11
|
+
Install the package in your project through package manager.
|
12
|
+
|
13
|
+
```shell
|
14
|
+
npm install @11labs/client
|
15
|
+
# or
|
16
|
+
yarn add @11labs/client
|
17
|
+
# or
|
18
|
+
pnpm install @11labs/client
|
19
|
+
```
|
20
|
+
|
21
|
+
## Usage
|
22
|
+
|
23
|
+
This library is primarily meant for development in vanilla JavaScript projects, or as a base for libraries tailored to specific frameworks.
|
24
|
+
It is recommended to check whether your specific framework has its own library.
|
25
|
+
However, you can use this library in any JavaScript-based project.
|
26
|
+
|
27
|
+
### Initialize conversation
|
28
|
+
|
29
|
+
First, initialize the Conversation instance:
|
30
|
+
|
31
|
+
```js
|
32
|
+
const conversation = await Conversation.startSession(options);
|
33
|
+
```
|
34
|
+
|
35
|
+
This will kick off the websocket connection and start using microphone to communicate with the ElevenLabs Conversational AI agent. Consider explaining and allowing microphone access in your app's UI before the Conversation kicks off:
|
36
|
+
|
37
|
+
```js
|
38
|
+
// call after explaining to the user why the microphone access is needed
|
39
|
+
await navigator.mediaDevices.getUserMedia({ audio: true });
|
40
|
+
```
|
41
|
+
|
42
|
+
#### Session configuration
|
43
|
+
|
44
|
+
The options passed to `startSession` specify how the session is established. There are two ways to start a session:
|
45
|
+
|
46
|
+
##### Using Agent ID
|
47
|
+
|
48
|
+
Agent ID can be acquired through [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai).
|
49
|
+
For public agents, you can use the ID directly:
|
50
|
+
|
51
|
+
```js
|
52
|
+
const conversation = await Conversation.startSession({
|
53
|
+
agentId: "<your-agent-id>",
|
54
|
+
});
|
55
|
+
```
|
56
|
+
|
57
|
+
##### Using a signed URL
|
58
|
+
|
59
|
+
If the conversation requires authorization, you will need to add a dedicated endpoint to your server that
|
60
|
+
will request a signed url using the [ElevenLabs API](https://elevenlabs.io/docs/introduction) and pass it back to the client.
|
61
|
+
|
62
|
+
Here's an example of how it could be set up:
|
63
|
+
|
64
|
+
```js
|
65
|
+
// Node.js server
|
66
|
+
|
67
|
+
app.get("/signed-url", yourAuthMiddleware, async (req, res) => {
|
68
|
+
const response = await fetch(
|
69
|
+
`https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id=${process.env.AGENT_ID}`,
|
70
|
+
{
|
71
|
+
method: "GET",
|
72
|
+
headers: {
|
73
|
+
// Requesting a signed url requires your ElevenLabs API key
|
74
|
+
// Do NOT expose your API key to the client!
|
75
|
+
"xi-api-key": process.env.XI_API_KEY,
|
76
|
+
},
|
77
|
+
}
|
78
|
+
);
|
79
|
+
|
80
|
+
if (!response.ok) {
|
81
|
+
return res.status(500).send("Failed to get signed URL");
|
82
|
+
}
|
83
|
+
|
84
|
+
const body = await response.json();
|
85
|
+
res.send(body.signed_url);
|
86
|
+
});
|
87
|
+
```
|
88
|
+
|
89
|
+
```js
|
90
|
+
// Client
|
91
|
+
|
92
|
+
const response = await fetch("/signed-url", yourAuthHeaders);
|
93
|
+
const signedUrl = await response.text();
|
94
|
+
|
95
|
+
const conversation = await Conversation.startSession({ signedUrl });
|
96
|
+
```
|
97
|
+
|
98
|
+
#### Optional callbacks
|
99
|
+
|
100
|
+
The options passed to `startSession` can also be used to register optional callbacks:
|
101
|
+
|
102
|
+
- **onConnect** - handler called when the conversation websocket connection is established.
|
103
|
+
- **onDisconnect** - handler called when the conversation websocket connection is ended.
|
104
|
+
- **onMessage** - handler called when a new text message is received. These can be tentative or final transcriptions of user voice, replies produced by LLM. Primarily used for handling conversation transcription.
|
105
|
+
- **onError** - handler called when an error is encountered.
|
106
|
+
- **onStatusChange** - handler called whenever connection status changes. Can be `connected`, `connecting` and `disconnected` (initial).
|
107
|
+
- **onModeChange** - handler called when a status changes, eg. agent switches from `speaking` to `listening`, or the other way around.
|
108
|
+
|
109
|
+
#### Return value
|
110
|
+
|
111
|
+
`startSession` returns a `Conversation` instance that can be used to control the session. The method will throw an error if the session cannot be established. This can happen if the user denies microphone access, or if the websocket connection
|
112
|
+
fails.
|
113
|
+
|
114
|
+
##### endSession
|
115
|
+
|
116
|
+
A method to manually end the conversation. The method will end the conversation and disconnect from websocket.
|
117
|
+
Afterwards the conversation instance will be unusable and can be safely discarded.
|
118
|
+
|
119
|
+
```js
|
120
|
+
await conversation.endSession();
|
121
|
+
```
|
122
|
+
|
123
|
+
##### getId
|
124
|
+
|
125
|
+
A method returning the conversation ID.
|
126
|
+
|
127
|
+
```js
|
128
|
+
const id = conversation.getId();
|
129
|
+
```
|
130
|
+
|
131
|
+
##### setVolume
|
132
|
+
|
133
|
+
A method to set the output volume of the conversation. Accepts object with volume field between 0 and 1.
|
134
|
+
|
135
|
+
```js
|
136
|
+
await conversation.setVolume({ volume: 0.5 });
|
137
|
+
```
|
138
|
+
|
139
|
+
##### getInputVolume / getOutputVolume
|
140
|
+
|
141
|
+
Methods that return the current input/output volume on a scale from `0` to `1` where `0` is -100 dB and `1` is -30 dB.
|
142
|
+
|
143
|
+
```js
|
144
|
+
const inputVolume = await conversation.getInputVolume();
|
145
|
+
const outputVolume = await conversation.getOutputVolume();
|
146
|
+
```
|
147
|
+
|
148
|
+
##### getInputByteFrequencyData / getOutputByteFrequencyData
|
149
|
+
|
150
|
+
Methods that return `Uint8Array`s containing the current input/output frequency data. See [AnalyserNode.getByteFrequencyData](https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/getByteFrequencyData) for more information.
|
151
|
+
|
152
|
+
## Development
|
153
|
+
|
154
|
+
Please, refer to the README.md file in the root of this repository.
|
155
|
+
|
156
|
+
## Contributing
|
157
|
+
|
158
|
+
Please, create an issue first to discuss the proposed changes. Any contributions are welcome!
|
159
|
+
|
160
|
+
Remember, if merged, your code will be used as part of a MIT licensed project. By submitting a Pull Request, you are giving your consent for your code to be integrated into this library.
|
package/dist/index.d.ts
ADDED
@@ -0,0 +1,59 @@
|
|
1
|
+
import { Input } from "./utils/input";
|
2
|
+
import { Output } from "./utils/output";
|
3
|
+
import { SessionConfig } from "./utils/connection";
|
4
|
+
export type { SocketEvent } from "./utils/events";
|
5
|
+
export type { SessionConfig } from "./utils/connection";
|
6
|
+
export type Role = "user" | "ai";
|
7
|
+
export type Mode = "speaking" | "listening";
|
8
|
+
export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
|
9
|
+
export type Options = SessionConfig & Callbacks;
|
10
|
+
export type Callbacks = {
|
11
|
+
onConnect: (props: {
|
12
|
+
conversationId: string;
|
13
|
+
}) => void;
|
14
|
+
onDisconnect: () => void;
|
15
|
+
onMessage: (props: {
|
16
|
+
message: string;
|
17
|
+
source: Role;
|
18
|
+
}) => void;
|
19
|
+
onDebug: (props: any) => void;
|
20
|
+
onError: (message: string, context?: any) => void;
|
21
|
+
onStatusChange: (prop: {
|
22
|
+
status: Status;
|
23
|
+
}) => void;
|
24
|
+
onModeChange: (prop: {
|
25
|
+
mode: Mode;
|
26
|
+
}) => void;
|
27
|
+
};
|
28
|
+
export declare class Conversation {
|
29
|
+
private readonly options;
|
30
|
+
private readonly connection;
|
31
|
+
readonly input: Input;
|
32
|
+
readonly output: Output;
|
33
|
+
static startSession(options: SessionConfig & Partial<Callbacks>): Promise<Conversation>;
|
34
|
+
private lastInterruptTimestamp;
|
35
|
+
private mode;
|
36
|
+
private status;
|
37
|
+
private inputFrequencyData?;
|
38
|
+
private outputFrequencyData?;
|
39
|
+
private volume;
|
40
|
+
private constructor();
|
41
|
+
endSession: () => Promise<void>;
|
42
|
+
private updateMode;
|
43
|
+
private updateStatus;
|
44
|
+
private onEvent;
|
45
|
+
private onInputWorkletMessage;
|
46
|
+
private onOutputWorkletMessage;
|
47
|
+
private addAudioBase64Chunk;
|
48
|
+
private fadeOutAudio;
|
49
|
+
private onError;
|
50
|
+
private calculateVolume;
|
51
|
+
getId: () => string;
|
52
|
+
setVolume: ({ volume }: {
|
53
|
+
volume: number;
|
54
|
+
}) => void;
|
55
|
+
getInputByteFrequencyData: () => Uint8Array;
|
56
|
+
getOutputByteFrequencyData: () => Uint8Array;
|
57
|
+
getInputVolume: () => number;
|
58
|
+
getOutputVolume: () => number;
|
59
|
+
}
|
package/dist/lib.cjs
ADDED
@@ -0,0 +1,2 @@
|
|
1
|
+
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},e.apply(null,arguments)}function t(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(n),o=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,o=null;return Promise.resolve(function(s,i){try{var a=function(){function s(){return Promise.resolve(n.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var r=n.createMediaStreamSource(o=t),s=new AudioWorkletNode(n,"raw-audio-processor");return r.connect(a),a.connect(s),new e(n,a,s,o)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!i)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(s):s()}()}catch(e){return i(e)}return a&&a.then?a.then(void 0,i):a}(0,function(e){var t,r;throw null==(t=o)||t.getTracks().forEach(function(e){return e.stop()}),null==(r=n)||r.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n 
this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),i=URL.createObjectURL(s),a=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(i)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,a,u,t)}))}catch(e){return o(e)}var a,u;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return 
Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(c=null!=(i="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN:null)?i:"wss://api.elevenlabs.io",l=null!=(a="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME:null)?a:"/v1/convai/conversation?agent_id=",n=new WebSocket(t.signedUrl?t.signedUrl:c+l+t.agentId),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);u(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var i,a,c,l;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),l={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}};exports.Conversation=/*#__PURE__*/function(){function n(e,n,r,o){var s=this,i=this,a=this,c=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==s.mode&&(s.mode=e,s.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==s.status&&(s.status=e,s.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var t=JSON.parse(e.data);if(!u(t))return;switch(t.type){case"interruption":t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio();break;case"agent_response":s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.updateMode("speaking"));break;case"ping":s.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:s.options.onDebug(t)}}catch(t){return void s.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===s.status&&s.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&s.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return 
a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return c.updateMode("listening"),c.output.worklet.port.postMessage({type:"interrupt"}),c.output.gain.gain.exponentialRampToValueAtTime(1e-4,c.output.context.currentTime+2),setTimeout(function(){c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),s.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return s.connection.conversationId},this.setVolume=function(e){s.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=s.inputFrequencyData||(s.inputFrequencyData=new Uint8Array(s.input.analyser.frequencyBinCount)),s.input.analyser.getByteFrequencyData(s.inputFrequencyData),s.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=s.outputFrequencyData||(s.outputFrequencyData=new Uint8Array(s.output.analyser.frequencyBinCount)),s.output.analyser.getByteFrequencyData(s.outputFrequencyData),s.outputFrequencyData},this.getInputVolume=function(){return s.calculateVolume(s.getInputByteFrequencyData())},this.getOutputVolume=function(){return s.calculateVolume(s.getOutputByteFrequencyData())},this.options=e,this.connection=n,this.input=r,this.output=o,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){s.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){s.updateStatus("disconnected"),s.onError("Socket 
error",e)}),this.connection.socket.addEventListener("close",function(){s.updateStatus("disconnected"),s.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(t){try{var r=e({},l,t);r.onStatusChange({status:"connecting"});var s=null,i=null,u=null;return Promise.resolve(function(e,l){try{var h=Promise.resolve(o.create(16e3)).then(function(e){return s=e,Promise.resolve(c.create(t)).then(function(e){return i=e,Promise.resolve(a.create(i.sampleRate)).then(function(e){return new n(r,i,s,u=e)})})})}catch(e){return l(e)}return h&&h.then?h.then(void 0,l):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=i)||t.close(),Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=u)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},n}();
|
2
|
+
//# sourceMappingURL=lib.cjs.map
|
package/dist/lib.cjs.map
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"lib.cjs","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new 
Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n 
this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","export type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: 
{\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n average_ping_ms?: number;\n };\n};\n\n// TODO correction missing\nexport type SocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent;\n\nexport function isValidSocketEvent(event: any): event is SocketEvent {\n return !!event.type;\n}\n","import { ConfigEvent, isValidSocketEvent } from \"./events\";\n\ndeclare const process: {\n env: {\n ELEVENLABS_CONVAI_SERVER_ORIGIN?: string;\n ELEVENLABS_CONVAI_SERVER_PATHNAME?: string;\n };\n};\n\nexport type SessionConfig =\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined };\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN\n : null) ?? WSS_API_ORIGIN;\n\n const pathname =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME\n : null) ?? WSS_API_PATHNAME;\n\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + pathname + config.agentId;\n\n socket = new WebSocket(url);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport { isValidSocketEvent, PingEvent } from \"./utils/events\";\n\nexport type { SocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks;\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n onDisconnect: () => 
void;\n onMessage: (props: { message: string; source: Role }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onError: (message: string, context?: any) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onDebug: () => {},\n onMessage: () => {},\n onStatusChange: () => {},\n onModeChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n 
this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= 
parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.socket.send(\n JSON.stringify({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n })\n );\n // parsedEvent.ping_event.average_ping_ms can be used on client side, for\n // example to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n const audioMessage = JSON.stringify({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n if (this.status === \"connected\") {\n this.connection.socket.send(audioMessage);\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 
1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","_proto","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_ref","process","env","ELEVENLABS_CONVAI_SERVER_ORIGIN","pathname","_ref2","ELEVENLABS_CONVAI_SERVER_PATHNAME","WebSocket","signedUrl","agentId","addEventListener","message","JSON","parse","data","conversation_initiation_metadata_event",
"console","warn","once","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket","defaultCallbacks","onConnect","onDisconnect","onError","onDebug","onMessage","onStatusChange","onModeChange","Conversation","options","connection","input","output","_this2","_this","_this3","_this4","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","parsedEvent","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","audio_event","addAudioBase64Chunk","audio_base_64","send","stringify","ping_event","_unused","onInputWorkletMessage","b","audioMessage","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","finished","chunk","value","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAuCX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAHAH,KAAAA,aACAC,EAAAA,KAAAA,cACAC,EAAAA,KAAAA,oBACAC,iBAAA,EAHAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,CAKF,OALGJ,EA3CgBM,OAAA,SAAOC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAEvC,WAAA,SAAAC,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC
,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,OAE7BL,KAAA,SAAAO,GAEF,IAAMC,EAASpB,EAAQqB,wBAPvBlB,EAAWgB,GAQLjB,EAAU,IAAIoB,iBAAiBtB,EAAS,uBAK9C,OAHAoB,EAAOG,QAAQtB,GACfA,EAASsB,QAAQrB,GAEN,IAAAH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAzB1D,EAAA,CAAA,IAAMqB,EACJX,UAAUC,aAAaW,0BAA0BnB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO0C,aACnBF,EAA+B,CAAElB,WAAAA,GAAe,CAAE,IAE3BqB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BjB,OAAAA,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAAgB,WAAAA,EAAAA,CADpD,GACoDA,OAAAA,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAkB1D,6DA7B2CoB,CAEvC,EA2BKC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA5B,IAAA4B,EAAaE,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GACtDJ,OAAAA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAzC,EAAA0C,UASYJ,iBAAK,IAC4C,OAA5DjC,KAAKD,YAAY8B,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GAAE7B,QAAAC,QAA5DJ,KACWJ,QAAQqC,SAAOzB,KAAA,WAAA,EAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAvC,CAAA,CAVD,GC5CIN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI0B,aAAa,CAAEpB,WAAAA,KACJqB,kBACnBiB,EAAO5C,EAAQ6C,cAChBtB,QAAQtB,GACbA,EAASsB,QAAQvB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIoB,iBAAiBtB,EAAS,0BAG9C,OAFAE,EAAQqB,QAAQqB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCf,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAG,EAAAF,UASYJ,MAAK,WAAA,IACN9B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQqC,SAAOzB,kBAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,KAAAK,CAAA,CATD,GC8Bc,SAAAI,EAAmBC,GACjC,QAASA,EAAMrD,IACjB,CCzCA,IAGasD,eAAU,WA0DrB,SAAAA,EACkBC,EACAC,EACA7C,QAFA4C,YAAA,EAAA9C,KACA+C
,oBAAA,EAAA/C,KACAE,gBAAA,EAFAF,KAAM8C,OAANA,EACA9C,KAAc+C,eAAdA,EACA/C,KAAUE,WAAVA,CACf,CAIF,OAJG2C,EA7DgB5C,OAAM,SAAC+C,OACzB,IAAIF,EAA2B,KAAK,OAAA3C,QAAAC,iCAG5B6C,SAAMC,EACU,oBAAZC,QACJA,QAAQC,IAAIC,gCACZ,MAAIH,EAXO,0BAaXI,EAGIC,OAHIA,EACQ,oBAAZJ,QACJA,QAAQC,IAAII,kCACZ,MAAID,EAfS,oCAqBnBT,EAAS,IAAIW,UAJDT,EAAOU,UACfV,EAAOU,UACPT,EAASK,EAAWN,EAAOW,SAEHxD,QAAAC,QACK,IAAID,QAEnC,SAACC,EAAS+B,GACVW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBACN,UACA,SAAChB,GACC,IAAMiB,EAAUC,KAAKC,MAAMnB,EAAMoB,MAE5BrB,EAAmBkB,KAIH,qCAAjBA,EAAQtE,KACVa,EAAQyD,EAAQI,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEC,MAAM,GAEZ,IAAE5D,KAxBI6D,SAAAA,GA0BN,IAAMtB,EAAiBsB,EAAmBC,gBACpCpE,EAAaqE,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAO,IAAI5B,EAAWC,EAAQC,EAAgB7C,EAAY,yBA/CxD,IAAAgD,EAAAK,EACIN,EAKAK,sCAR4B7B,CAEhC,EAgDKC,SAAAA,GAAO,IAAAgD,EAEd,MADAA,OAAAA,EAAA5B,IAAA4B,EAAQzC,QACFP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAW,EAAAR,UAQMJ,MAAA,WACLjC,KAAK8C,OAAOb,OACd,EAACY,CAAA,CAlEoB,GCcjB8B,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,aAAc,WAAK,EACnBC,QAAS,WAAK,EACdC,QAAS,WAAQ,EACjBC,UAAW,aACXC,eAAgB,WAAK,EACrBC,aAAc,gDAGS,WAqCvB,SAAAC,EACmBC,EACAC,EACDC,EACAC,GAAcC,IAAAA,OAAAC,EAsB1BzF,KAAI0F,EA8HR1F,KAAI2F,EAUJ3F,KAAIA,KAjKaoF,aACAC,EAAAA,KAAAA,uBACDC,WAAA,EAAAtF,KACAuF,YAXVK,EAAAA,KAAAA,uBAAiC,EAAC5F,KAClC6F,KAAa,iBACbC,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAAhG,KACnBiG,OAAiB,OA2BlBC,WAAU,WAAA,IACf,MAAoB,cAAhBT,EAAKK,OAAwB3F,QAAAC,WACjCqF,EAAKU,aAAa,iBAElBV,EAAKJ,WAAWpD,QAAQ9B,QAAAC,QAClBqF,EAAKH,MAAMrD,SAAOzB,uBAAAL,QAAAC,QAClBqF,EAAKF,OAAOtD,SAAOzB,KAAA,WAEzBiF,EAAKU,aAAa,eAAgB,EACpC,GAAA,CAAC,MAAAjE,UAAA/B,QAAAgC,OAAAD,UAEOkE,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQF,aAAa,CAAEW,KAAAA,IAEhC,EAAC7F,KAEOmG,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQH,eAAe,CAAEa,OAAAA,IAElC,EAEQO,KAAAA,QAAU,SAACzD,GACjB,IACE,IAAM0D,EAAcxC,KAAKC,MAAMnB,EAAMoB,MAErC,IAAKrB,EAAmB2D,GACtB,OAGF,OAAQA,EAAY/G,MAClB,IAAK,eACC+G,EAAYC,qBACdf,EAAKI,uBACHU,EAAYC,mBAA
mBC,UAEnChB,EAAKiB,eACL,MAGF,IAAK,iBACHjB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,KACR6C,QAASyC,EAAYI,qBAAqBC,iBAE5C,MAGF,IAAK,kBACHnB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,OACR6C,QAASyC,EAAYM,yBAAyBC,kBAEhD,MAGF,IAAK,oCACHrB,EAAKJ,QAAQL,QAAQ,CACnBxF,KAAM,2BACNuH,SACER,EAAYS,wCACTC,2BAEP,MAGF,IAAK,QAEDxB,EAAKI,wBAA0BU,EAAYW,YAAYT,WAEvDhB,EAAK0B,oBAAoBZ,EAAYW,YAAYE,eACjD3B,EAAKY,WAAW,aAElB,MAGF,IAAK,OACHZ,EAAKH,WAAWvC,OAAOsE,KACrBtD,KAAKuD,UAAU,CACb9H,KAAM,OACNiH,SAAWF,EAA0BgB,WAAWd,YAKpD,MAIF,QACEhB,EAAKJ,QAAQL,QAAQuB,GAI3B,CAAE,MAAAiB,GAEA,YADA/B,EAAKV,QAAQ,6BAA8B,CAAElC,MAAAA,GAE/C,CACF,EAEQ4E,KAAAA,sBAAwB,SAAC5E,GAC/B,IP9MgC6E,EAC5BrI,EOmNEsI,EAAe5D,KAAKuD,UAAU,CAClCM,kBPrN8BF,EO8MR7E,EAAMoB,KAAK,GAOqB5E,OPpNpDA,EAAS,IAAIH,WAAWwI,GAEX7I,OAAOgJ,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBzI,OOqNhC,cAAhBoG,EAAKM,QACPN,EAAKH,WAAWvC,OAAOsE,KAAKM,EAGhC,OAEQM,uBAAyB,SAAA9E,OAAGc,EAAId,EAAJc,KAChB,YAAdA,EAAKzE,MACPiG,EAAKY,WAAWpC,EAAKiE,SAAW,YAAc,WAElD,OAEQf,oBAAmB,SAAUgB,GAAiB,IAMjD,OALHxC,EAAKH,OAAO/C,KAAKA,KAAK2F,MAAQzC,EAAKO,OACnCP,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,qBAC7CmG,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CACnC9I,KAAM,SACNH,OAAQX,EAAoByJ,KAC3B/H,QAAAC,SACL,CAAC,MAAA8B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAlC,KAEOyG,aAA0B,WAAA,IAavB,OAXTd,EAAKS,WAAW,aAChBT,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,cAC7CoG,EAAKJ,OAAO/C,KAAKA,KAAK8F,6BACpB,KACA3C,EAAKJ,OAAO3F,QAAQ2I,YAAc,GAIpCC,WAAW,WACT7C,EAAKJ,OAAO/C,KAAKA,KAAK2F,MAAQxC,EAAKM,OACnCN,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA8B,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEO4C,CAAAA,EAAAA,KAAAA,QAAU,SAACjB,EAAiBjE,GAClCsE,QAAQxC,MAAMmC,EAASjE,GACvB4F,EAAKJ,QAAQN,QAAQjB,EAASjE,EAChC,OAEQ6I,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAc3J,OAChB,OACF,EAKA,IADA,IAAIkH,EAAS,EACJ/G,EAAI,EAAGA,EAAIwJ,EAAc3J,OAAQG,IACxC+G,GAAUyC,EAAcxJ,GAAK,IAI/B,OAFA+G,GAAUyC,EAAc3J,QAER,EAAI,EAAIkH,EAAS,EAAI,EAAIA,CAC3C,EAACjG,KAEM2I,MAAQ,WAAA,OAAMnD,EAAKH,WAAWtC,cAAc,OAE5C6F,UAAY,SAAArF,GACjBiC,EAAKS,OADqB1C,EAAN0C,MAEtB,EAACjG,KAEM6I,0BAA4B,WAKjC,a
AJArD,EAAKO,qBAALP,EAAKO,mBAAuB,IAAI9G,WAC9BuG,EAAKF,MAAMzF,SAASiJ,oBAEtBtD,EAAKF,MAAMzF,SAASkJ,qBAAqBvD,EAAKO,oBACvCP,EAAKO,kBACd,EAAC/F,KAEMgJ,2BAA6B,WAKlC,aAJAxD,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAI/G,WAC/BuG,EAAKD,OAAO1F,SAASiJ,oBAEvBtD,EAAKD,OAAO1F,SAASkJ,qBAAqBvD,EAAKQ,qBACxCR,EAAKQ,mBACd,OAEOiD,eAAiB,WACtB,OAAOzD,EAAKiD,gBAAgBjD,EAAKqD,4BACnC,OAEOK,gBAAkB,WACvB,OAAO1D,EAAKiD,gBAAgBjD,EAAKwD,6BACnC,EAhOmBhJ,KAAOoF,QAAPA,EACApF,KAAUqF,WAAVA,EACDrF,KAAKsF,MAALA,EACAtF,KAAMuF,OAANA,EAEhBvF,KAAKoF,QAAQR,UAAU,CAAE7B,eAAgBsC,EAAWtC,iBAEpD/C,KAAKqF,WAAWvC,OAAOc,iBAAiB,UAAW,SAAAhB,GACjD4C,EAAKa,QAAQzD,EACf,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,SAAAhB,GAC/C4C,EAAKW,aAAa,gBAClBX,EAAKV,QAAQ,eAAgBlC,EAC/B,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,WAC/C4B,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQP,cACf,GAEA7E,KAAKsF,MAAMxF,QAAQsI,KAAKe,UAAYnJ,KAAKwH,sBACzCxH,KAAKuF,OAAOzF,QAAQsI,KAAKe,UAAYnJ,KAAKgI,uBAC1ChI,KAAKmG,aAAa,YACpB,CAhCC,OAgCAhB,EA3DmBiE,aAAY,SAC9BhE,GAA2C,IAE3C,IAAMiE,EAAWC,EAAA,GACZ3E,EACAS,GAGLiE,EAAYpE,eAAe,CAAEa,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAApF,QAAAC,gCAE7BD,QAAAC,QACYT,EAAMM,OA5BE,OA4ByBO,KAAA,SAAA+I,GAAC,OAAhDjE,EAAKiE,EAA2CpJ,QAAAC,QAC7ByC,EAAW5C,OAAOmF,IAAQ5E,KAAAgJ,SAAAA,GAAC,OAA9CnE,EAAUmE,EAAoCrJ,QAAAC,QAC/BmC,EAAOtC,OAAOoF,EAAWnF,aAAWM,KAAA,SAAAiJ,GAEnD,WAAWtE,EAAakE,EAAahE,EAAYC,EAFjDC,EAAMkE,EAE0D,gEAPjChI,GAQhC,SAAQC,GAAO,IAAAgI,EAAAC,EAEM,OADpBN,EAAYpE,eAAe,CAAEa,OAAQ,iBACrC4D,OAAAA,EAAArE,IAAAqE,EAAYzH,QAAQ9B,QAAAC,QACduJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAO1H,SAAOzB,oBAAAoJ,EAAA,OAAAzJ,QAAAC,QACdwJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAQ3H,SAAOzB,gBACrB,MAAMkB,CAAM,EACd,EAAA,GACF,CAAC,MAAAQ,GAAA/B,OAAAA,QAAAgC,OAAAD,EAAA,CAAA,EAAAiD,CAAA,CA5BsB"}
|
@@ -0,0 +1,2 @@
|
|
1
|
+
function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the 
Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(s);class i{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const o=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(a),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}});const r=e.createMediaStreamSource(n),u=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(o),o.connect(u),new i(e,o,u,n)}catch(t){var s,o;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(o=e)||o.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const o=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n 
this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o);class u{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const a=new AudioWorkletNode(e,"audio-concat-processor");return a.connect(s),new u(e,n,s,a)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class l{static async create(t){let e=null;try{var n,s;const a=null!=(n="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN:null)?n:"wss://api.elevenlabs.io",i=null!=(s="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME:null)?s:"/v1/convai/conversation?agent_id=";e=new WebSocket(t.signedUrl?t.signedUrl:a+i+t.agentId);const o=await new Promise((t,n)=>{e.addEventListener("error",n),e.addEventListener("close",n),e.addEventListener("message",e=>{const n=JSON.parse(e.data);c(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First 
received message is not conversation metadata."))},{once:!0})}),r=o.conversation_id,u=parseInt(o.agent_output_audio_format.replace("pcm_",""));return new l(e,r,u)}catch(t){var a;throw null==(a=e)||a.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}}const h={onConnect:()=>{},onDisconnect:()=>{},onError:()=>{},onDebug:()=>{},onMessage:()=>{},onStatusChange:()=>{},onModeChange:()=>{}};class p{static async startSession(e){const n=t({},h,e);n.onStatusChange({status:"connecting"});let s=null,a=null,o=null;try{return s=await i.create(16e3),a=await l.create(e),o=await u.create(a.sampleRate),new p(n,a,s,o)}catch(t){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=a)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(d=o)?void 0:d.close()),t}}constructor(t,s,a,i){var o=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===o.status&&(o.updateStatus("disconnecting"),o.connection.close(),await o.input.close(),await o.output.close(),o.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=t=>{try{const 
e=JSON.parse(t.data);if(!c(e))return;switch(e.type){case"interruption":e.interruption_event&&(this.lastInterruptTimestamp=e.interruption_event.event_id),this.fadeOutAudio();break;case"agent_response":this.options.onMessage({source:"ai",message:e.agent_response_event.agent_response});break;case"user_transcript":this.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":this.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":this.lastInterruptTimestamp<=e.audio_event.event_id&&(this.addAudioBase64Chunk(e.audio_event.audio_base_64),this.updateMode("speaking"));break;case"ping":this.connection.socket.send(JSON.stringify({type:"pong",event_id:e.ping_event.event_id}));break;default:this.options.onDebug(e)}}catch(e){return void this.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{const n=JSON.stringify({user_audio_chunk:e(t.data[0].buffer)});"connected"===this.status&&this.connection.socket.send(n)},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"}),o.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async function(){o.updateMode("listening"),o.output.worklet.port.postMessage({type:"interrupt"}),o.output.gain.gain.exponentialRampToValueAtTime(1e-4,o.output.context.currentTime+2),setTimeout(()=>{o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return 
e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=a,this.output=i,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
|
2
|
+
//# sourceMappingURL=lib.modern.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"lib.modern.js","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new 
Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n 
this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","export type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: 
{\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n average_ping_ms?: number;\n };\n};\n\n// TODO correction missing\nexport type SocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent;\n\nexport function isValidSocketEvent(event: any): event is SocketEvent {\n return !!event.type;\n}\n","import { ConfigEvent, isValidSocketEvent } from \"./events\";\n\ndeclare const process: {\n env: {\n ELEVENLABS_CONVAI_SERVER_ORIGIN?: string;\n ELEVENLABS_CONVAI_SERVER_PATHNAME?: string;\n };\n};\n\nexport type SessionConfig =\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined };\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN\n : null) ?? WSS_API_ORIGIN;\n\n const pathname =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME\n : null) ?? WSS_API_PATHNAME;\n\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + pathname + config.agentId;\n\n socket = new WebSocket(url);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport { isValidSocketEvent, PingEvent } from \"./utils/events\";\n\nexport type { SocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks;\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n onDisconnect: () => 
void;\n onMessage: (props: { message: string; source: Role }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onError: (message: string, context?: any) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onDebug: () => {},\n onMessage: () => {},\n onStatusChange: () => {},\n onModeChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n 
this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= 
parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.socket.send(\n JSON.stringify({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n })\n );\n // parsedEvent.ping_event.average_ping_ms can be used on client side, for\n // example to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n const audioMessage = JSON.stringify({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n if (this.status === \"connected\") {\n this.connection.socket.send(audioMessage);\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 
1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n}\n"],"names":["arrayBufferToBase64","b","buffer","Uint8Array","window","btoa","String","fromCharCode","base64ToArrayBuffer","base64","binaryString","atob","len","length","bytes","i","charCodeAt","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","create","sampleRate","context","inputStream","supportsSampleRateConstraint","navigator","mediaDevices","getSupportedConstraints","AudioContext","analyser","createAnalyser","audioWorklet","addModule","getUserMedia","audio","ideal","echoCancellation","source","createMediaStreamSource","worklet","AudioWorkletNode","connect","error","_inputStream","_context","getTracks","forEach","track","stop","close","constructor","this","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","config","socket","_ref","_ref2","origin","process","env","ELEVENLABS_CONVAI_SERVER_ORIGIN","pathname","ELEVENLABS_CONVAI_SERVER_PATHNAME","WebSocket","signedUrl","agentId","conversationConfig","Promise","resolve","reject","addEventListener","message","JSON","parse","data","conversation_initiation_metadata_event","consol
e","warn","once","conversationId","conversation_id","parseInt","agent_output_audio_format","replace","_socket","defaultCallbacks","onConnect","onDisconnect","onError","onDebug","onMessage","onStatusChange","onModeChange","Conversation","startSession","options","fullOptions","_extends","status","input","connection","output","_connection","_input","_output","_this","lastInterruptTimestamp","mode","inputFrequencyData","outputFrequencyData","volume","endSession","async","updateStatus","updateMode","onEvent","parsedEvent","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","audio_event","addAudioBase64Chunk","audio_base_64","send","stringify","ping_event","_unused","onInputWorkletMessage","audioMessage","user_audio_chunk","onOutputWorkletMessage","finished","chunk","value","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage"],"mappings":"wNAAM,SAAUA,EAAoBC,GAClC,MAAMC,EAAS,IAAIC,WAAWF,GAG9B,OADmBG,OAAOC,KAAKC,OAAOC,gBAAgBL,GAExD,UAEgBM,EAAoBC,GAClC,MAAMC,EAAeN,OAAOO,KAAKF,GAC3BG,EAAMF,EAAaG,OACnBC,EAAQ,IAAIX,WAAWS,GAC7B,IAAK,IAAIG,EAAI,EAAGA,EAAIH,EAAKG,IACvBD,EAAMC,GAAKL,EAAaM,WAAWD,GAErC,OAAOD,EAAMZ,MACf,CCfA,MAAMe,EAAO,IAAIC,KACf,CACE,i6EA2DF,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,SC3DxCM,EACJ,mBAAaC,CAAOC,GACzB,IAAIC,EAA+B,KAC/BC,EAAkC,KAEtC,IACE,MAAMC,EACJC,UAAUC,aAAaC,0BAA0BN,WAEnDC,EAAU,IAAItB,OAAO4B,aACnBJ,EAA+B,CAAEH,cAAe,CAAE,GAEpD,MAAMQ,EAAWP,EAAQQ,iBACpBN,SACGF,EAAQS,aAAaC,UAhBjC,2GAkBUV,EAAQS,aAAaC,UAAUhB,GAErCO,QAAoBE,UAAUC,aAAaO,aAAa,CACtDC,MAAO,CACLb,WAAY,CAAEc,MAAOd,GACrBe,iBAAkB,CAAED,OAAO,MAI/B,MAAME,EAASf,EAAQgB,wBAAwBf,GACzCgB,EAAU,IAAIC,iBAAiBlB,EAAS,uBAK9C,OAHAe,EAAOI,
QAAQZ,GACfA,EAASY,QAAQF,OAENpB,EAAMG,EAASO,EAAUU,EAAShB,EAC/C,CAAE,MAAOmB,GAAOC,IAAAA,EAAAC,EAGd,aAFAD,EAAApB,IAAAoB,EAAaE,YAAYC,QAAQC,GAASA,EAAMC,QACzC,OAAPJ,EAAAtB,IAAAsB,EAASK,QACHP,CACR,CACF,CAEAQ,WAAAA,CACkB5B,EACAO,EACAU,EACAhB,QAHAD,aAAA,EAAA6B,KACAtB,cAAA,EAAAsB,KACAZ,aACAhB,EAAAA,KAAAA,iBAHA,EAAA4B,KAAO7B,QAAPA,EACA6B,KAAQtB,SAARA,EACAsB,KAAOZ,QAAPA,EACAY,KAAW5B,YAAXA,CACf,CAEI,WAAM0B,GACXE,KAAK5B,YAAYsB,YAAYC,QAAQC,GAASA,EAAMC,cAC9CG,KAAK7B,QAAQ2B,OACrB,ECtDF,MAAMpC,EAAO,IAAIC,KACf,CACE,03DA6DF,CAAEC,KAAM,2BAGGqC,EAAuBnC,IAAIC,gBAAgBL,GChE3C,MAAAwC,EACJ,mBAAajC,CAAOC,GACzB,IAAIC,EAA+B,KACnC,IACEA,EAAU,IAAIM,aAAa,CAAEP,eAC7B,MAAMQ,EAAWP,EAAQQ,iBACnBwB,EAAOhC,EAAQiC,aACrBD,EAAKb,QAAQZ,GACbA,EAASY,QAAQnB,EAAQkC,mBACnBlC,EAAQS,aAAaC,UAAUoB,GACrC,MAAMb,EAAU,IAAIC,iBAAiBlB,EAAS,0BAG9C,OAFAiB,EAAQE,QAAQa,OAELD,EAAO/B,EAASO,EAAUyB,EAAMf,EAC7C,CAAE,MAAOG,GAAO,IAAAE,EAEd,MADAA,OAAAA,EAAAtB,IAAAsB,EAASK,QACHP,CACR,CACF,CAEAQ,WAAAA,CACkB5B,EACAO,EACAyB,EACAf,GAHAjB,KAAAA,oBACAO,cAAA,EAAAsB,KACAG,UACAf,EAAAA,KAAAA,eAHAY,KAAO7B,QAAPA,EACA6B,KAAQtB,SAARA,EACAsB,KAAIG,KAAJA,EACAH,KAAOZ,QAAPA,CACf,CAEI,WAAMU,SACDE,KAAC7B,QAAQ2B,OACrB,ECqBc,SAAAQ,EAAmBC,GACjC,QAASA,EAAM3C,IACjB,OCtCa4C,EACJ,mBAAavC,CAAOwC,GACzB,IAAIC,EAA2B,KAE/B,QAAIC,EAAAC,EACF,MAAMC,EAGI,OAHEF,EACU,oBAAZG,QACJA,QAAQC,IAAIC,gCACZ,MAAIL,EAXO,0BAaXM,EAGI,OAHIL,EACQ,oBAAZE,QACJA,QAAQC,IAAIG,kCACZ,MAAIN,EAfS,oCAqBnBF,EAAS,IAAIS,UAJDV,EAAOW,UACfX,EAAOW,UACPP,EAASI,EAAWR,EAAOY,SAG/B,MAAMC,QAA+B,IAAAC,QAEnC,CAACC,EAASC,KACVf,EAAQgB,iBAAiB,QAASD,GAClCf,EAAQgB,iBAAiB,QAASD,GAClCf,EAAQgB,iBACN,UACCnB,IACC,MAAMoB,EAAUC,KAAKC,MAAMtB,EAAMuB,MAE5BxB,EAAmBqB,KAIH,qCAAjBA,EAAQ/D,KACV4D,EAAQG,EAAQI,wCAEhBC,QAAQC,KACN,wDAEJ,EAEF,CAAEC,MAAM,GAAM,GAIZC,EAAiBb,EAAmBc,gBACpClE,EAAamE,SACjBf,EAAmBgB,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAW,IAAA/B,EAAWE,EAAQyB,EAAgBjE,EAChD,CAAE,MAAOqB,OAAOiD,EAEd,MADM,OAANA,EAAA9B,IAAA8B,EAAQ1C,QACFP,CACR,CACF,CAEAQ,WAAAA,CACkBW,EACAyB,EACAjE,GAFAwC,KAAAA,YACAyB,EAAAA,KAAAA,2BACAjE,gBAAA,EAFA8B,KAAMU,OAANA,
EACAV,KAAcmC,eAAdA,EACAnC,KAAU9B,WAAVA,CACf,CAEI4B,KAAAA,GACLE,KAAKU,OAAOZ,OACd,ECtDF,MAEM2C,EAA8B,CAClCC,UAAWA,OACXC,aAAcA,OACdC,QAASA,OACTC,QAASA,OACTC,UAAWA,OACXC,eAAgBA,OAChBC,aAAcA,QAGH,MAAAC,EACJ,yBAAaC,CAClBC,GAEA,MAAMC,EAAWC,EAAA,CAAA,EACZZ,EACAU,GAGLC,EAAYL,eAAe,CAAEO,OAAQ,eAErC,IAAIC,EAAsB,KACtBC,EAAgC,KAChCC,EAAwB,KAE5B,IAKE,OAJAF,QAAcvF,EAAMC,OA5BE,MA6BtBuF,QAAmBhD,EAAWvC,OAAOkF,GACrCM,QAAevD,EAAOjC,OAAOuF,EAAWtF,YAEjC,IAAI+E,EAAaG,EAAaI,EAAYD,EAAOE,EAC1D,CAAE,MAAOlE,GAAOmE,IAAAA,EAAAC,EAAAC,EAKd,MAJAR,EAAYL,eAAe,CAAEO,OAAQ,iBAC3B,OAAVI,EAAAF,IAAAE,EAAY5D,cACD,OAAX6D,EAAMJ,QAAK,EAALI,EAAO7D,sBACb8D,EAAMH,UAAAG,EAAQ9D,SACRP,CACR,CACF,CASAQ,WAAAA,CACmBoD,EACAK,EACDD,EACAE,GAAcI,IAAAA,EAHbV,KAAAA,KAAAA,oBACAK,gBAAA,EAAAxD,KACDuD,WAAA,EAAAvD,KACAyD,YAXVK,EAAAA,KAAAA,uBAAiC,OACjCC,KAAa,YAAW/D,KACxBsD,OAAiB,kBACjBU,wBAAkB,EAAAhE,KAClBiE,yBACAC,EAAAA,KAAAA,OAAiB,OA2BlBC,WAAaC,iBACE,cAAhBP,EAAKP,SACTO,EAAKQ,aAAa,iBAElBR,EAAKL,WAAW1D,cACV+D,EAAKN,MAAMzD,cACX+D,EAAKJ,OAAO3D,QAElB+D,EAAKQ,aAAa,gBACpB,EAEQC,KAAAA,WAAcP,IAChBA,IAAS/D,KAAK+D,OAChB/D,KAAK+D,KAAOA,EACZ/D,KAAKmD,QAAQH,aAAa,CAAEe,SAC9B,EACD/D,KAEOqE,aAAgBf,IAClBA,IAAWtD,KAAKsD,SAClBtD,KAAKsD,OAASA,EACdtD,KAAKmD,QAAQJ,eAAe,CAAEO,WAChC,EACDtD,KAEOuE,QAAWhE,IACjB,IACE,MAAMiE,EAAc5C,KAAKC,MAAMtB,EAAMuB,MAErC,IAAKxB,EAAmBkE,GACtB,OAGF,OAAQA,EAAY5G,MAClB,IAAK,eACC4G,EAAYC,qBACdzE,KAAK8D,uBACHU,EAAYC,mBAAmBC,UAEnC1E,KAAK2E,eACL,MAGF,IAAK,iBACH3E,KAAKmD,QAAQL,UAAU,CACrB5D,OAAQ,KACRyC,QAAS6C,EAAYI,qBAAqBC,iBAE5C,MAGF,IAAK,kBACH7E,KAAKmD,QAAQL,UAAU,CACrB5D,OAAQ,OACRyC,QAAS6C,EAAYM,yBAAyBC,kBAEhD,MAGF,IAAK,oCACH/E,KAAKmD,QAAQN,QAAQ,CACnBjF,KAAM,2BACNoH,SACER,EAAYS,wCACTC,2BAEP,MAGF,IAAK,QAEDlF,KAAK8D,wBAA0BU,EAAYW,YAAYT,WAEvD1E,KAAKoF,oBAAoBZ,EAAYW,YAAYE,eACjDrF,KAAKsE,WAAW,aAElB,MAGF,IAAK,OACHtE,KAAKwD,WAAW9C,OAAO4E,KACrB1D,KAAK2D,UAAU,CACb3H,KAAM,OACN8G,SAAWF,EAA0BgB,WAAWd,YAKpD,MAIF,QACE1E,KAAKmD,QAAQN,QAAQ2B,GAI3B,CAAE,MAAAiB,GAEA,YADAzF,KAAK4C,QAAQ,6BAA8B,CAAErC,SAE/C,GAGMmF,KAAAA,sBAAyBnF,IAC/B,MAMMoF,EAAe/D,K
AAK2D,UAAU,CAClCK,iBAAkBnJ,EAPI8D,EAAMuB,KAAK,GAOqBnF,UAGpC,cAAhBqD,KAAKsD,QACPtD,KAAKwD,WAAW9C,OAAO4E,KAAKK,EAC9B,OAIME,uBAAyB,EAAG/D,WAChB,YAAdA,EAAKlE,MACPoC,KAAKsE,WAAWxC,EAAKgE,SAAW,YAAc,WAChD,OAGMV,oBAAsBhB,eAAO2B,GACnClC,EAAKJ,OAAOtD,KAAKA,KAAK6F,MAAQnC,EAAKK,OACnCL,EAAKJ,OAAOrE,QAAQ6G,KAAKC,YAAY,CAAEtI,KAAM,qBAC7CiG,EAAKJ,OAAOrE,QAAQ6G,KAAKC,YAAY,CACnCtI,KAAM,SACNjB,OAAQM,EAAoB8I,IAEhC,OAEQpB,aAAeP,iBAErBP,EAAKS,WAAW,aAChBT,EAAKJ,OAAOrE,QAAQ6G,KAAKC,YAAY,CAAEtI,KAAM,cAC7CiG,EAAKJ,OAAOtD,KAAKA,KAAKgG,6BACpB,KACAtC,EAAKJ,OAAOtF,QAAQiI,YAAc,GAIpCC,WAAW,KACTxC,EAAKJ,OAAOtD,KAAKA,KAAK6F,MAAQnC,EAAKK,OACnCL,EAAKJ,OAAOrE,QAAQ6G,KAAKC,YAAY,CAAEtI,KAAM,sBAC5C,IACL,EAACoC,KAEO4C,QAAU,CAACjB,EAAiBxD,KAClC6D,QAAQzC,MAAMoC,EAASxD,GACvB6B,KAAKmD,QAAQP,QAAQjB,EAASxD,EAChC,EAEQmI,KAAAA,gBAAmBC,IACzB,GAA6B,IAAzBA,EAAcjJ,OAChB,SAKF,IAAI4G,EAAS,EACb,IAAK,IAAI1G,EAAI,EAAGA,EAAI+I,EAAcjJ,OAAQE,IACxC0G,GAAUqC,EAAc/I,GAAK,IAI/B,OAFA0G,GAAUqC,EAAcjJ,OAEjB4G,EAAS,EAAI,EAAIA,EAAS,EAAI,EAAIA,GAC1ClE,KAEMwG,MAAQ,IAAMxG,KAAKwD,WAAWrB,eAE9BsE,KAAAA,UAAY,EAAGvC,aACpBlE,KAAKkE,OAASA,CAChB,EAAClE,KAEM0G,0BAA4B,KACV,MAAnB1G,KAACgE,qBAALhE,KAAKgE,mBAAuB,IAAIpH,WAC9BoD,KAAKuD,MAAM7E,SAASiI,oBAEtB3G,KAAKuD,MAAM7E,SAASkI,qBAAqB5G,KAAKgE,oBACnChE,KAACgE,yBAGP6C,2BAA6B,KACV,MAAxB7G,KAAKiE,sBAALjE,KAAKiE,oBAAwB,IAAIrH,WAC/BoD,KAAKyD,OAAO/E,SAASiI,oBAEvB3G,KAAKyD,OAAO/E,SAASkI,qBAAqB5G,KAAKiE,0BACnCA,qBAGP6C,KAAAA,eAAiB,IACf9G,KAAKsG,gBAAgBtG,KAAK0G,kCAG5BK,gBAAkB,SACXT,gBAAgBtG,KAAK6G,8BA/NhB7G,KAAOmD,QAAPA,EACAnD,KAAUwD,WAAVA,EACDxD,KAAKuD,MAALA,EACAvD,KAAMyD,OAANA,EAEhBzD,KAAKmD,QAAQT,UAAU,CAAEP,eAAgBqB,EAAWrB,iBAEpDnC,KAAKwD,WAAW9C,OAAOgB,iBAAiB,UAAWnB,IACjDP,KAAKuE,QAAQhE,EAAK,GAEpBP,KAAKwD,WAAW9C,OAAOgB,iBAAiB,QAASnB,IAC/CP,KAAKqE,aAAa,gBAClBrE,KAAK4C,QAAQ,eAAgBrC,EAC/B,GACAP,KAAKwD,WAAW9C,OAAOgB,iBAAiB,QAAS,KAC/C1B,KAAKqE,aAAa,gBAClBrE,KAAKmD,QAAQR,iBAGf3C,KAAKuD,MAAMnE,QAAQ6G,KAAKe,UAAYhH,KAAK0F,sBACzC1F,KAAKyD,OAAOrE,QAAQ6G,KAAKe,UAAYhH,KAAK6F,uBAC1C7F,KAAKqE,aAAa,YACpB"}
|
@@ -0,0 +1,2 @@
|
|
1
|
+
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},e.apply(null,arguments)}function t(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(n),o=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,o=null;return Promise.resolve(function(s,i){try{var a=function(){function s(){return Promise.resolve(n.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var r=n.createMediaStreamSource(o=t),s=new AudioWorkletNode(n,"raw-audio-processor");return r.connect(a),a.connect(s),new e(n,a,s,o)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!i)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(s):s()}()}catch(e){return i(e)}return a&&a.then?a.then(void 0,i):a}(0,function(e){var t,r;throw null==(t=o)||t.getTracks().forEach(function(e){return e.stop()}),null==(r=n)||r.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n 
this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),i=URL.createObjectURL(s),a=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(i)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,a,u,t)}))}catch(e){return o(e)}var a,u;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return 
Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(c=null!=(i="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN:null)?i:"wss://api.elevenlabs.io",l=null!=(a="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME:null)?a:"/v1/convai/conversation?agent_id=",n=new WebSocket(t.signedUrl?t.signedUrl:c+l+t.agentId),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);u(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var i,a,c,l;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),l={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}},h=/*#__PURE__*/function(){function n(e,n,r,o){var s=this,i=this,a=this,c=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==s.mode&&(s.mode=e,s.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==s.status&&(s.status=e,s.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var t=JSON.parse(e.data);if(!u(t))return;switch(t.type){case"interruption":t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio();break;case"agent_response":s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.updateMode("speaking"));break;case"ping":s.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:s.options.onDebug(t)}}catch(t){return void s.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===s.status&&s.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&s.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return 
a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return c.updateMode("listening"),c.output.worklet.port.postMessage({type:"interrupt"}),c.output.gain.gain.exponentialRampToValueAtTime(1e-4,c.output.context.currentTime+2),setTimeout(function(){c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),s.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return s.connection.conversationId},this.setVolume=function(e){s.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=s.inputFrequencyData||(s.inputFrequencyData=new Uint8Array(s.input.analyser.frequencyBinCount)),s.input.analyser.getByteFrequencyData(s.inputFrequencyData),s.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=s.outputFrequencyData||(s.outputFrequencyData=new Uint8Array(s.output.analyser.frequencyBinCount)),s.output.analyser.getByteFrequencyData(s.outputFrequencyData),s.outputFrequencyData},this.getInputVolume=function(){return s.calculateVolume(s.getInputByteFrequencyData())},this.getOutputVolume=function(){return s.calculateVolume(s.getOutputByteFrequencyData())},this.options=e,this.connection=n,this.input=r,this.output=o,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){s.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){s.updateStatus("disconnected"),s.onError("Socket 
error",e)}),this.connection.socket.addEventListener("close",function(){s.updateStatus("disconnected"),s.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(t){try{var r=e({},l,t);r.onStatusChange({status:"connecting"});var s=null,i=null,u=null;return Promise.resolve(function(e,l){try{var h=Promise.resolve(o.create(16e3)).then(function(e){return s=e,Promise.resolve(c.create(t)).then(function(e){return i=e,Promise.resolve(a.create(i.sampleRate)).then(function(e){return new n(r,i,s,u=e)})})})}catch(e){return l(e)}return h&&h.then?h.then(void 0,l):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=i)||t.close(),Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=u)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},n}();export{h as Conversation};
|
2
|
+
//# sourceMappingURL=lib.module.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"lib.module.js","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new 
Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n 
this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","export type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: 
{\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n average_ping_ms?: number;\n };\n};\n\n// TODO correction missing\nexport type SocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent;\n\nexport function isValidSocketEvent(event: any): event is SocketEvent {\n return !!event.type;\n}\n","import { ConfigEvent, isValidSocketEvent } from \"./events\";\n\ndeclare const process: {\n env: {\n ELEVENLABS_CONVAI_SERVER_ORIGIN?: string;\n ELEVENLABS_CONVAI_SERVER_PATHNAME?: string;\n };\n};\n\nexport type SessionConfig =\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined };\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN\n : null) ?? WSS_API_ORIGIN;\n\n const pathname =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME\n : null) ?? WSS_API_PATHNAME;\n\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + pathname + config.agentId;\n\n socket = new WebSocket(url);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport { isValidSocketEvent, PingEvent } from \"./utils/events\";\n\nexport type { SocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks;\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n onDisconnect: () => 
void;\n onMessage: (props: { message: string; source: Role }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onError: (message: string, context?: any) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onDebug: () => {},\n onMessage: () => {},\n onStatusChange: () => {},\n onModeChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n 
this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= 
parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.socket.send(\n JSON.stringify({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n })\n );\n // parsedEvent.ping_event.average_ping_ms can be used on client side, for\n // example to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n const audioMessage = JSON.stringify({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n if (this.status === \"connected\") {\n this.connection.socket.send(audioMessage);\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 
1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","_proto","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_ref","process","env","ELEVENLABS_CONVAI_SERVER_ORIGIN","pathname","_ref2","ELEVENLABS_CONVAI_SERVER_PATHNAME","WebSocket","signedUrl","agentId","addEventListener","message","JSON","parse","data","conversation_initiation_metadata_event",
"console","warn","once","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket","defaultCallbacks","onConnect","onDisconnect","onError","onDebug","onMessage","onStatusChange","onModeChange","Conversation","options","connection","input","output","_this2","_this","_this3","_this4","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","parsedEvent","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","audio_event","addAudioBase64Chunk","audio_base_64","send","stringify","ping_event","_unused","onInputWorkletMessage","b","audioMessage","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","finished","chunk","value","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wNAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAuCX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAHAH,KAAAA,aACAC,EAAAA,KAAAA,cACAC,EAAAA,KAAAA,oBACAC,iBAAA,EAHAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,CAKF,OALGJ,EA3CgBM,OAAA,SAAOC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAEvC,WAAA,SAAAC,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC
,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,OAE7BL,KAAA,SAAAO,GAEF,IAAMC,EAASpB,EAAQqB,wBAPvBlB,EAAWgB,GAQLjB,EAAU,IAAIoB,iBAAiBtB,EAAS,uBAK9C,OAHAoB,EAAOG,QAAQtB,GACfA,EAASsB,QAAQrB,GAEN,IAAAH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAzB1D,EAAA,CAAA,IAAMqB,EACJX,UAAUC,aAAaW,0BAA0BnB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO0C,aACnBF,EAA+B,CAAElB,WAAAA,GAAe,CAAE,IAE3BqB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BjB,OAAAA,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAAgB,WAAAA,EAAAA,CADpD,GACoDA,OAAAA,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAkB1D,6DA7B2CoB,CAEvC,EA2BKC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA5B,IAAA4B,EAAaE,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GACtDJ,OAAAA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAzC,EAAA0C,UASYJ,iBAAK,IAC4C,OAA5DjC,KAAKD,YAAY8B,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GAAE7B,QAAAC,QAA5DJ,KACWJ,QAAQqC,SAAOzB,KAAA,WAAA,EAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAvC,CAAA,CAVD,GC5CIN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI0B,aAAa,CAAEpB,WAAAA,KACJqB,kBACnBiB,EAAO5C,EAAQ6C,cAChBtB,QAAQtB,GACbA,EAASsB,QAAQvB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIoB,iBAAiBtB,EAAS,0BAG9C,OAFAE,EAAQqB,QAAQqB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCf,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAG,EAAAF,UASYJ,MAAK,WAAA,IACN9B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQqC,SAAOzB,kBAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,KAAAK,CAAA,CATD,GC8Bc,SAAAI,EAAmBC,GACjC,QAASA,EAAMrD,IACjB,CCzCA,IAGasD,eAAU,WA0DrB,SAAAA,EACkBC,EACAC,EACA7C,QAFA4C,YAAA,EAAA9C,KACA+C
,oBAAA,EAAA/C,KACAE,gBAAA,EAFAF,KAAM8C,OAANA,EACA9C,KAAc+C,eAAdA,EACA/C,KAAUE,WAAVA,CACf,CAIF,OAJG2C,EA7DgB5C,OAAM,SAAC+C,OACzB,IAAIF,EAA2B,KAAK,OAAA3C,QAAAC,iCAG5B6C,SAAMC,EACU,oBAAZC,QACJA,QAAQC,IAAIC,gCACZ,MAAIH,EAXO,0BAaXI,EAGIC,OAHIA,EACQ,oBAAZJ,QACJA,QAAQC,IAAII,kCACZ,MAAID,EAfS,oCAqBnBT,EAAS,IAAIW,UAJDT,EAAOU,UACfV,EAAOU,UACPT,EAASK,EAAWN,EAAOW,SAEHxD,QAAAC,QACK,IAAID,QAEnC,SAACC,EAAS+B,GACVW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBACN,UACA,SAAChB,GACC,IAAMiB,EAAUC,KAAKC,MAAMnB,EAAMoB,MAE5BrB,EAAmBkB,KAIH,qCAAjBA,EAAQtE,KACVa,EAAQyD,EAAQI,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEC,MAAM,GAEZ,IAAE5D,KAxBI6D,SAAAA,GA0BN,IAAMtB,EAAiBsB,EAAmBC,gBACpCpE,EAAaqE,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAO,IAAI5B,EAAWC,EAAQC,EAAgB7C,EAAY,yBA/CxD,IAAAgD,EAAAK,EACIN,EAKAK,sCAR4B7B,CAEhC,EAgDKC,SAAAA,GAAO,IAAAgD,EAEd,MADAA,OAAAA,EAAA5B,IAAA4B,EAAQzC,QACFP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAW,EAAAR,UAQMJ,MAAA,WACLjC,KAAK8C,OAAOb,OACd,EAACY,CAAA,CAlEoB,GCcjB8B,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,aAAc,WAAK,EACnBC,QAAS,WAAK,EACdC,QAAS,WAAQ,EACjBC,UAAW,aACXC,eAAgB,WAAK,EACrBC,aAAc,cAGHC,eAAY,WAqCvB,SAAAA,EACmBC,EACAC,EACDC,EACAC,GAAcC,IAAAA,OAAAC,EAsB1BzF,KAAI0F,EA8HR1F,KAAI2F,EAUJ3F,KAAIA,KAjKaoF,aACAC,EAAAA,KAAAA,uBACDC,WAAA,EAAAtF,KACAuF,YAXVK,EAAAA,KAAAA,uBAAiC,EAAC5F,KAClC6F,KAAa,iBACbC,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAAhG,KACnBiG,OAAiB,OA2BlBC,WAAU,WAAA,IACf,MAAoB,cAAhBT,EAAKK,OAAwB3F,QAAAC,WACjCqF,EAAKU,aAAa,iBAElBV,EAAKJ,WAAWpD,QAAQ9B,QAAAC,QAClBqF,EAAKH,MAAMrD,SAAOzB,uBAAAL,QAAAC,QAClBqF,EAAKF,OAAOtD,SAAOzB,KAAA,WAEzBiF,EAAKU,aAAa,eAAgB,EACpC,GAAA,CAAC,MAAAjE,UAAA/B,QAAAgC,OAAAD,UAEOkE,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQF,aAAa,CAAEW,KAAAA,IAEhC,EAAC7F,KAEOmG,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQH,eAAe,CAAEa,OAAAA,IAElC,EAEQO,KAAAA,QAAU,SAACzD,GACjB,IACE,IAAM0D,EAAcxC,KAAKC,MAAMnB,EAAMoB,MAErC,IAAKrB,EAAmB2D,GACtB,OAGF,OAAQA,EAAY/G,MAClB,IAAK,eACC+G,EAAYC,qBACdf,EAAKI,uBACHU,EAAYC
,mBAAmBC,UAEnChB,EAAKiB,eACL,MAGF,IAAK,iBACHjB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,KACR6C,QAASyC,EAAYI,qBAAqBC,iBAE5C,MAGF,IAAK,kBACHnB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,OACR6C,QAASyC,EAAYM,yBAAyBC,kBAEhD,MAGF,IAAK,oCACHrB,EAAKJ,QAAQL,QAAQ,CACnBxF,KAAM,2BACNuH,SACER,EAAYS,wCACTC,2BAEP,MAGF,IAAK,QAEDxB,EAAKI,wBAA0BU,EAAYW,YAAYT,WAEvDhB,EAAK0B,oBAAoBZ,EAAYW,YAAYE,eACjD3B,EAAKY,WAAW,aAElB,MAGF,IAAK,OACHZ,EAAKH,WAAWvC,OAAOsE,KACrBtD,KAAKuD,UAAU,CACb9H,KAAM,OACNiH,SAAWF,EAA0BgB,WAAWd,YAKpD,MAIF,QACEhB,EAAKJ,QAAQL,QAAQuB,GAI3B,CAAE,MAAAiB,GAEA,YADA/B,EAAKV,QAAQ,6BAA8B,CAAElC,MAAAA,GAE/C,CACF,EAEQ4E,KAAAA,sBAAwB,SAAC5E,GAC/B,IP9MgC6E,EAC5BrI,EOmNEsI,EAAe5D,KAAKuD,UAAU,CAClCM,kBPrN8BF,EO8MR7E,EAAMoB,KAAK,GAOqB5E,OPpNpDA,EAAS,IAAIH,WAAWwI,GAEX7I,OAAOgJ,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBzI,OOqNhC,cAAhBoG,EAAKM,QACPN,EAAKH,WAAWvC,OAAOsE,KAAKM,EAGhC,OAEQM,uBAAyB,SAAA9E,OAAGc,EAAId,EAAJc,KAChB,YAAdA,EAAKzE,MACPiG,EAAKY,WAAWpC,EAAKiE,SAAW,YAAc,WAElD,OAEQf,oBAAmB,SAAUgB,GAAiB,IAMjD,OALHxC,EAAKH,OAAO/C,KAAKA,KAAK2F,MAAQzC,EAAKO,OACnCP,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,qBAC7CmG,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CACnC9I,KAAM,SACNH,OAAQX,EAAoByJ,KAC3B/H,QAAAC,SACL,CAAC,MAAA8B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAlC,KAEOyG,aAA0B,WAAA,IAavB,OAXTd,EAAKS,WAAW,aAChBT,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,cAC7CoG,EAAKJ,OAAO/C,KAAKA,KAAK8F,6BACpB,KACA3C,EAAKJ,OAAO3F,QAAQ2I,YAAc,GAIpCC,WAAW,WACT7C,EAAKJ,OAAO/C,KAAKA,KAAK2F,MAAQxC,EAAKM,OACnCN,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA8B,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEO4C,CAAAA,EAAAA,KAAAA,QAAU,SAACjB,EAAiBjE,GAClCsE,QAAQxC,MAAMmC,EAASjE,GACvB4F,EAAKJ,QAAQN,QAAQjB,EAASjE,EAChC,OAEQ6I,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAc3J,OAChB,OACF,EAKA,IADA,IAAIkH,EAAS,EACJ/G,EAAI,EAAGA,EAAIwJ,EAAc3J,OAAQG,IACxC+G,GAAUyC,EAAcxJ,GAAK,IAI/B,OAFA+G,GAAUyC,EAAc3J,QAER,EAAI,EAAIkH,EAAS,EAAI,EAAIA,CAC3C,EAACjG,KAEM2I,MAAQ,WAAA,OAAMnD,EAAKH,WAAWtC,cAAc,OAE5C6F,UAAY,SAAArF,GACjBiC,EAAKS,OADqB1C,EAAN0C,MAEtB,EAACjG,KAEM6I,0BAA4B,WA
KjC,aAJArD,EAAKO,qBAALP,EAAKO,mBAAuB,IAAI9G,WAC9BuG,EAAKF,MAAMzF,SAASiJ,oBAEtBtD,EAAKF,MAAMzF,SAASkJ,qBAAqBvD,EAAKO,oBACvCP,EAAKO,kBACd,EAAC/F,KAEMgJ,2BAA6B,WAKlC,aAJAxD,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAI/G,WAC/BuG,EAAKD,OAAO1F,SAASiJ,oBAEvBtD,EAAKD,OAAO1F,SAASkJ,qBAAqBvD,EAAKQ,qBACxCR,EAAKQ,mBACd,OAEOiD,eAAiB,WACtB,OAAOzD,EAAKiD,gBAAgBjD,EAAKqD,4BACnC,OAEOK,gBAAkB,WACvB,OAAO1D,EAAKiD,gBAAgBjD,EAAKwD,6BACnC,EAhOmBhJ,KAAOoF,QAAPA,EACApF,KAAUqF,WAAVA,EACDrF,KAAKsF,MAALA,EACAtF,KAAMuF,OAANA,EAEhBvF,KAAKoF,QAAQR,UAAU,CAAE7B,eAAgBsC,EAAWtC,iBAEpD/C,KAAKqF,WAAWvC,OAAOc,iBAAiB,UAAW,SAAAhB,GACjD4C,EAAKa,QAAQzD,EACf,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,SAAAhB,GAC/C4C,EAAKW,aAAa,gBAClBX,EAAKV,QAAQ,eAAgBlC,EAC/B,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,WAC/C4B,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQP,cACf,GAEA7E,KAAKsF,MAAMxF,QAAQsI,KAAKe,UAAYnJ,KAAKwH,sBACzCxH,KAAKuF,OAAOzF,QAAQsI,KAAKe,UAAYnJ,KAAKgI,uBAC1ChI,KAAKmG,aAAa,YACpB,CAhCC,OAgCAhB,EA3DmBiE,aAAY,SAC9BhE,GAA2C,IAE3C,IAAMiE,EAAWC,EAAA,GACZ3E,EACAS,GAGLiE,EAAYpE,eAAe,CAAEa,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAApF,QAAAC,gCAE7BD,QAAAC,QACYT,EAAMM,OA5BE,OA4ByBO,KAAA,SAAA+I,GAAC,OAAhDjE,EAAKiE,EAA2CpJ,QAAAC,QAC7ByC,EAAW5C,OAAOmF,IAAQ5E,KAAAgJ,SAAAA,GAAC,OAA9CnE,EAAUmE,EAAoCrJ,QAAAC,QAC/BmC,EAAOtC,OAAOoF,EAAWnF,aAAWM,KAAA,SAAAiJ,GAEnD,WAAWtE,EAAakE,EAAahE,EAAYC,EAFjDC,EAAMkE,EAE0D,gEAPjChI,GAQhC,SAAQC,GAAO,IAAAgI,EAAAC,EAEM,OADpBN,EAAYpE,eAAe,CAAEa,OAAQ,iBACrC4D,OAAAA,EAAArE,IAAAqE,EAAYzH,QAAQ9B,QAAAC,QACduJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAO1H,SAAOzB,oBAAAoJ,EAAA,OAAAzJ,QAAAC,QACdwJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAQ3H,SAAOzB,gBACrB,MAAMkB,CAAM,EACd,EAAA,GACF,CAAC,MAAAQ,GAAA/B,OAAAA,QAAAgC,OAAAD,EAAA,CAAA,EAAAiD,CAAA,CA5BsB"}
|
package/dist/lib.umd.js
ADDED
@@ -0,0 +1,2 @@
|
|
1
|
+
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e||self).client={})}(this,function(e){function t(){return t=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},t.apply(null,arguments)}function n(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var r=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, 
float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(r),s=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,r=null;return Promise.resolve(function(s,i){try{var a=function(){function s(){return Promise.resolve(n.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var o=n.createMediaStreamSource(r=t),s=new AudioWorkletNode(n,"raw-audio-processor");return o.connect(a),a.connect(s),new e(n,a,s,r)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!i)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(s):s()}()}catch(e){return i(e)}return a&&a.then?a.then(void 0,i):a}(0,function(e){var t,o;throw null==(t=r)||t.getTracks().forEach(function(e){return e.stop()}),null==(o=n)||o.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),i=new Blob(['\n class AudioConcatProcessor extends 
AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(i),u=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(i=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(i),i.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(a)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,i,u,t)}))}catch(e){return o(e)}var i,u;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw 
null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function c(e){return!!e.type}var l=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(u=null!=(i="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN:null)?i:"wss://api.elevenlabs.io",l=null!=(a="undefined"!=typeof process?process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME:null)?a:"/v1/convai/conversation?agent_id=",n=new WebSocket(t.signedUrl?t.signedUrl:u+l+t.agentId),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);c(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var i,a,u,l;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),h={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}};e.Conversation=/*#__PURE__*/function(){function e(e,t,r,o){var s=this,i=this,a=this,u=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==s.mode&&(s.mode=e,s.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==s.status&&(s.status=e,s.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var t=JSON.parse(e.data);if(!c(t))return;switch(t.type){case"interruption":t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio();break;case"agent_response":s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.updateMode("speaking"));break;case"ping":s.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:s.options.onDebug(t)}}catch(t){return void s.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===s.status&&s.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&s.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return 
a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return u.updateMode("listening"),u.output.worklet.port.postMessage({type:"interrupt"}),u.output.gain.gain.exponentialRampToValueAtTime(1e-4,u.output.context.currentTime+2),setTimeout(function(){u.output.gain.gain.value=u.volume,u.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),s.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return s.connection.conversationId},this.setVolume=function(e){s.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=s.inputFrequencyData||(s.inputFrequencyData=new Uint8Array(s.input.analyser.frequencyBinCount)),s.input.analyser.getByteFrequencyData(s.inputFrequencyData),s.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=s.outputFrequencyData||(s.outputFrequencyData=new Uint8Array(s.output.analyser.frequencyBinCount)),s.output.analyser.getByteFrequencyData(s.outputFrequencyData),s.outputFrequencyData},this.getInputVolume=function(){return s.calculateVolume(s.getInputByteFrequencyData())},this.getOutputVolume=function(){return s.calculateVolume(s.getOutputByteFrequencyData())},this.options=e,this.connection=t,this.input=r,this.output=o,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){s.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){s.updateStatus("disconnected"),s.onError("Socket 
error",e)}),this.connection.socket.addEventListener("close",function(){s.updateStatus("disconnected"),s.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return e.startSession=function(n){try{var r=t({},h,n);r.onStatusChange({status:"connecting"});var o=null,i=null,a=null;return Promise.resolve(function(t,c){try{var h=Promise.resolve(s.create(16e3)).then(function(t){return o=t,Promise.resolve(l.create(n)).then(function(t){return i=t,Promise.resolve(u.create(i.sampleRate)).then(function(t){return new e(r,i,o,a=t)})})})}catch(e){return c(e)}return h&&h.then?h.then(void 0,c):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=i)||t.close(),Promise.resolve(null==(n=o)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=a)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},e}()});
|
2
|
+
//# sourceMappingURL=lib.umd.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"lib.umd.js","sources":["../src/utils/audio.ts","../src/utils/rawAudioProcessor.ts","../src/utils/input.ts","../src/utils/audioConcatProcessor.ts","../src/utils/output.ts","../src/utils/events.ts","../src/utils/connection.ts","../src/index.ts"],"sourcesContent":["export function arrayBufferToBase64(b: ArrayBufferLike) {\n const buffer = new Uint8Array(b);\n // @ts-ignore\n const base64Data = window.btoa(String.fromCharCode(...buffer));\n return base64Data;\n}\n\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binaryString = window.atob(base64);\n const len = binaryString.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes.buffer;\n}\n","const blob = new Blob(\n [\n `\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel's data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new 
Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor(\"raw-audio-processor\", RawAudioProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const rawAudioProcessor = URL.createObjectURL(blob);\n","import { rawAudioProcessor } from \"./rawAudioProcessor\";\n\nconst LIBSAMPLERATE_JS =\n \"https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js\";\n\nexport class Input {\n public static async create(sampleRate: number): Promise<Input> {\n let context: AudioContext | null = null;\n let inputStream: MediaStream | null = null;\n\n try {\n const supportsSampleRateConstraint =\n navigator.mediaDevices.getSupportedConstraints().sampleRate;\n\n context = new window.AudioContext(\n supportsSampleRateConstraint ? 
{ sampleRate } : {}\n );\n const analyser = context.createAnalyser();\n if (!supportsSampleRateConstraint) {\n await context.audioWorklet.addModule(LIBSAMPLERATE_JS);\n }\n await context.audioWorklet.addModule(rawAudioProcessor);\n\n inputStream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: sampleRate },\n echoCancellation: { ideal: true },\n },\n });\n\n const source = context.createMediaStreamSource(inputStream);\n const worklet = new AudioWorkletNode(context, \"raw-audio-processor\");\n\n source.connect(analyser);\n analyser.connect(worklet);\n\n return new Input(context, analyser, worklet, inputStream);\n } catch (error) {\n inputStream?.getTracks().forEach(track => track.stop());\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly worklet: AudioWorkletNode,\n public readonly inputStream: MediaStream\n ) {}\n\n public async close() {\n this.inputStream.getTracks().forEach(track => track.stop());\n await this.context.close();\n }\n}\n","const blob = new Blob(\n [\n `\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case \"buffer\":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case \"interrupt\":\n this.wasInterrupted = true;\n break;\n case \"clearInterrupted\":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n 
this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: \"process\", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor(\"audio-concat-processor\", AudioConcatProcessor);\n `,\n ],\n { type: \"application/javascript\" }\n);\n\nexport const audioConcatProcessor = URL.createObjectURL(blob);\n","import { audioConcatProcessor } from \"./audioConcatProcessor\";\n\nexport class Output {\n public static async create(sampleRate: number): Promise<Output> {\n let context: AudioContext | null = null;\n try {\n context = new AudioContext({ sampleRate });\n const analyser = context.createAnalyser();\n const gain = context.createGain();\n gain.connect(analyser);\n analyser.connect(context.destination);\n await context.audioWorklet.addModule(audioConcatProcessor);\n const worklet = new AudioWorkletNode(context, \"audio-concat-processor\");\n worklet.connect(gain);\n\n return new Output(context, analyser, gain, worklet);\n } catch (error) {\n context?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly context: AudioContext,\n public readonly analyser: AnalyserNode,\n public readonly gain: GainNode,\n public readonly worklet: AudioWorkletNode\n ) {}\n\n public async close() {\n await this.context.close();\n }\n}\n","export type UserTranscriptionEvent = {\n type: \"user_transcript\";\n user_transcription_event: { user_transcript: string };\n};\nexport type AgentResponseEvent = {\n type: \"agent_response\";\n agent_response_event: { agent_response: string };\n};\nexport type AudioEvent = {\n type: \"audio\";\n audio_event: {\n audio_base_64: string;\n event_id: number;\n };\n};\nexport type InterruptionEvent = {\n type: \"interruption\";\n interruption_event: 
{\n event_id: number;\n };\n};\nexport type InternalTentativeAgentResponseEvent = {\n type: \"internal_tentative_agent_response\";\n tentative_agent_response_internal_event: {\n tentative_agent_response: string;\n };\n};\nexport type ConfigEvent = {\n type: \"conversation_initiation_metadata\";\n conversation_initiation_metadata_event: {\n conversation_id: string;\n agent_output_audio_format: string;\n };\n};\nexport type PingEvent = {\n type: \"ping\";\n ping_event: {\n event_id: number;\n average_ping_ms?: number;\n };\n};\n\n// TODO correction missing\nexport type SocketEvent =\n | UserTranscriptionEvent\n | AgentResponseEvent\n | AudioEvent\n | InterruptionEvent\n | InternalTentativeAgentResponseEvent\n | ConfigEvent\n | PingEvent;\n\nexport function isValidSocketEvent(event: any): event is SocketEvent {\n return !!event.type;\n}\n","import { ConfigEvent, isValidSocketEvent } from \"./events\";\n\ndeclare const process: {\n env: {\n ELEVENLABS_CONVAI_SERVER_ORIGIN?: string;\n ELEVENLABS_CONVAI_SERVER_PATHNAME?: string;\n };\n};\n\nexport type SessionConfig =\n | { signedUrl: string; agentId?: undefined }\n | { agentId: string; signedUrl?: undefined };\n\nconst WSS_API_ORIGIN = \"wss://api.elevenlabs.io\";\nconst WSS_API_PATHNAME = \"/v1/convai/conversation?agent_id=\";\n\nexport class Connection {\n public static async create(config: SessionConfig): Promise<Connection> {\n let socket: WebSocket | null = null;\n\n try {\n const origin =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_ORIGIN\n : null) ?? WSS_API_ORIGIN;\n\n const pathname =\n (typeof process !== \"undefined\"\n ? process.env.ELEVENLABS_CONVAI_SERVER_PATHNAME\n : null) ?? WSS_API_PATHNAME;\n\n const url = config.signedUrl\n ? 
config.signedUrl\n : origin + pathname + config.agentId;\n\n socket = new WebSocket(url);\n const conversationConfig = await new Promise<\n ConfigEvent[\"conversation_initiation_metadata_event\"]\n >((resolve, reject) => {\n socket!.addEventListener(\"error\", reject);\n socket!.addEventListener(\"close\", reject);\n socket!.addEventListener(\n \"message\",\n (event: MessageEvent) => {\n const message = JSON.parse(event.data);\n\n if (!isValidSocketEvent(message)) {\n return;\n }\n\n if (message.type === \"conversation_initiation_metadata\") {\n resolve(message.conversation_initiation_metadata_event);\n } else {\n console.warn(\n \"First received message is not conversation metadata.\"\n );\n }\n },\n { once: true }\n );\n });\n\n const conversationId = conversationConfig.conversation_id;\n const sampleRate = parseInt(\n conversationConfig.agent_output_audio_format.replace(\"pcm_\", \"\")\n );\n\n return new Connection(socket, conversationId, sampleRate);\n } catch (error) {\n socket?.close();\n throw error;\n }\n }\n\n private constructor(\n public readonly socket: WebSocket,\n public readonly conversationId: string,\n public readonly sampleRate: number\n ) {}\n\n public close() {\n this.socket.close();\n }\n}\n","import { arrayBufferToBase64, base64ToArrayBuffer } from \"./utils/audio\";\nimport { Input } from \"./utils/input\";\nimport { Output } from \"./utils/output\";\nimport { Connection, SessionConfig } from \"./utils/connection\";\nimport { isValidSocketEvent, PingEvent } from \"./utils/events\";\n\nexport type { SocketEvent } from \"./utils/events\";\nexport type { SessionConfig } from \"./utils/connection\";\n\nexport type Role = \"user\" | \"ai\";\nexport type Mode = \"speaking\" | \"listening\";\nexport type Status =\n | \"connecting\"\n | \"connected\"\n | \"disconnecting\"\n | \"disconnected\";\nexport type Options = SessionConfig & Callbacks;\nexport type Callbacks = {\n onConnect: (props: { conversationId: string }) => void;\n onDisconnect: () => 
void;\n onMessage: (props: { message: string; source: Role }) => void;\n // internal debug events, not to be used\n onDebug: (props: any) => void;\n onError: (message: string, context?: any) => void;\n onStatusChange: (prop: { status: Status }) => void;\n onModeChange: (prop: { mode: Mode }) => void;\n};\n\nconst DEFAULT_SAMPLE_RATE = 16000;\n\nconst defaultCallbacks: Callbacks = {\n onConnect: () => {},\n onDisconnect: () => {},\n onError: () => {},\n onDebug: () => {},\n onMessage: () => {},\n onStatusChange: () => {},\n onModeChange: () => {},\n};\n\nexport class Conversation {\n public static async startSession(\n options: SessionConfig & Partial<Callbacks>\n ): Promise<Conversation> {\n const fullOptions: Options = {\n ...defaultCallbacks,\n ...options,\n };\n\n fullOptions.onStatusChange({ status: \"connecting\" });\n\n let input: Input | null = null;\n let connection: Connection | null = null;\n let output: Output | null = null;\n\n try {\n input = await Input.create(DEFAULT_SAMPLE_RATE);\n connection = await Connection.create(options);\n output = await Output.create(connection.sampleRate);\n\n return new Conversation(fullOptions, connection, input, output);\n } catch (error) {\n fullOptions.onStatusChange({ status: \"disconnected\" });\n connection?.close();\n await input?.close();\n await output?.close();\n throw error;\n }\n }\n\n private lastInterruptTimestamp: number = 0;\n private mode: Mode = \"listening\";\n private status: Status = \"connecting\";\n private inputFrequencyData?: Uint8Array;\n private outputFrequencyData?: Uint8Array;\n private volume: number = 1;\n\n private constructor(\n private readonly options: Options,\n private readonly connection: Connection,\n public readonly input: Input,\n public readonly output: Output\n ) {\n this.options.onConnect({ conversationId: connection.conversationId });\n\n this.connection.socket.addEventListener(\"message\", event => {\n this.onEvent(event);\n });\n 
this.connection.socket.addEventListener(\"error\", event => {\n this.updateStatus(\"disconnected\");\n this.onError(\"Socket error\", event);\n });\n this.connection.socket.addEventListener(\"close\", () => {\n this.updateStatus(\"disconnected\");\n this.options.onDisconnect();\n });\n\n this.input.worklet.port.onmessage = this.onInputWorkletMessage;\n this.output.worklet.port.onmessage = this.onOutputWorkletMessage;\n this.updateStatus(\"connected\");\n }\n\n public endSession = async () => {\n if (this.status !== \"connected\") return;\n this.updateStatus(\"disconnecting\");\n\n this.connection.close();\n await this.input.close();\n await this.output.close();\n\n this.updateStatus(\"disconnected\");\n };\n\n private updateMode = (mode: Mode) => {\n if (mode !== this.mode) {\n this.mode = mode;\n this.options.onModeChange({ mode });\n }\n };\n\n private updateStatus = (status: Status) => {\n if (status !== this.status) {\n this.status = status;\n this.options.onStatusChange({ status });\n }\n };\n\n private onEvent = (event: MessageEvent) => {\n try {\n const parsedEvent = JSON.parse(event.data);\n\n if (!isValidSocketEvent(parsedEvent)) {\n return;\n }\n\n switch (parsedEvent.type) {\n case \"interruption\": {\n if (parsedEvent.interruption_event) {\n this.lastInterruptTimestamp =\n parsedEvent.interruption_event.event_id;\n }\n this.fadeOutAudio();\n break;\n }\n\n case \"agent_response\": {\n this.options.onMessage({\n source: \"ai\",\n message: parsedEvent.agent_response_event.agent_response,\n });\n break;\n }\n\n case \"user_transcript\": {\n this.options.onMessage({\n source: \"user\",\n message: parsedEvent.user_transcription_event.user_transcript,\n });\n break;\n }\n\n case \"internal_tentative_agent_response\": {\n this.options.onDebug({\n type: \"tentative_agent_response\",\n response:\n parsedEvent.tentative_agent_response_internal_event\n .tentative_agent_response,\n });\n break;\n }\n\n case \"audio\": {\n if (\n this.lastInterruptTimestamp <= 
parsedEvent.audio_event.event_id!\n ) {\n this.addAudioBase64Chunk(parsedEvent.audio_event.audio_base_64);\n this.updateMode(\"speaking\");\n }\n break;\n }\n\n case \"ping\": {\n this.connection.socket.send(\n JSON.stringify({\n type: \"pong\",\n event_id: (parsedEvent as PingEvent).ping_event.event_id,\n })\n );\n // parsedEvent.ping_event.average_ping_ms can be used on client side, for\n // example to warn if ping is too high that experience might be degraded.\n break;\n }\n\n // unhandled events are expected to be internal events\n default: {\n this.options.onDebug(parsedEvent);\n break;\n }\n }\n } catch {\n this.onError(\"Failed to parse event data\", { event });\n return;\n }\n };\n\n private onInputWorkletMessage = (event: MessageEvent): void => {\n const rawAudioPcmData = event.data[0];\n const maxVolume = event.data[1];\n\n // check if the sound was loud enough, so we don't send unnecessary chunks\n // then forward audio to websocket\n //if (maxVolume > 0.001) {\n const audioMessage = JSON.stringify({\n user_audio_chunk: arrayBufferToBase64(rawAudioPcmData.buffer),\n //sample_rate: this.inputAudioContext?.inputSampleRate || this.inputSampleRate,\n });\n if (this.status === \"connected\") {\n this.connection.socket.send(audioMessage);\n }\n //}\n };\n\n private onOutputWorkletMessage = ({ data }: MessageEvent): void => {\n if (data.type === \"process\") {\n this.updateMode(data.finished ? 
\"listening\" : \"speaking\");\n }\n };\n\n private addAudioBase64Chunk = async (chunk: string) => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n this.output.worklet.port.postMessage({\n type: \"buffer\",\n buffer: base64ToArrayBuffer(chunk),\n });\n };\n\n private fadeOutAudio = async () => {\n // mute agent\n this.updateMode(\"listening\");\n this.output.worklet.port.postMessage({ type: \"interrupt\" });\n this.output.gain.gain.exponentialRampToValueAtTime(\n 0.0001,\n this.output.context.currentTime + 2\n );\n\n // reset volume back\n setTimeout(() => {\n this.output.gain.gain.value = this.volume;\n this.output.worklet.port.postMessage({ type: \"clearInterrupted\" });\n }, 2000); // Adjust the duration as needed\n };\n\n private onError = (message: string, context?: any) => {\n console.error(message, context);\n this.options.onError(message, context);\n };\n\n private calculateVolume = (frequencyData: Uint8Array) => {\n if (frequencyData.length === 0) {\n return 0;\n }\n\n // TODO: Currently this averages all frequencies, but we should probably\n // bias towards the frequencies that are more typical for human voice\n let volume = 0;\n for (let i = 0; i < frequencyData.length; i++) {\n volume += frequencyData[i] / 255;\n }\n volume /= frequencyData.length;\n\n return volume < 0 ? 0 : volume > 1 ? 
1 : volume;\n };\n\n public getId = () => this.connection.conversationId;\n\n public setVolume = ({ volume }: { volume: number }) => {\n this.volume = volume;\n };\n\n public getInputByteFrequencyData = () => {\n this.inputFrequencyData ??= new Uint8Array(\n this.input.analyser.frequencyBinCount\n );\n this.input.analyser.getByteFrequencyData(this.inputFrequencyData);\n return this.inputFrequencyData;\n };\n\n public getOutputByteFrequencyData = () => {\n this.outputFrequencyData ??= new Uint8Array(\n this.output.analyser.frequencyBinCount\n );\n this.output.analyser.getByteFrequencyData(this.outputFrequencyData);\n return this.outputFrequencyData;\n };\n\n public getInputVolume = () => {\n return this.calculateVolume(this.getInputByteFrequencyData());\n };\n\n public getOutputVolume = () => {\n return this.calculateVolume(this.getOutputByteFrequencyData());\n };\n}\n"],"names":["base64ToArrayBuffer","base64","binaryString","window","atob","len","length","bytes","Uint8Array","i","charCodeAt","buffer","blob","Blob","type","rawAudioProcessor","URL","createObjectURL","Input","context","analyser","worklet","inputStream","this","create","sampleRate","Promise","resolve","_temp2","audioWorklet","addModule","then","navigator","mediaDevices","getUserMedia","audio","ideal","echoCancellation","_navigator$mediaDevic","source","createMediaStreamSource","AudioWorkletNode","connect","supportsSampleRateConstraint","getSupportedConstraints","AudioContext","createAnalyser","_temp","_catch","error","_inputStream","_context","getTracks","forEach","track","stop","close","e","reject","_proto","prototype","audioConcatProcessor","Output","gain","createGain","destination","isValidSocketEvent","event","Connection","socket","conversationId","config","origin","_ref","process","env","ELEVENLABS_CONVAI_SERVER_ORIGIN","pathname","_ref2","ELEVENLABS_CONVAI_SERVER_PATHNAME","WebSocket","signedUrl","agentId","addEventListener","message","JSON","parse","data","conversation_initiation_metadata_event",
"console","warn","once","conversationConfig","conversation_id","parseInt","agent_output_audio_format","replace","_socket","defaultCallbacks","onConnect","onDisconnect","onError","onDebug","onMessage","onStatusChange","onModeChange","Conversation","options","connection","input","output","_this2","_this","_this3","_this4","lastInterruptTimestamp","mode","status","inputFrequencyData","outputFrequencyData","volume","endSession","updateStatus","updateMode","onEvent","parsedEvent","interruption_event","event_id","fadeOutAudio","agent_response_event","agent_response","user_transcription_event","user_transcript","response","tentative_agent_response_internal_event","tentative_agent_response","audio_event","addAudioBase64Chunk","audio_base_64","send","stringify","ping_event","_unused","onInputWorkletMessage","b","audioMessage","user_audio_chunk","btoa","String","fromCharCode","apply","onOutputWorkletMessage","finished","chunk","value","port","postMessage","exponentialRampToValueAtTime","currentTime","setTimeout","calculateVolume","frequencyData","getId","setVolume","getInputByteFrequencyData","frequencyBinCount","getByteFrequencyData","getOutputByteFrequencyData","getInputVolume","getOutputVolume","onmessage","startSession","fullOptions","_extends","_Input$create","_Connection$create","_Output$create","_connection","_input","_output"],"mappings":"wbAOgB,SAAAA,EAAoBC,GAIlC,IAHA,IAAMC,EAAeC,OAAOC,KAAKH,GAC3BI,EAAMH,EAAaI,OACnBC,EAAQ,IAAIC,WAAWH,GACpBI,EAAI,EAAGA,EAAIJ,EAAKI,IACvBF,EAAME,GAAKP,EAAaQ,WAAWD,GAErC,OAAOF,EAAMI,MACf,CCfA,IAAMC,EAAO,IAAIC,KACf,CA2DC,i6EACD,CAAEC,KAAM,2BAGGC,EAAoBC,IAAIC,gBAAgBL,GC3DxCM,eAuCX,WAAA,SAAAA,EACkBC,EACAC,EACAC,EACAC,GAHAH,KAAAA,aACAC,EAAAA,KAAAA,cACAC,EAAAA,KAAAA,oBACAC,iBAAA,EAHAC,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAOF,QAAPA,EACAE,KAAWD,YAAXA,CACf,CAKF,OALGJ,EA3CgBM,OAAA,SAAOC,GAAkB,IAC3C,IAAIN,EAA+B,KAC/BG,EAAkC,KAAK,OAAAI,QAAAC,gCAEvC,WAAA,SAAAC,IAAAF,OAAAA,QAAAC,QAWIR,EAAQU,aAAaC,UAAUf,IAAkBgB,KAAA,WAAA,OAAAL,QAAAC,QAEnCK,UAAUC,aAAaC
,aAAa,CACtDC,MAAO,CACLV,WAAY,CAAEW,MAAOX,GACrBY,iBAAkB,CAAED,OAAO,OAE7BL,KAAA,SAAAO,GAEF,IAAMC,EAASpB,EAAQqB,wBAPvBlB,EAAWgB,GAQLjB,EAAU,IAAIoB,iBAAiBtB,EAAS,uBAK9C,OAHAoB,EAAOG,QAAQtB,GACfA,EAASsB,QAAQrB,GAEN,IAAAH,EAAMC,EAASC,EAAUC,EAASC,EAAa,EAzB1D,EAAA,CAAA,IAAMqB,EACJX,UAAUC,aAAaW,0BAA0BnB,WAK7CL,GAHND,EAAU,IAAIhB,OAAO0C,aACnBF,EAA+B,CAAElB,WAAAA,GAAe,CAAE,IAE3BqB,iBAAiBC,EACtC,WAAA,IAACJ,EAA4BjB,OAAAA,QAAAC,QACzBR,EAAQU,aAAaC,UAhBjC,sGAgB4DC,KAAAgB,WAAAA,EAAAA,CADpD,GACoDA,OAAAA,GAAAA,EAAAhB,KAAAgB,EAAAhB,KAAAH,GAAAA,GAkB1D,6DA7B2CoB,CAEvC,EA2BKC,SAAAA,GAAO,IAAAC,EAAAC,EAGd,MAFW,OAAXD,EAAA5B,IAAA4B,EAAaE,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GACtDJ,OAAAA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,GAAA,OAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAzC,EAAA0C,UASYJ,iBAAK,IAC4C,OAA5DjC,KAAKD,YAAY8B,YAAYC,QAAQ,SAAAC,GAAK,OAAIA,EAAMC,MAAM,GAAE7B,QAAAC,QAA5DJ,KACWJ,QAAQqC,SAAOzB,KAAA,WAAA,EAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAvC,CAAA,CAVD,GC5CIN,EAAO,IAAIC,KACf,CA6DC,03DACD,CAAEC,KAAM,2BAGG+C,EAAuB7C,IAAIC,gBAAgBL,GChE3CkD,eAoBX,WAAA,SAAAA,EACkB3C,EACAC,EACA2C,EACA1C,GAAyBE,KAHzBJ,aACAC,EAAAA,KAAAA,cACA2C,EAAAA,KAAAA,UACA1C,EAAAA,KAAAA,aAHA,EAAAE,KAAOJ,QAAPA,EACAI,KAAQH,SAARA,EACAG,KAAIwC,KAAJA,EACAxC,KAAOF,QAAPA,CACf,QAACyC,EAxBgBtC,OAAM,SAACC,GAAkB,IAC3C,IAAIN,EAA+B,KAAK,OAAAO,QAAAC,iCAGhCP,GADND,EAAU,IAAI0B,aAAa,CAAEpB,WAAAA,KACJqB,kBACnBiB,EAAO5C,EAAQ6C,cAChBtB,QAAQtB,GACbA,EAASsB,QAAQvB,EAAQ8C,aAAavC,QAAAC,QAChCR,EAAQU,aAAaC,UAAU+B,IAAqB9B,KAC1D,WAAA,IAAMV,EAAU,IAAIoB,iBAAiBtB,EAAS,0BAG9C,OAFAE,EAAQqB,QAAQqB,GAET,IAAID,EAAO3C,EAASC,EAAU2C,EAAM1C,EAAS,yBAXd,IAGhCD,EACA2C,sCAJgCf,CAAA,EAY/BC,SAAAA,GAAO,IAAAE,EAEd,MADO,OAAPA,EAAAhC,IAAAgC,EAASK,QACHP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAAE,CAAAA,EAAAG,EAAAF,UASYJ,MAAK,WAAA,IACN9B,OAAAA,QAAAC,QAAJJ,KAAKJ,QAAQqC,SAAOzB,kBAC5B,CAAC,MAAA0B,GAAA,OAAA/B,QAAAgC,OAAAD,KAAAK,CAAA,CATD,GC8Bc,SAAAI,EAAmBC,GACjC,QAASA,EAAMrD,IACjB,CCzCA,IAGasD,eAAU,WA0DrB,SAAAA,EACkBC,EACAC,EACA7C,QAFA4C,YAAA,EAAA9C,KACA+C
,oBAAA,EAAA/C,KACAE,gBAAA,EAFAF,KAAM8C,OAANA,EACA9C,KAAc+C,eAAdA,EACA/C,KAAUE,WAAVA,CACf,CAIF,OAJG2C,EA7DgB5C,OAAM,SAAC+C,OACzB,IAAIF,EAA2B,KAAK,OAAA3C,QAAAC,iCAG5B6C,SAAMC,EACU,oBAAZC,QACJA,QAAQC,IAAIC,gCACZ,MAAIH,EAXO,0BAaXI,EAGIC,OAHIA,EACQ,oBAAZJ,QACJA,QAAQC,IAAII,kCACZ,MAAID,EAfS,oCAqBnBT,EAAS,IAAIW,UAJDT,EAAOU,UACfV,EAAOU,UACPT,EAASK,EAAWN,EAAOW,SAEHxD,QAAAC,QACK,IAAID,QAEnC,SAACC,EAAS+B,GACVW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBAAiB,QAASzB,GAClCW,EAAQc,iBACN,UACA,SAAChB,GACC,IAAMiB,EAAUC,KAAKC,MAAMnB,EAAMoB,MAE5BrB,EAAmBkB,KAIH,qCAAjBA,EAAQtE,KACVa,EAAQyD,EAAQI,wCAEhBC,QAAQC,KACN,wDAGN,EACA,CAAEC,MAAM,GAEZ,IAAE5D,KAxBI6D,SAAAA,GA0BN,IAAMtB,EAAiBsB,EAAmBC,gBACpCpE,EAAaqE,SACjBF,EAAmBG,0BAA0BC,QAAQ,OAAQ,KAG/D,OAAO,IAAI5B,EAAWC,EAAQC,EAAgB7C,EAAY,yBA/CxD,IAAAgD,EAAAK,EACIN,EAKAK,sCAR4B7B,CAEhC,EAgDKC,SAAAA,GAAO,IAAAgD,EAEd,MADAA,OAAAA,EAAA5B,IAAA4B,EAAQzC,QACFP,CACR,GACF,CAAC,MAAAQ,UAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAW,EAAAR,UAQMJ,MAAA,WACLjC,KAAK8C,OAAOb,OACd,EAACY,CAAA,CAlEoB,GCcjB8B,EAA8B,CAClCC,UAAW,WAAQ,EACnBC,aAAc,WAAK,EACnBC,QAAS,WAAK,EACdC,QAAS,WAAQ,EACjBC,UAAW,aACXC,eAAgB,WAAK,EACrBC,aAAc,0CAGS,WAqCvB,SAAAC,EACmBC,EACAC,EACDC,EACAC,GAAcC,IAAAA,OAAAC,EAsB1BzF,KAAI0F,EA8HR1F,KAAI2F,EAUJ3F,KAAIA,KAjKaoF,aACAC,EAAAA,KAAAA,uBACDC,WAAA,EAAAtF,KACAuF,YAXVK,EAAAA,KAAAA,uBAAiC,EAAC5F,KAClC6F,KAAa,iBACbC,OAAiB,aACjBC,KAAAA,+BACAC,yBAAmB,EAAAhG,KACnBiG,OAAiB,OA2BlBC,WAAU,WAAA,IACf,MAAoB,cAAhBT,EAAKK,OAAwB3F,QAAAC,WACjCqF,EAAKU,aAAa,iBAElBV,EAAKJ,WAAWpD,QAAQ9B,QAAAC,QAClBqF,EAAKH,MAAMrD,SAAOzB,uBAAAL,QAAAC,QAClBqF,EAAKF,OAAOtD,SAAOzB,KAAA,WAEzBiF,EAAKU,aAAa,eAAgB,EACpC,GAAA,CAAC,MAAAjE,UAAA/B,QAAAgC,OAAAD,UAEOkE,WAAa,SAACP,GAChBA,IAASL,EAAKK,OAChBL,EAAKK,KAAOA,EACZL,EAAKJ,QAAQF,aAAa,CAAEW,KAAAA,IAEhC,EAAC7F,KAEOmG,aAAe,SAACL,GAClBA,IAAWN,EAAKM,SAClBN,EAAKM,OAASA,EACdN,EAAKJ,QAAQH,eAAe,CAAEa,OAAAA,IAElC,EAEQO,KAAAA,QAAU,SAACzD,GACjB,IACE,IAAM0D,EAAcxC,KAAKC,MAAMnB,EAAMoB,MAErC,IAAKrB,EAAmB2D,GACtB,OAGF,OAAQA,EAAY/G,MAClB,IAAK,eACC+G,EAAYC,qBACdf,EAAKI,uBACHU,EAAYC,mBAA
mBC,UAEnChB,EAAKiB,eACL,MAGF,IAAK,iBACHjB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,KACR6C,QAASyC,EAAYI,qBAAqBC,iBAE5C,MAGF,IAAK,kBACHnB,EAAKJ,QAAQJ,UAAU,CACrBhE,OAAQ,OACR6C,QAASyC,EAAYM,yBAAyBC,kBAEhD,MAGF,IAAK,oCACHrB,EAAKJ,QAAQL,QAAQ,CACnBxF,KAAM,2BACNuH,SACER,EAAYS,wCACTC,2BAEP,MAGF,IAAK,QAEDxB,EAAKI,wBAA0BU,EAAYW,YAAYT,WAEvDhB,EAAK0B,oBAAoBZ,EAAYW,YAAYE,eACjD3B,EAAKY,WAAW,aAElB,MAGF,IAAK,OACHZ,EAAKH,WAAWvC,OAAOsE,KACrBtD,KAAKuD,UAAU,CACb9H,KAAM,OACNiH,SAAWF,EAA0BgB,WAAWd,YAKpD,MAIF,QACEhB,EAAKJ,QAAQL,QAAQuB,GAI3B,CAAE,MAAAiB,GAEA,YADA/B,EAAKV,QAAQ,6BAA8B,CAAElC,MAAAA,GAE/C,CACF,EAEQ4E,KAAAA,sBAAwB,SAAC5E,GAC/B,IP9MgC6E,EAC5BrI,EOmNEsI,EAAe5D,KAAKuD,UAAU,CAClCM,kBPrN8BF,EO8MR7E,EAAMoB,KAAK,GAOqB5E,OPpNpDA,EAAS,IAAIH,WAAWwI,GAEX7I,OAAOgJ,KAAKC,OAAOC,aAAYC,MAAnBF,OAAuBzI,OOqNhC,cAAhBoG,EAAKM,QACPN,EAAKH,WAAWvC,OAAOsE,KAAKM,EAGhC,OAEQM,uBAAyB,SAAA9E,OAAGc,EAAId,EAAJc,KAChB,YAAdA,EAAKzE,MACPiG,EAAKY,WAAWpC,EAAKiE,SAAW,YAAc,WAElD,OAEQf,oBAAmB,SAAUgB,GAAiB,IAMjD,OALHxC,EAAKH,OAAO/C,KAAKA,KAAK2F,MAAQzC,EAAKO,OACnCP,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,qBAC7CmG,EAAKH,OAAOzF,QAAQsI,KAAKC,YAAY,CACnC9I,KAAM,SACNH,OAAQX,EAAoByJ,KAC3B/H,QAAAC,SACL,CAAC,MAAA8B,GAAA,OAAA/B,QAAAgC,OAAAD,EAAA,CAAA,EAAAlC,KAEOyG,aAA0B,WAAA,IAavB,OAXTd,EAAKS,WAAW,aAChBT,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,cAC7CoG,EAAKJ,OAAO/C,KAAKA,KAAK8F,6BACpB,KACA3C,EAAKJ,OAAO3F,QAAQ2I,YAAc,GAIpCC,WAAW,WACT7C,EAAKJ,OAAO/C,KAAKA,KAAK2F,MAAQxC,EAAKM,OACnCN,EAAKJ,OAAOzF,QAAQsI,KAAKC,YAAY,CAAE9I,KAAM,oBAC/C,EAAG,KAAMY,QAAAC,SACX,CAAC,MAAA8B,GAAA/B,OAAAA,QAAAgC,OAAAD,EAEO4C,CAAAA,EAAAA,KAAAA,QAAU,SAACjB,EAAiBjE,GAClCsE,QAAQxC,MAAMmC,EAASjE,GACvB4F,EAAKJ,QAAQN,QAAQjB,EAASjE,EAChC,OAEQ6I,gBAAkB,SAACC,GACzB,GAA6B,IAAzBA,EAAc3J,OAChB,OACF,EAKA,IADA,IAAIkH,EAAS,EACJ/G,EAAI,EAAGA,EAAIwJ,EAAc3J,OAAQG,IACxC+G,GAAUyC,EAAcxJ,GAAK,IAI/B,OAFA+G,GAAUyC,EAAc3J,QAER,EAAI,EAAIkH,EAAS,EAAI,EAAIA,CAC3C,EAACjG,KAEM2I,MAAQ,WAAA,OAAMnD,EAAKH,WAAWtC,cAAc,OAE5C6F,UAAY,SAAArF,GACjBiC,EAAKS,OADqB1C,EAAN0C,MAEtB,EAACjG,KAEM6I,0BAA4B,WAKjC,a
AJArD,EAAKO,qBAALP,EAAKO,mBAAuB,IAAI9G,WAC9BuG,EAAKF,MAAMzF,SAASiJ,oBAEtBtD,EAAKF,MAAMzF,SAASkJ,qBAAqBvD,EAAKO,oBACvCP,EAAKO,kBACd,EAAC/F,KAEMgJ,2BAA6B,WAKlC,aAJAxD,EAAKQ,sBAALR,EAAKQ,oBAAwB,IAAI/G,WAC/BuG,EAAKD,OAAO1F,SAASiJ,oBAEvBtD,EAAKD,OAAO1F,SAASkJ,qBAAqBvD,EAAKQ,qBACxCR,EAAKQ,mBACd,OAEOiD,eAAiB,WACtB,OAAOzD,EAAKiD,gBAAgBjD,EAAKqD,4BACnC,OAEOK,gBAAkB,WACvB,OAAO1D,EAAKiD,gBAAgBjD,EAAKwD,6BACnC,EAhOmBhJ,KAAOoF,QAAPA,EACApF,KAAUqF,WAAVA,EACDrF,KAAKsF,MAALA,EACAtF,KAAMuF,OAANA,EAEhBvF,KAAKoF,QAAQR,UAAU,CAAE7B,eAAgBsC,EAAWtC,iBAEpD/C,KAAKqF,WAAWvC,OAAOc,iBAAiB,UAAW,SAAAhB,GACjD4C,EAAKa,QAAQzD,EACf,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,SAAAhB,GAC/C4C,EAAKW,aAAa,gBAClBX,EAAKV,QAAQ,eAAgBlC,EAC/B,GACA5C,KAAKqF,WAAWvC,OAAOc,iBAAiB,QAAS,WAC/C4B,EAAKW,aAAa,gBAClBX,EAAKJ,QAAQP,cACf,GAEA7E,KAAKsF,MAAMxF,QAAQsI,KAAKe,UAAYnJ,KAAKwH,sBACzCxH,KAAKuF,OAAOzF,QAAQsI,KAAKe,UAAYnJ,KAAKgI,uBAC1ChI,KAAKmG,aAAa,YACpB,CAhCC,OAgCAhB,EA3DmBiE,aAAY,SAC9BhE,GAA2C,IAE3C,IAAMiE,EAAWC,EAAA,GACZ3E,EACAS,GAGLiE,EAAYpE,eAAe,CAAEa,OAAQ,eAErC,IAAIR,EAAsB,KACtBD,EAAgC,KAChCE,EAAwB,KAAK,OAAApF,QAAAC,gCAE7BD,QAAAC,QACYT,EAAMM,OA5BE,OA4ByBO,KAAA,SAAA+I,GAAC,OAAhDjE,EAAKiE,EAA2CpJ,QAAAC,QAC7ByC,EAAW5C,OAAOmF,IAAQ5E,KAAAgJ,SAAAA,GAAC,OAA9CnE,EAAUmE,EAAoCrJ,QAAAC,QAC/BmC,EAAOtC,OAAOoF,EAAWnF,aAAWM,KAAA,SAAAiJ,GAEnD,WAAWtE,EAAakE,EAAahE,EAAYC,EAFjDC,EAAMkE,EAE0D,gEAPjChI,GAQhC,SAAQC,GAAO,IAAAgI,EAAAC,EAEM,OADpBN,EAAYpE,eAAe,CAAEa,OAAQ,iBACrC4D,OAAAA,EAAArE,IAAAqE,EAAYzH,QAAQ9B,QAAAC,QACduJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAO1H,SAAOzB,oBAAAoJ,EAAA,OAAAzJ,QAAAC,QACdwJ,OADcA,EACdrE,QAAAqE,EAAAA,EAAQ3H,SAAOzB,gBACrB,MAAMkB,CAAM,EACd,EAAA,GACF,CAAC,MAAAQ,GAAA/B,OAAAA,QAAAgC,OAAAD,EAAA,CAAA,EAAAiD,CAAA,CA5BsB"}
|
@@ -0,0 +1 @@
|
|
1
|
+
export declare const audioConcatProcessor: string;
|
@@ -0,0 +1,15 @@
|
|
1
|
+
export type SessionConfig = {
|
2
|
+
signedUrl: string;
|
3
|
+
agentId?: undefined;
|
4
|
+
} | {
|
5
|
+
agentId: string;
|
6
|
+
signedUrl?: undefined;
|
7
|
+
};
|
8
|
+
export declare class Connection {
|
9
|
+
readonly socket: WebSocket;
|
10
|
+
readonly conversationId: string;
|
11
|
+
readonly sampleRate: number;
|
12
|
+
static create(config: SessionConfig): Promise<Connection>;
|
13
|
+
private constructor();
|
14
|
+
close(): void;
|
15
|
+
}
|
@@ -0,0 +1,47 @@
|
|
1
|
+
export type UserTranscriptionEvent = {
|
2
|
+
type: "user_transcript";
|
3
|
+
user_transcription_event: {
|
4
|
+
user_transcript: string;
|
5
|
+
};
|
6
|
+
};
|
7
|
+
export type AgentResponseEvent = {
|
8
|
+
type: "agent_response";
|
9
|
+
agent_response_event: {
|
10
|
+
agent_response: string;
|
11
|
+
};
|
12
|
+
};
|
13
|
+
export type AudioEvent = {
|
14
|
+
type: "audio";
|
15
|
+
audio_event: {
|
16
|
+
audio_base_64: string;
|
17
|
+
event_id: number;
|
18
|
+
};
|
19
|
+
};
|
20
|
+
export type InterruptionEvent = {
|
21
|
+
type: "interruption";
|
22
|
+
interruption_event: {
|
23
|
+
event_id: number;
|
24
|
+
};
|
25
|
+
};
|
26
|
+
export type InternalTentativeAgentResponseEvent = {
|
27
|
+
type: "internal_tentative_agent_response";
|
28
|
+
tentative_agent_response_internal_event: {
|
29
|
+
tentative_agent_response: string;
|
30
|
+
};
|
31
|
+
};
|
32
|
+
export type ConfigEvent = {
|
33
|
+
type: "conversation_initiation_metadata";
|
34
|
+
conversation_initiation_metadata_event: {
|
35
|
+
conversation_id: string;
|
36
|
+
agent_output_audio_format: string;
|
37
|
+
};
|
38
|
+
};
|
39
|
+
export type PingEvent = {
|
40
|
+
type: "ping";
|
41
|
+
ping_event: {
|
42
|
+
event_id: number;
|
43
|
+
average_ping_ms?: number;
|
44
|
+
};
|
45
|
+
};
|
46
|
+
export type SocketEvent = UserTranscriptionEvent | AgentResponseEvent | AudioEvent | InterruptionEvent | InternalTentativeAgentResponseEvent | ConfigEvent | PingEvent;
|
47
|
+
export declare function isValidSocketEvent(event: any): event is SocketEvent;
|
@@ -0,0 +1,9 @@
|
|
1
|
+
export declare class Input {
|
2
|
+
readonly context: AudioContext;
|
3
|
+
readonly analyser: AnalyserNode;
|
4
|
+
readonly worklet: AudioWorkletNode;
|
5
|
+
readonly inputStream: MediaStream;
|
6
|
+
static create(sampleRate: number): Promise<Input>;
|
7
|
+
private constructor();
|
8
|
+
close(): Promise<void>;
|
9
|
+
}
|
@@ -0,0 +1,9 @@
|
|
1
|
+
export declare class Output {
|
2
|
+
readonly context: AudioContext;
|
3
|
+
readonly analyser: AnalyserNode;
|
4
|
+
readonly gain: GainNode;
|
5
|
+
readonly worklet: AudioWorkletNode;
|
6
|
+
static create(sampleRate: number): Promise<Output>;
|
7
|
+
private constructor();
|
8
|
+
close(): Promise<void>;
|
9
|
+
}
|
@@ -0,0 +1 @@
|
|
1
|
+
export declare const rawAudioProcessor: string;
|
package/package.json
ADDED
@@ -0,0 +1,47 @@
|
|
1
|
+
{
|
2
|
+
"name": "@11labs/client",
|
3
|
+
"version": "0.0.0",
|
4
|
+
"description": "ElevenLabs JavaScript Client Library",
|
5
|
+
"main": "./dist/lib.umd.js",
|
6
|
+
"module": "./dist/lib.module.js",
|
7
|
+
"source": "src/index.ts",
|
8
|
+
"type": "module",
|
9
|
+
"unpkg": "./dist/lib.umd.js",
|
10
|
+
"types": "./dist/index.d.ts",
|
11
|
+
"exports": {
|
12
|
+
".": {
|
13
|
+
"types": "./dist/index.d.ts",
|
14
|
+
"import": "./dist/lib.modern.js",
|
15
|
+
"require": "./dist/lib.cjs"
|
16
|
+
}
|
17
|
+
},
|
18
|
+
"keywords": [],
|
19
|
+
"author": "ElevenLabs",
|
20
|
+
"license": "MIT",
|
21
|
+
"devDependencies": {
|
22
|
+
"@types/node-wav": "^0.0.3",
|
23
|
+
"@vitest/browser": "^2.0.5",
|
24
|
+
"eslint": "^9.8.0",
|
25
|
+
"microbundle": "^0.15.1",
|
26
|
+
"mock-socket": "^9.3.1",
|
27
|
+
"node-wav": "^0.0.2",
|
28
|
+
"playwright": "^1.46.1",
|
29
|
+
"typescript": "^5.5.4",
|
30
|
+
"vitest": "^2.0.5"
|
31
|
+
},
|
32
|
+
"repository": {
|
33
|
+
"type": "git",
|
34
|
+
"url": "git+https://github.com/elevenlabs/packages.git",
|
35
|
+
"directory": "packages/client"
|
36
|
+
},
|
37
|
+
"scripts": {
|
38
|
+
"build": "BROWSERSLIST_ENV=modern microbundle --jsx React.createElement --jsxFragment React.Fragment --jsxImportSource react src/index.ts",
|
39
|
+
"clean": "rm -rf ./dist",
|
40
|
+
"dev": "npm run clean && BROWSERSLIST_ENV=development microbundle --jsx React.createElement --jsxFragment React.Fragment --jsxImportSource react src/index.ts -w -f modern",
|
41
|
+
"lint": "npm run lint:ts && npm run lint:es",
|
42
|
+
"lint:ts": "tsc --noEmit --skipLibCheck",
|
43
|
+
"lint:es": "npx eslint .",
|
44
|
+
"lint:prettier": "prettier 'src/**/*.ts'",
|
45
|
+
"test": "vitest"
|
46
|
+
}
|
47
|
+
}
|