@convai/web-sdk 0.2.3-beta.1 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1025 -529
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -1,6 +1,47 @@
|
|
|
1
1
|
# @convai/web-sdk
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
[![npm version](https://img.shields.io/npm/v/@convai/web-sdk.svg)](https://www.npmjs.com/package/@convai/web-sdk)
|
|
4
|
+
|
|
5
|
+
JavaScript/TypeScript SDK for building AI voice assistants with real-time audio/video streaming. Drop-in widgets for **React** and **Vanilla JavaScript/TypeScript** with customizable UI components.
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## 📑 Table of Contents
|
|
10
|
+
|
|
11
|
+
- [Installation](#installation)
|
|
12
|
+
- [Quick Start](#quick-start)
|
|
13
|
+
- [React - ConvaiWidget](#react---convaiwidget)
|
|
14
|
+
- [Vanilla JS/TS - ConvaiWidget](#vanilla-jsts---convaiwidget)
|
|
15
|
+
- [Core Concepts](#core-concepts)
|
|
16
|
+
- [React SDK](#react-sdk)
|
|
17
|
+
- [useConvaiClient Hook](#useconvaiclient-hook)
|
|
18
|
+
- [AudioRenderer Component](#audiorenderer-component)
|
|
19
|
+
- [AudioContext](#audiocontext)
|
|
20
|
+
- [React Exports Reference](#react-exports-reference)
|
|
21
|
+
- [Vanilla SDK](#vanilla-sdk)
|
|
22
|
+
- [ConvaiClient Class](#convaiclient-class)
|
|
23
|
+
- [AudioRenderer Class](#audiorenderer-class)
|
|
24
|
+
- [Vanilla Exports Reference](#vanilla-exports-reference)
|
|
25
|
+
- [Video & Screen Share](#video--screen-share)
|
|
26
|
+
- [Critical Requirements](#critical-requirements)
|
|
27
|
+
- [Enabling Video](#enabling-video)
|
|
28
|
+
- [Enabling Screen Share](#enabling-screen-share)
|
|
29
|
+
- [Building Custom UIs](#building-custom-uis)
|
|
30
|
+
- [Custom Chat Interface](#custom-chat-interface)
|
|
31
|
+
- [Audio Visualizer](#audio-visualizer)
|
|
32
|
+
- [Message Types](#message-types)
|
|
33
|
+
- [API Reference](#api-reference)
|
|
34
|
+
- [Configuration](#configuration)
|
|
35
|
+
- [Connection Management](#connection-management)
|
|
36
|
+
- [Messaging](#messaging)
|
|
37
|
+
- [Audio Controls](#audio-controls)
|
|
38
|
+
- [Video Controls](#video-controls)
|
|
39
|
+
- [Screen Share Controls](#screen-share-controls)
|
|
40
|
+
- [Getting Credentials](#getting-credentials)
|
|
41
|
+
- [TypeScript Support](#typescript-support)
|
|
42
|
+
- [Support](#support)
|
|
43
|
+
|
|
44
|
+
---
|
|
4
45
|
|
|
5
46
|
## Installation
|
|
6
47
|
|
|
@@ -8,14 +49,25 @@ JavaScript/TypeScript SDK for Convai AI voice assistants. Build voice-powered AI
|
|
|
8
49
|
npm install @convai/web-sdk
|
|
9
50
|
```
|
|
10
51
|
|
|
11
|
-
|
|
52
|
+
**Peer Dependencies (React only):**
|
|
12
53
|
|
|
13
|
-
|
|
54
|
+
```bash
|
|
55
|
+
npm install react@^18.0.0 react-dom@^18.0.0
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
---
|
|
59
|
+
|
|
60
|
+
## Quick Start
|
|
61
|
+
|
|
62
|
+
### React - ConvaiWidget
|
|
63
|
+
|
|
64
|
+
The `ConvaiWidget` is a complete, pre-built chat interface with voice/video capabilities.
|
|
14
65
|
|
|
15
66
|
```tsx
|
|
16
|
-
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
|
|
67
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
17
68
|
|
|
18
69
|
function App() {
|
|
70
|
+
// Initialize the Convai client
|
|
19
71
|
const convaiClient = useConvaiClient({
|
|
20
72
|
apiKey: "your-api-key",
|
|
21
73
|
characterId: "your-character-id",
|
|
@@ -25,7 +77,166 @@ function App() {
|
|
|
25
77
|
}
|
|
26
78
|
```
|
|
27
79
|
|
|
28
|
-
|
|
80
|
+
**That's it!** The widget auto-connects on first user interaction and handles all UI/audio for you.
|
|
81
|
+
|
|
82
|
+
---
|
|
83
|
+
|
|
84
|
+
## 🤖 For AI Code Generators (v0, Lovable, Bolt, etc.)
|
|
85
|
+
|
|
86
|
+
**If you're using an AI coding assistant to add Convai to your project, use this exact template to avoid errors:**
|
|
87
|
+
|
|
88
|
+
### Copy-Paste Template (Works Every Time)
|
|
89
|
+
|
|
90
|
+
```tsx
|
|
91
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
92
|
+
|
|
93
|
+
export default function App() {
|
|
94
|
+
// Step 1: Create the client with your credentials
|
|
95
|
+
const convaiClient = useConvaiClient({
|
|
96
|
+
apiKey: "your-api-key-here",
|
|
97
|
+
characterId: "your-character-id-here"
|
|
98
|
+
});
|
|
99
|
+
|
|
100
|
+
// Step 2: Pass ONLY the client to the widget
|
|
101
|
+
return <ConvaiWidget convaiClient={convaiClient} />;
|
|
102
|
+
}
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
### Common Mistakes AI Tools Make
|
|
106
|
+
|
|
107
|
+
❌ **DON'T DO THIS:**
|
|
108
|
+
```tsx
|
|
109
|
+
// Wrong: Passing props directly to ConvaiWidget
|
|
110
|
+
<ConvaiWidget apiKey="..." characterId="..." />
|
|
111
|
+
|
|
112
|
+
// Wrong: Stringifying the client
|
|
113
|
+
<ConvaiWidget convaiClient={JSON.stringify(convaiClient)} />
|
|
114
|
+
|
|
115
|
+
// Wrong: Spreading client properties
|
|
116
|
+
<ConvaiWidget {...convaiClient} />
|
|
117
|
+
|
|
118
|
+
// Wrong: Using client in string context
|
|
119
|
+
const info = `Client: ${convaiClient}`; // "Cannot convert object to primitive value"
|
|
120
|
+
|
|
121
|
+
// Wrong: Passing client through env vars
|
|
122
|
+
const client = process.env.CONVAI_CLIENT; // This won't work
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
✅ **DO THIS:**
|
|
126
|
+
```tsx
|
|
127
|
+
// Correct: Client created in component, passed as object
|
|
128
|
+
const convaiClient = useConvaiClient({
|
|
129
|
+
apiKey: "your-api-key",
|
|
130
|
+
characterId: "your-character-id"
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
return <ConvaiWidget convaiClient={convaiClient} />;
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
### If You Get "Cannot convert object to primitive value"
|
|
137
|
+
|
|
138
|
+
This error means you're using the client object in a primitive context. Check for:
|
|
139
|
+
|
|
140
|
+
1. **String concatenation:**
|
|
141
|
+
```tsx
|
|
142
|
+
// ❌ Wrong
|
|
143
|
+
console.log("Client: " + convaiClient);
|
|
144
|
+
|
|
145
|
+
// ✅ Correct
|
|
146
|
+
console.log("Connected:", convaiClient.state.isConnected);
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
2. **Template literals:**
|
|
150
|
+
```tsx
|
|
151
|
+
// ❌ Wrong
|
|
152
|
+
const text = `Client: ${convaiClient}`;
|
|
153
|
+
|
|
154
|
+
// ✅ Correct
|
|
155
|
+
const text = `Status: ${convaiClient.state.agentState}`;
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
3. **Using as a key:**
|
|
159
|
+
```tsx
|
|
160
|
+
// ❌ Wrong
|
|
161
|
+
<div key={convaiClient}>...</div>
|
|
162
|
+
|
|
163
|
+
// ✅ Correct
|
|
164
|
+
<div key="convai-widget">...</div>
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
### Complete Working Example for AI Tools
|
|
168
|
+
|
|
169
|
+
```tsx
|
|
170
|
+
import React from 'react';
|
|
171
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
172
|
+
|
|
173
|
+
function App() {
|
|
174
|
+
// Initialize the client
|
|
175
|
+
const convaiClient = useConvaiClient({
|
|
176
|
+
apiKey: "your-api-key-here",
|
|
177
|
+
characterId: "your-character-id-here",
|
|
178
|
+
enableVideo: false, // Set to true if you need video
|
|
179
|
+
});
|
|
180
|
+
|
|
181
|
+
// Optional: Add debug logging
|
|
182
|
+
console.log("Convai client initialized:", !!convaiClient);
|
|
183
|
+
console.log("Connection status:", convaiClient?.state?.isConnected);
|
|
184
|
+
|
|
185
|
+
return (
|
|
186
|
+
<div style={{ width: '100vw', height: '100vh' }}>
|
|
187
|
+
<ConvaiWidget convaiClient={convaiClient} />
|
|
188
|
+
</div>
|
|
189
|
+
);
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
export default App;
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
### Prompt for AI Code Generators
|
|
196
|
+
|
|
197
|
+
**Use this prompt to get correct code:**
|
|
198
|
+
|
|
199
|
+
```
|
|
200
|
+
Add Convai voice chat to this app using @convai/web-sdk.
|
|
201
|
+
|
|
202
|
+
CRITICAL RULES:
|
|
203
|
+
1. Import: import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
204
|
+
2. Create client INSIDE component: const convaiClient = useConvaiClient({ apiKey: "...", characterId: "..." });
|
|
205
|
+
3. Pass ONLY client to widget: <ConvaiWidget convaiClient={convaiClient} />
|
|
206
|
+
4. DO NOT pass apiKey or characterId directly to ConvaiWidget
|
|
207
|
+
5. DO NOT stringify, spread, or destructure the client object
|
|
208
|
+
6. DO NOT use the client object in string contexts
|
|
209
|
+
|
|
210
|
+
Example:
|
|
211
|
+
const convaiClient = useConvaiClient({ apiKey: "KEY", characterId: "ID" });
|
|
212
|
+
return <ConvaiWidget convaiClient={convaiClient} />;
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
### Video & Screen Share with AI Tools
|
|
216
|
+
|
|
217
|
+
If you need video/screen share, add these TWO changes:
|
|
218
|
+
|
|
219
|
+
```tsx
|
|
220
|
+
// Step 1: Add enableVideo to client config
|
|
221
|
+
const convaiClient = useConvaiClient({
|
|
222
|
+
apiKey: "your-api-key",
|
|
223
|
+
characterId: "your-character-id",
|
|
224
|
+
enableVideo: true // ← Required for video features
|
|
225
|
+
});
|
|
226
|
+
|
|
227
|
+
// Step 2: Show controls in widget
|
|
228
|
+
<ConvaiWidget
|
|
229
|
+
convaiClient={convaiClient}
|
|
230
|
+
showVideo={true} // ← Shows video button
|
|
231
|
+
showScreenShare={true} // ← Shows screen share button
|
|
232
|
+
/>
|
|
233
|
+
```
|
|
234
|
+
|
|
235
|
+
**Without `enableVideo: true`, video and screen share will NOT work even if you show the buttons.**
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
### Vanilla JS/TS - ConvaiWidget
|
|
29
240
|
|
|
30
241
|
```typescript
|
|
31
242
|
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
|
|
@@ -36,7 +247,7 @@ const client = new ConvaiClient({
|
|
|
36
247
|
characterId: "your-character-id",
|
|
37
248
|
});
|
|
38
249
|
|
|
39
|
-
// Create widget - auto-connects on first user click
|
|
250
|
+
// Create and mount widget - auto-connects on first user click
|
|
40
251
|
const widget = createConvaiWidget(document.body, {
|
|
41
252
|
convaiClient: client,
|
|
42
253
|
});
|
|
@@ -45,770 +256,1025 @@ const widget = createConvaiWidget(document.body, {
|
|
|
45
256
|
widget.destroy();
|
|
46
257
|
```
|
|
47
258
|
|
|
48
|
-
|
|
259
|
+
---
|
|
260
|
+
|
|
261
|
+
## Core Concepts
|
|
49
262
|
|
|
50
|
-
###
|
|
263
|
+
### The Architecture
|
|
51
264
|
|
|
52
|
-
|
|
265
|
+
```
|
|
266
|
+
┌─────────────────────────────────────────────────┐
|
|
267
|
+
│ ConvaiWidget (UI Layer) │
|
|
268
|
+
│ ├─ Chat Interface │
|
|
269
|
+
│ ├─ Voice Mode │
|
|
270
|
+
│ └─ Video/Screen Share UI │
|
|
271
|
+
└─────────────────────────────────────────────────┘
|
|
272
|
+
▼
|
|
273
|
+
┌─────────────────────────────────────────────────┐
|
|
274
|
+
│ ConvaiClient (Core Logic) │
|
|
275
|
+
│ ├─ Connection Management │
|
|
276
|
+
│ ├─ Message Handling │
|
|
277
|
+
│ ├─ State Management │
|
|
278
|
+
│ └─ Audio/Video Controls │
|
|
279
|
+
└─────────────────────────────────────────────────┘
|
|
280
|
+
▼
|
|
281
|
+
┌─────────────────────────────────────────────────┐
|
|
282
|
+
│ WebRTC Room (Communication Layer) │
|
|
283
|
+
│ ├─ Real-time Audio/Video Streaming │
|
|
284
|
+
│ ├─ Track Management │
|
|
285
|
+
│ └─ Network Communication │
|
|
286
|
+
└─────────────────────────────────────────────────┘
|
|
287
|
+
▼
|
|
288
|
+
┌─────────────────────────────────────────────────┐
|
|
289
|
+
│ AudioRenderer (Critical for Playback) │
|
|
290
|
+
│ ├─ Attaches audio tracks to DOM │
|
|
291
|
+
│ ├─ Manages audio elements │
|
|
292
|
+
│ └─ Enables bot voice playback │
|
|
293
|
+
└─────────────────────────────────────────────────┘
|
|
294
|
+
```
|
|
53
295
|
|
|
54
|
-
|
|
296
|
+
### Key Principles
|
|
55
297
|
|
|
56
|
-
**
|
|
298
|
+
1. **ConvaiClient** - The brain. Manages connection, state, and communication with Convai servers.
|
|
299
|
+
2. **AudioRenderer** - **CRITICAL**: Without this, you won't hear the bot. It renders audio to the user's speakers.
|
|
300
|
+
3. **ConvaiWidget** - The complete UI. Uses both ConvaiClient and AudioRenderer internally.
|
|
301
|
+
4. **Connection Type** - Determines capabilities:
|
|
302
|
+
- `"audio"` (default) - Audio only
|
|
303
|
+
- `"video"` - Audio + Video + Screen Share
|
|
57
304
|
|
|
58
|
-
|
|
59
|
-
- `useCharacterInfo(characterId, apiKey)` - Fetch character metadata
|
|
60
|
-
- `useLocalCameraTrack()` - Get local camera track
|
|
305
|
+
---
|
|
61
306
|
|
|
62
|
-
|
|
307
|
+
## React SDK
|
|
63
308
|
|
|
64
|
-
|
|
309
|
+
### useConvaiClient Hook
|
|
65
310
|
|
|
66
|
-
**
|
|
311
|
+
**Purpose**: Returns a fully configured `ConvaiClient` instance with reactive state updates.
|
|
67
312
|
|
|
68
|
-
|
|
69
|
-
- `ConvaiClientState` - Client state interface
|
|
70
|
-
- `ChatMessage` - Message interface
|
|
71
|
-
- `IConvaiClient` - Client interface
|
|
72
|
-
- `AudioControls` - Audio control interface
|
|
73
|
-
- `VideoControls` - Video control interface
|
|
74
|
-
- `ScreenShareControls` - Screen share control interface
|
|
313
|
+
**When to Use**: Every React app using Convai needs this hook.
|
|
75
314
|
|
|
76
|
-
**
|
|
315
|
+
**What It Does**:
|
|
77
316
|
|
|
78
|
-
-
|
|
79
|
-
-
|
|
317
|
+
- Creates and manages a ConvaiClient instance
|
|
318
|
+
- Provides reactive state (connection, messages, activity)
|
|
319
|
+
- Handles connection lifecycle
|
|
320
|
+
- Exposes audio/video/screen share controls
|
|
321
|
+
|
|
322
|
+
```tsx
|
|
323
|
+
import { useConvaiClient } from "@convai/web-sdk/react";
|
|
324
|
+
|
|
325
|
+
function ChatbotWrapper() {
|
|
326
|
+
const convaiClient = useConvaiClient({
|
|
327
|
+
apiKey: "your-api-key",
|
|
328
|
+
characterId: "your-character-id",
|
|
329
|
+
enableVideo: false, // Default: audio only
|
|
330
|
+
startWithAudioOn: false, // Mic starts muted
|
|
331
|
+
});
|
|
332
|
+
|
|
333
|
+
// Access reactive state
|
|
334
|
+
const { state, chatMessages, userTranscription, isBotReady } = convaiClient;
|
|
335
|
+
|
|
336
|
+
// Use controls
|
|
337
|
+
const handleMute = () => convaiClient.audioControls.muteAudio();
|
|
338
|
+
const handleSend = () =>
|
|
339
|
+
convaiClient.sendUserTextMessage("Hello, character!");
|
|
340
|
+
|
|
341
|
+
return (
|
|
342
|
+
<div>
|
|
343
|
+
<p>Status: {state.agentState}</p>
|
|
344
|
+
<p>Messages: {chatMessages.length}</p>
|
|
345
|
+
<button onClick={handleMute}>Mute</button>
|
|
346
|
+
<button onClick={handleSend}>Send</button>
|
|
347
|
+
</div>
|
|
348
|
+
);
|
|
349
|
+
}
|
|
350
|
+
```
|
|
80
351
|
|
|
81
|
-
###
|
|
352
|
+
### AudioRenderer Component
|
|
82
353
|
|
|
83
|
-
**
|
|
354
|
+
**Purpose**: Renders remote audio tracks to the user's speakers.
|
|
84
355
|
|
|
85
|
-
|
|
86
|
-
- `destroyConvaiWidget(widget)` - Destroy widget instance
|
|
356
|
+
**⚠️ CRITICAL**: Without `AudioRenderer`, you will NOT hear the bot's voice.
|
|
87
357
|
|
|
88
|
-
**
|
|
358
|
+
**When to Use**:
|
|
89
359
|
|
|
90
|
-
-
|
|
91
|
-
- `
|
|
360
|
+
- Always when building custom UIs
|
|
361
|
+
- Already included in `ConvaiWidget` (no need to add separately)
|
|
92
362
|
|
|
93
|
-
**
|
|
363
|
+
**How It Works**:
|
|
94
364
|
|
|
95
|
-
-
|
|
96
|
-
-
|
|
97
|
-
-
|
|
98
|
-
- `ConvaiConfig` - Configuration interface
|
|
99
|
-
- `ConvaiClientState` - Client state interface
|
|
100
|
-
- `ChatMessage` - Message interface
|
|
365
|
+
- Attaches to the WebRTC room
|
|
366
|
+
- Automatically creates `<audio>` elements for remote participants (the bot)
|
|
367
|
+
- Manages audio playback lifecycle
|
|
101
368
|
|
|
102
|
-
|
|
369
|
+
```tsx
|
|
370
|
+
import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
|
|
103
371
|
|
|
104
|
-
|
|
372
|
+
function CustomChatUI() {
|
|
373
|
+
const convaiClient = useConvaiClient({
|
|
374
|
+
apiKey: "your-api-key",
|
|
375
|
+
characterId: "your-character-id",
|
|
376
|
+
});
|
|
105
377
|
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
378
|
+
return (
|
|
379
|
+
<div>
|
|
380
|
+
{/* CRITICAL: This component renders bot audio to speakers */}
|
|
381
|
+
<AudioRenderer />
|
|
382
|
+
|
|
383
|
+
{/* Your custom UI */}
|
|
384
|
+
<div>
|
|
385
|
+
{convaiClient.chatMessages.map((msg) => (
|
|
386
|
+
<div key={msg.id}>{msg.content}</div>
|
|
387
|
+
))}
|
|
388
|
+
</div>
|
|
389
|
+
</div>
|
|
390
|
+
);
|
|
391
|
+
}
|
|
392
|
+
```
|
|
112
393
|
|
|
113
|
-
|
|
394
|
+
### AudioContext
|
|
114
395
|
|
|
115
|
-
|
|
116
|
-
- `ConvaiClientType` - Type alias for ConvaiClient
|
|
396
|
+
**Purpose**: Provides the WebRTC Room to child components.
|
|
117
397
|
|
|
118
|
-
|
|
398
|
+
**When to Use**: When building deeply nested custom UIs that need direct access to the audio room.
|
|
119
399
|
|
|
120
|
-
|
|
400
|
+
**How It Works**: React Context that holds the active WebRTC room.
|
|
121
401
|
|
|
122
402
|
```tsx
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
403
|
+
import { useConvaiClient, AudioRenderer, AudioContext } from "@convai/web-sdk/react";
|
|
404
|
+
import { useContext } from "react";
|
|
405
|
+
|
|
406
|
+
function ChatbotWrapper() {
|
|
407
|
+
const convaiClient = useConvaiClient({
|
|
408
|
+
/* config */
|
|
409
|
+
});
|
|
410
|
+
|
|
411
|
+
return (
|
|
412
|
+
<AudioContext.Provider value={convaiClient.room}>
|
|
413
|
+
<AudioRenderer />
|
|
414
|
+
<ChildComponent />
|
|
415
|
+
</AudioContext.Provider>
|
|
416
|
+
);
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
function ChildComponent() {
|
|
420
|
+
const room = useContext(AudioContext);
|
|
421
|
+
// Access WebRTC room directly
|
|
422
|
+
console.log("Room state:", room?.state);
|
|
423
|
+
return <div>Child has access to Room</div>;
|
|
135
424
|
}
|
|
136
425
|
```
|
|
137
426
|
|
|
138
|
-
###
|
|
427
|
+
### React Exports Reference
|
|
428
|
+
|
|
429
|
+
```tsx
|
|
430
|
+
// Components
|
|
431
|
+
import { ConvaiWidget } from "@convai/web-sdk/react";
|
|
432
|
+
|
|
433
|
+
// Hooks
|
|
434
|
+
import { useConvaiClient, useCharacterInfo } from "@convai/web-sdk/react";
|
|
435
|
+
|
|
436
|
+
// Audio Rendering (Critical)
|
|
437
|
+
import { AudioRenderer, AudioContext } from "@convai/web-sdk/react";
|
|
438
|
+
|
|
439
|
+
// Core Client (for advanced usage)
|
|
440
|
+
import { ConvaiClient } from "@convai/web-sdk/react";
|
|
441
|
+
|
|
442
|
+
// Types
|
|
443
|
+
import type {
|
|
444
|
+
ConvaiConfig,
|
|
445
|
+
ConvaiClientState,
|
|
446
|
+
ChatMessage,
|
|
447
|
+
IConvaiClient,
|
|
448
|
+
AudioControls,
|
|
449
|
+
VideoControls,
|
|
450
|
+
ScreenShareControls,
|
|
451
|
+
} from "@convai/web-sdk/react";
|
|
452
|
+
```
|
|
453
|
+
|
|
454
|
+
---
|
|
455
|
+
|
|
456
|
+
## Vanilla SDK
|
|
457
|
+
|
|
458
|
+
### ConvaiClient Class
|
|
459
|
+
|
|
460
|
+
**Purpose**: Core client for managing Convai connections in vanilla JavaScript/TypeScript.
|
|
461
|
+
|
|
462
|
+
**When to Use**: Any non-React application or when you need full control.
|
|
463
|
+
|
|
464
|
+
**What It Provides**:
|
|
465
|
+
|
|
466
|
+
- Connection management
|
|
467
|
+
- Message handling
|
|
468
|
+
- State management (via events)
|
|
469
|
+
- Audio/video/screen share controls
|
|
139
470
|
|
|
140
471
|
```typescript
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
472
|
+
import { ConvaiClient } from "@convai/web-sdk/vanilla";
|
|
473
|
+
|
|
474
|
+
const client = new ConvaiClient({
|
|
475
|
+
apiKey: "your-api-key",
|
|
476
|
+
characterId: "your-character-id",
|
|
477
|
+
});
|
|
478
|
+
|
|
479
|
+
// Connect
|
|
480
|
+
await client.connect();
|
|
481
|
+
|
|
482
|
+
// Listen to events
|
|
483
|
+
client.on("stateChange", (state) => {
|
|
484
|
+
console.log("Agent state:", state.agentState);
|
|
485
|
+
});
|
|
486
|
+
|
|
487
|
+
client.on("message", (message) => {
|
|
488
|
+
console.log("New message:", message.content);
|
|
489
|
+
});
|
|
490
|
+
|
|
491
|
+
// Send messages
|
|
492
|
+
client.sendUserTextMessage("Hello!");
|
|
493
|
+
|
|
494
|
+
// Control audio
|
|
495
|
+
await client.audioControls.muteAudio();
|
|
496
|
+
await client.audioControls.unmuteAudio();
|
|
497
|
+
|
|
498
|
+
// Disconnect
|
|
499
|
+
await client.disconnect();
|
|
152
500
|
```
|
|
153
501
|
|
|
154
|
-
###
|
|
502
|
+
### AudioRenderer Class
|
|
503
|
+
|
|
504
|
+
**Purpose**: Manages audio playback for vanilla JavaScript/TypeScript applications.
|
|
505
|
+
|
|
506
|
+
**⚠️ CRITICAL**: Without this, you will NOT hear the bot's voice.
|
|
507
|
+
|
|
508
|
+
**When to Use**:
|
|
509
|
+
|
|
510
|
+
- Always when building custom vanilla UIs
|
|
511
|
+
- Already included in vanilla `ConvaiWidget` (no need to add separately)
|
|
512
|
+
|
|
513
|
+
**How It Works**:
|
|
514
|
+
|
|
515
|
+
- Attaches to the WebRTC room
|
|
516
|
+
- Automatically creates hidden `<audio>` elements
|
|
517
|
+
- Manages audio playback for remote participants (the bot)
|
|
155
518
|
|
|
156
519
|
```typescript
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
characterId:
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
*/
|
|
175
|
-
enableVideo?: boolean;
|
|
176
|
-
/**
|
|
177
|
-
* Start with video camera on when connecting (default: false).
|
|
178
|
-
* Only works if enableVideo is true.
|
|
179
|
-
*/
|
|
180
|
-
startWithVideoOn?: boolean;
|
|
181
|
-
/**
|
|
182
|
-
* Start with microphone on when connecting (default: false).
|
|
183
|
-
* If false, microphone stays off until user enables it.
|
|
184
|
-
*/
|
|
185
|
-
startWithAudioOn?: boolean;
|
|
186
|
-
/** Enable text-to-speech audio generation (default: true) */
|
|
187
|
-
ttsEnabled?: boolean;
|
|
188
|
-
}
|
|
520
|
+
import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
|
|
521
|
+
|
|
522
|
+
const client = new ConvaiClient({
|
|
523
|
+
apiKey: "your-api-key",
|
|
524
|
+
characterId: "your-character-id",
|
|
525
|
+
});
|
|
526
|
+
|
|
527
|
+
await client.connect();
|
|
528
|
+
|
|
529
|
+
// CRITICAL: Create AudioRenderer to hear bot audio
|
|
530
|
+
const audioRenderer = new AudioRenderer(client.room);
|
|
531
|
+
|
|
532
|
+
// Your custom UI logic...
|
|
533
|
+
|
|
534
|
+
// Cleanup
|
|
535
|
+
audioRenderer.destroy();
|
|
536
|
+
await client.disconnect();
|
|
189
537
|
```
|
|
190
538
|
|
|
191
|
-
|
|
539
|
+
### Vanilla Exports Reference
|
|
192
540
|
|
|
193
|
-
|
|
541
|
+
```typescript
|
|
542
|
+
// Widget
|
|
543
|
+
import { createConvaiWidget, destroyConvaiWidget } from "@convai/web-sdk/vanilla";
|
|
194
544
|
|
|
195
|
-
|
|
545
|
+
// Core Client
|
|
546
|
+
import { ConvaiClient } from "@convai/web-sdk/vanilla";
|
|
196
547
|
|
|
197
|
-
|
|
548
|
+
// Audio Rendering (Critical)
|
|
549
|
+
import { AudioRenderer } from "@convai/web-sdk/vanilla";
|
|
550
|
+
|
|
551
|
+
// Types
|
|
552
|
+
import type {
|
|
553
|
+
VanillaWidget,
|
|
554
|
+
VanillaWidgetOptions,
|
|
555
|
+
ConvaiConfig,
|
|
556
|
+
ConvaiClientState,
|
|
557
|
+
ChatMessage,
|
|
558
|
+
IConvaiClient,
|
|
559
|
+
} from "@convai/web-sdk/vanilla";
|
|
560
|
+
```
|
|
561
|
+
|
|
562
|
+
---
|
|
563
|
+
|
|
564
|
+
## Video & Screen Share
|
|
565
|
+
|
|
566
|
+
### Critical Requirements
|
|
567
|
+
|
|
568
|
+
> ⚠️ **IMPORTANT**: Video and Screen Share features require **TWO** configuration changes:
|
|
569
|
+
|
|
570
|
+
#### 1. Set `enableVideo: true` in Client Configuration
|
|
571
|
+
|
|
572
|
+
This sets the connection type to `"video"` which enables video capabilities.
|
|
573
|
+
|
|
574
|
+
#### 2. Set `showVideo` and/or `showScreenShare` in Widget Props
|
|
575
|
+
|
|
576
|
+
This shows the UI controls for video/screen share.
|
|
577
|
+
|
|
578
|
+
**Without both, video features will NOT work.**
|
|
579
|
+
|
|
580
|
+
---
|
|
581
|
+
|
|
582
|
+
### Enabling Video
|
|
583
|
+
|
|
584
|
+
#### React
|
|
198
585
|
|
|
199
586
|
```tsx
|
|
200
|
-
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
|
|
587
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
201
588
|
|
|
202
589
|
function App() {
|
|
590
|
+
// ✅ STEP 1: Enable video in client config
|
|
203
591
|
const convaiClient = useConvaiClient({
|
|
204
592
|
apiKey: "your-api-key",
|
|
205
593
|
characterId: "your-character-id",
|
|
206
|
-
enableVideo: true,
|
|
594
|
+
enableVideo: true, // ← REQUIRED for video
|
|
207
595
|
startWithVideoOn: false, // Camera off by default
|
|
208
596
|
});
|
|
209
597
|
|
|
210
598
|
return (
|
|
211
599
|
<ConvaiWidget
|
|
212
600
|
convaiClient={convaiClient}
|
|
213
|
-
showVideo={true}
|
|
214
|
-
showScreenShare={
|
|
601
|
+
showVideo={true} // ← STEP 2: Show video controls
|
|
602
|
+
showScreenShare={false} // Optional: hide screen share
|
|
215
603
|
/>
|
|
216
604
|
);
|
|
217
605
|
}
|
|
218
606
|
```
|
|
219
607
|
|
|
220
|
-
|
|
608
|
+
#### Vanilla
|
|
221
609
|
|
|
222
610
|
```typescript
|
|
223
611
|
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
|
|
224
612
|
|
|
613
|
+
// ✅ STEP 1: Enable video in client config
|
|
225
614
|
const client = new ConvaiClient({
|
|
226
615
|
apiKey: "your-api-key",
|
|
227
616
|
characterId: "your-character-id",
|
|
228
|
-
enableVideo: true,
|
|
617
|
+
enableVideo: true, // ← REQUIRED for video
|
|
229
618
|
startWithVideoOn: false,
|
|
230
619
|
});
|
|
231
620
|
|
|
232
621
|
const widget = createConvaiWidget(document.body, {
|
|
233
622
|
convaiClient: client,
|
|
234
|
-
showVideo: true,
|
|
235
|
-
showScreenShare:
|
|
623
|
+
showVideo: true, // ← STEP 2: Show video controls
|
|
624
|
+
showScreenShare: false,
|
|
236
625
|
});
|
|
237
626
|
```
|
|
238
627
|
|
|
239
|
-
|
|
628
|
+
#### Manual Video Control
|
|
240
629
|
|
|
241
630
|
```typescript
|
|
242
|
-
// Enable
|
|
631
|
+
// Enable camera
|
|
243
632
|
await convaiClient.videoControls.enableVideo();
|
|
244
633
|
|
|
245
|
-
// Disable
|
|
634
|
+
// Disable camera
|
|
246
635
|
await convaiClient.videoControls.disableVideo();
|
|
247
636
|
|
|
248
|
-
// Toggle
|
|
637
|
+
// Toggle camera
|
|
249
638
|
await convaiClient.videoControls.toggleVideo();
|
|
250
639
|
|
|
251
|
-
// Check
|
|
252
|
-
|
|
640
|
+
// Check state
|
|
641
|
+
console.log(convaiClient.isVideoEnabled);
|
|
253
642
|
|
|
254
643
|
// Set video quality
|
|
255
644
|
await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'
|
|
256
645
|
|
|
257
|
-
// Get available
|
|
646
|
+
// Get available cameras
|
|
258
647
|
const devices = await convaiClient.videoControls.getVideoDevices();
|
|
259
648
|
|
|
260
|
-
//
|
|
649
|
+
// Switch camera
|
|
261
650
|
await convaiClient.videoControls.setVideoDevice(deviceId);
|
|
262
651
|
```
|
|
263
652
|
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
```typescript
|
|
267
|
-
// Enable screen share
|
|
268
|
-
await convaiClient.screenShareControls.enableScreenShare();
|
|
269
|
-
|
|
270
|
-
// Enable screen share with audio
|
|
271
|
-
await convaiClient.screenShareControls.enableScreenShareWithAudio();
|
|
272
|
-
|
|
273
|
-
// Disable screen share
|
|
274
|
-
await convaiClient.screenShareControls.disableScreenShare();
|
|
275
|
-
|
|
276
|
-
// Toggle screen share
|
|
277
|
-
await convaiClient.screenShareControls.toggleScreenShare();
|
|
278
|
-
|
|
279
|
-
// Check screen share state
|
|
280
|
-
const isActive = convaiClient.screenShareControls.isScreenShareActive;
|
|
281
|
-
```
|
|
282
|
-
|
|
283
|
-
**Video State Monitoring:**
|
|
284
|
-
|
|
285
|
-
```typescript
|
|
286
|
-
// React
|
|
287
|
-
const { isVideoEnabled } = convaiClient;
|
|
288
|
-
|
|
289
|
-
// Core API (event-based)
|
|
290
|
-
convaiClient.videoControls.on("videoStateChange", (state) => {
|
|
291
|
-
console.log("Video enabled:", state.isVideoEnabled);
|
|
292
|
-
console.log("Video hidden:", state.isVideoHidden);
|
|
293
|
-
});
|
|
294
|
-
```
|
|
653
|
+
---
|
|
295
654
|
|
|
296
|
-
###
|
|
655
|
+
### Enabling Screen Share
|
|
297
656
|
|
|
298
|
-
|
|
657
|
+
Screen sharing **requires** `enableVideo: true` (connection type must be `"video"`).
|
|
299
658
|
|
|
300
|
-
|
|
659
|
+
#### React
|
|
301
660
|
|
|
302
661
|
```tsx
|
|
303
|
-
|
|
662
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
663
|
+
|
|
664
|
+
function App() {
|
|
665
|
+
// ✅ STEP 1: Enable video (required for screen share)
|
|
304
666
|
const convaiClient = useConvaiClient({
|
|
305
|
-
|
|
667
|
+
apiKey: "your-api-key",
|
|
668
|
+
characterId: "your-character-id",
|
|
669
|
+
enableVideo: true, // ← REQUIRED for screen share
|
|
306
670
|
});
|
|
307
671
|
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
672
|
+
return (
|
|
673
|
+
<ConvaiWidget
|
|
674
|
+
convaiClient={convaiClient}
|
|
675
|
+
showVideo={true} // Optional: show video controls
|
|
676
|
+
showScreenShare={true} // ← STEP 2: Show screen share controls
|
|
677
|
+
/>
|
|
678
|
+
);
|
|
314
679
|
}
|
|
315
680
|
```
|
|
316
681
|
|
|
317
|
-
|
|
682
|
+
#### Vanilla
|
|
318
683
|
|
|
319
684
|
```typescript
|
|
320
|
-
|
|
685
|
+
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
|
|
321
686
|
|
|
322
|
-
|
|
323
|
-
|
|
687
|
+
// ✅ STEP 1: Enable video (required for screen share)
|
|
688
|
+
const client = new ConvaiClient({
|
|
689
|
+
apiKey: "your-api-key",
|
|
690
|
+
characterId: "your-character-id",
|
|
691
|
+
enableVideo: true, // ← REQUIRED for screen share
|
|
324
692
|
});
|
|
325
|
-
```
|
|
326
693
|
|
|
327
|
-
|
|
694
|
+
const widget = createConvaiWidget(document.body, {
|
|
695
|
+
convaiClient: client,
|
|
696
|
+
showVideo: true,
|
|
697
|
+
showScreenShare: true, // ← STEP 2: Show screen share controls
|
|
698
|
+
});
|
|
699
|
+
```
|
|
328
700
|
|
|
329
|
-
|
|
701
|
+
#### Manual Screen Share Control
|
|
330
702
|
|
|
331
703
|
```typescript
|
|
332
|
-
//
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
704
|
+
// Start screen share
|
|
705
|
+
await convaiClient.screenShareControls.enableScreenShare();
|
|
706
|
+
|
|
707
|
+
// Start screen share with audio
|
|
708
|
+
await convaiClient.screenShareControls.enableScreenShareWithAudio();
|
|
336
709
|
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
};
|
|
710
|
+
// Stop screen share
|
|
711
|
+
await convaiClient.screenShareControls.disableScreenShare();
|
|
340
712
|
|
|
341
|
-
//
|
|
342
|
-
|
|
343
|
-
// Interrupt any ongoing bot response
|
|
344
|
-
convaiClient.sendInterruptMessage();
|
|
713
|
+
// Toggle screen share
|
|
714
|
+
await convaiClient.screenShareControls.toggleScreenShare();
|
|
345
715
|
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
};
|
|
716
|
+
// Check state
|
|
717
|
+
console.log(convaiClient.isScreenShareActive);
|
|
349
718
|
```
|
|
350
719
|
|
|
351
|
-
|
|
720
|
+
---
|
|
721
|
+
|
|
722
|
+
## Building Custom UIs
|
|
723
|
+
|
|
724
|
+
### Custom Chat Interface
|
|
352
725
|
|
|
353
|
-
|
|
726
|
+
Use the `chatMessages` array from ConvaiClient to build your own chat UI.
|
|
354
727
|
|
|
355
|
-
|
|
728
|
+
#### React Example
|
|
356
729
|
|
|
357
730
|
```tsx
|
|
358
|
-
|
|
731
|
+
import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
|
|
732
|
+
import { useState } from "react";
|
|
733
|
+
|
|
734
|
+
function CustomChatUI() {
|
|
359
735
|
const convaiClient = useConvaiClient({
|
|
360
|
-
|
|
736
|
+
apiKey: "your-api-key",
|
|
737
|
+
characterId: "your-character-id",
|
|
361
738
|
});
|
|
362
739
|
|
|
363
|
-
const
|
|
364
|
-
|
|
365
|
-
};
|
|
366
|
-
|
|
367
|
-
const handleUnmute = async () => {
|
|
368
|
-
await convaiClient.audioControls.unmuteAudio();
|
|
369
|
-
};
|
|
740
|
+
const { chatMessages, state } = convaiClient;
|
|
741
|
+
const [inputValue, setInputValue] = useState("");
|
|
370
742
|
|
|
371
|
-
const
|
|
372
|
-
|
|
743
|
+
const handleSend = () => {
|
|
744
|
+
if (inputValue.trim() && state.isConnected) {
|
|
745
|
+
convaiClient.sendUserTextMessage(inputValue);
|
|
746
|
+
setInputValue("");
|
|
747
|
+
}
|
|
373
748
|
};
|
|
374
749
|
|
|
375
750
|
return (
|
|
376
751
|
<div>
|
|
377
|
-
|
|
378
|
-
<
|
|
379
|
-
|
|
380
|
-
|
|
752
|
+
{/* CRITICAL: AudioRenderer for bot voice */}
|
|
753
|
+
<AudioRenderer />
|
|
754
|
+
|
|
755
|
+
{/* Chat Messages */}
|
|
756
|
+
<div className="chat-container">
|
|
757
|
+
{chatMessages.map((msg) => {
|
|
758
|
+
const isUser = msg.type.includes("user");
|
|
759
|
+
const displayMessage =
|
|
760
|
+
msg.type === "user-llm-text" || msg.type === "bot-llm-text";
|
|
761
|
+
|
|
762
|
+
if (!displayMessage) return null;
|
|
763
|
+
|
|
764
|
+
return (
|
|
765
|
+
<div
|
|
766
|
+
key={msg.id}
|
|
767
|
+
className={isUser ? "user-message" : "bot-message"}
|
|
768
|
+
>
|
|
769
|
+
<span className="sender">
|
|
770
|
+
{isUser ? "You" : "Character"}
|
|
771
|
+
</span>
|
|
772
|
+
<p>{msg.content}</p>
|
|
773
|
+
<span className="timestamp">
|
|
774
|
+
{new Date(msg.timestamp).toLocaleTimeString()}
|
|
775
|
+
</span>
|
|
776
|
+
</div>
|
|
777
|
+
);
|
|
778
|
+
})}
|
|
779
|
+
</div>
|
|
780
|
+
|
|
781
|
+
{/* Input */}
|
|
782
|
+
<div className="input-container">
|
|
783
|
+
<input
|
|
784
|
+
type="text"
|
|
785
|
+
value={inputValue}
|
|
786
|
+
onChange={(e) => setInputValue(e.target.value)}
|
|
787
|
+
onKeyPress={(e) => e.key === "Enter" && handleSend()}
|
|
788
|
+
placeholder="Type a message..."
|
|
789
|
+
disabled={!state.isConnected}
|
|
790
|
+
/>
|
|
791
|
+
<button onClick={handleSend} disabled={!state.isConnected}>
|
|
792
|
+
Send
|
|
793
|
+
</button>
|
|
794
|
+
</div>
|
|
795
|
+
|
|
796
|
+
{/* Status Indicator */}
|
|
797
|
+
<div className="status">
|
|
798
|
+
{state.isConnecting && "Connecting..."}
|
|
799
|
+
{state.isConnected && state.agentState}
|
|
800
|
+
{!state.isConnected && "Disconnected"}
|
|
801
|
+
</div>
|
|
381
802
|
</div>
|
|
382
803
|
);
|
|
383
804
|
}
|
|
384
805
|
```
|
|
385
806
|
|
|
386
|
-
|
|
807
|
+
#### Vanilla Example
|
|
387
808
|
|
|
388
809
|
```typescript
|
|
389
|
-
|
|
390
|
-
await client.audioControls.muteAudio();
|
|
810
|
+
import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
|
|
391
811
|
|
|
392
|
-
|
|
393
|
-
|
|
812
|
+
const client = new ConvaiClient({
|
|
813
|
+
apiKey: "your-api-key",
|
|
814
|
+
characterId: "your-character-id",
|
|
815
|
+
});
|
|
394
816
|
|
|
395
|
-
|
|
396
|
-
await client.audioControls.toggleAudio();
|
|
817
|
+
await client.connect();
|
|
397
818
|
|
|
398
|
-
//
|
|
399
|
-
const
|
|
819
|
+
// CRITICAL: Create AudioRenderer for bot voice
|
|
820
|
+
const audioRenderer = new AudioRenderer(client.room);
|
|
400
821
|
|
|
401
|
-
|
|
402
|
-
|
|
822
|
+
const chatContainer = document.getElementById("chat-container");
|
|
823
|
+
const inputElement = document.getElementById("message-input");
|
|
824
|
+
const sendButton = document.getElementById("send-button");
|
|
403
825
|
|
|
404
|
-
//
|
|
405
|
-
|
|
406
|
-
|
|
826
|
+
// Render messages
|
|
827
|
+
client.on("messagesChange", (messages) => {
|
|
828
|
+
chatContainer.innerHTML = "";
|
|
407
829
|
|
|
408
|
-
|
|
830
|
+
messages.forEach((msg) => {
|
|
831
|
+
const isUser = msg.type.includes("user");
|
|
832
|
+
const displayMessage =
|
|
833
|
+
msg.type === "user-llm-text" || msg.type === "bot-llm-text";
|
|
409
834
|
|
|
410
|
-
|
|
411
|
-
// Get available audio devices
|
|
412
|
-
const devices = await convaiClient.audioControls.getAudioDevices();
|
|
835
|
+
if (!displayMessage) return;
|
|
413
836
|
|
|
414
|
-
|
|
415
|
-
|
|
837
|
+
const messageDiv = document.createElement("div");
|
|
838
|
+
messageDiv.className = isUser ? "user-message" : "bot-message";
|
|
416
839
|
|
|
417
|
-
|
|
418
|
-
|
|
840
|
+
const sender = document.createElement("span");
|
|
841
|
+
sender.textContent = isUser ? "You" : "Character";
|
|
842
|
+
sender.className = "sender";
|
|
419
843
|
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
// level is a number between 0 and 1
|
|
423
|
-
});
|
|
844
|
+
const content = document.createElement("p");
|
|
845
|
+
content.textContent = msg.content;
|
|
424
846
|
|
|
425
|
-
|
|
426
|
-
|
|
847
|
+
const timestamp = document.createElement("span");
|
|
848
|
+
timestamp.textContent = new Date(msg.timestamp).toLocaleTimeString();
|
|
849
|
+
timestamp.className = "timestamp";
|
|
427
850
|
|
|
428
|
-
|
|
851
|
+
messageDiv.appendChild(sender);
|
|
852
|
+
messageDiv.appendChild(content);
|
|
853
|
+
messageDiv.appendChild(timestamp);
|
|
854
|
+
chatContainer.appendChild(messageDiv);
|
|
855
|
+
});
|
|
429
856
|
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
//
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
857
|
+
// Auto-scroll
|
|
858
|
+
chatContainer.scrollTop = chatContainer.scrollHeight;
|
|
859
|
+
});
|
|
860
|
+
|
|
861
|
+
// Send message
|
|
862
|
+
sendButton.addEventListener("click", () => {
|
|
863
|
+
const text = inputElement.value.trim();
|
|
864
|
+
if (text && client.state.isConnected) {
|
|
865
|
+
client.sendUserTextMessage(text);
|
|
866
|
+
inputElement.value = "";
|
|
867
|
+
}
|
|
868
|
+
});
|
|
869
|
+
|
|
870
|
+
inputElement.addEventListener("keypress", (e) => {
|
|
871
|
+
if (e.key === "Enter") {
|
|
872
|
+
sendButton.click();
|
|
873
|
+
}
|
|
439
874
|
});
|
|
875
|
+
|
|
876
|
+
// Cleanup
|
|
877
|
+
// audioRenderer.destroy();
|
|
878
|
+
// await client.disconnect();
|
|
440
879
|
```
|
|
441
880
|
|
|
442
|
-
|
|
881
|
+
---
|
|
443
882
|
|
|
444
|
-
|
|
883
|
+
### Audio Visualizer
|
|
445
884
|
|
|
446
|
-
|
|
885
|
+
Create real-time audio visualizers using the WebRTC room's audio tracks.
|
|
886
|
+
|
|
887
|
+
#### React Example
|
|
447
888
|
|
|
448
889
|
```tsx
|
|
449
|
-
|
|
890
|
+
import { useConvaiClient } from "@convai/web-sdk/react";
|
|
891
|
+
import { useEffect, useRef, useState } from "react";
|
|
892
|
+
|
|
893
|
+
function AudioVisualizer() {
|
|
450
894
|
const convaiClient = useConvaiClient({
|
|
451
|
-
|
|
895
|
+
apiKey: "your-api-key",
|
|
896
|
+
characterId: "your-character-id",
|
|
452
897
|
});
|
|
453
898
|
|
|
454
|
-
const
|
|
455
|
-
|
|
456
|
-
};
|
|
899
|
+
const canvasRef = useRef<HTMLCanvasElement>(null);
|
|
900
|
+
const [audioLevel, setAudioLevel] = useState(0);
|
|
457
901
|
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
<button onClick={() => handleToggleTTS(true)}>Enable TTS</button>
|
|
461
|
-
<button onClick={() => handleToggleTTS(false)}>Disable TTS</button>
|
|
462
|
-
</div>
|
|
463
|
-
);
|
|
464
|
-
}
|
|
465
|
-
```
|
|
902
|
+
useEffect(() => {
|
|
903
|
+
if (!convaiClient.room) return;
|
|
466
904
|
|
|
467
|
-
|
|
905
|
+
let animationId: number;
|
|
906
|
+
let analyzer: AnalyserNode | null = null;
|
|
907
|
+
let dataArray: Uint8Array | null = null;
|
|
468
908
|
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
client.toggleTts(true);
|
|
909
|
+
const setupAnalyzer = async () => {
|
|
910
|
+
const audioContext = new AudioContext();
|
|
472
911
|
|
|
473
|
-
//
|
|
474
|
-
|
|
475
|
-
|
|
912
|
+
// Get remote participant (bot)
|
|
913
|
+
const remoteParticipants = Array.from(
|
|
914
|
+
convaiClient.room.remoteParticipants.values()
|
|
915
|
+
);
|
|
476
916
|
|
|
477
|
-
|
|
917
|
+
if (remoteParticipants.length === 0) return;
|
|
478
918
|
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
characterId: "your-character-id",
|
|
484
|
-
ttsEnabled: true, // Enable TTS by default
|
|
485
|
-
});
|
|
919
|
+
const participant = remoteParticipants[0];
|
|
920
|
+
const audioTracks = Array.from(
|
|
921
|
+
participant.audioTrackPublications.values()
|
|
922
|
+
);
|
|
486
923
|
|
|
487
|
-
|
|
488
|
-
const client = new ConvaiClient({
|
|
489
|
-
apiKey: "your-api-key",
|
|
490
|
-
characterId: "your-character-id",
|
|
491
|
-
ttsEnabled: false, // Disable TTS
|
|
492
|
-
});
|
|
493
|
-
```
|
|
924
|
+
if (audioTracks.length === 0) return;
|
|
494
925
|
|
|
495
|
-
|
|
926
|
+
const audioTrack = audioTracks[0].track;
|
|
927
|
+
if (!audioTrack) return;
|
|
496
928
|
|
|
497
|
-
|
|
929
|
+
// Get MediaStream from track
|
|
930
|
+
const mediaStream = new MediaStream([audioTrack.mediaStreamTrack]);
|
|
498
931
|
|
|
499
|
-
|
|
932
|
+
// Create analyzer
|
|
933
|
+
const source = audioContext.createMediaStreamSource(mediaStream);
|
|
934
|
+
analyzer = audioContext.createAnalyser();
|
|
935
|
+
analyzer.fftSize = 256;
|
|
500
936
|
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
import { useState, useEffect } from "react";
|
|
937
|
+
source.connect(analyzer);
|
|
938
|
+
dataArray = new Uint8Array(analyzer.frequencyBinCount);
|
|
504
939
|
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
});
|
|
509
|
-
const [isVoiceMode, setIsVoiceMode] = useState(false);
|
|
940
|
+
// Animate
|
|
941
|
+
const animate = () => {
|
|
942
|
+
if (!analyzer || !dataArray) return;
|
|
510
943
|
|
|
511
|
-
|
|
512
|
-
// Interrupt any ongoing bot response
|
|
513
|
-
convaiClient.sendInterruptMessage();
|
|
944
|
+
analyzer.getByteFrequencyData(dataArray);
|
|
514
945
|
|
|
515
|
-
|
|
516
|
-
|
|
946
|
+
// Calculate average volume
|
|
947
|
+
const sum = dataArray.reduce((a, b) => a + b, 0);
|
|
948
|
+
const average = sum / dataArray.length;
|
|
949
|
+
const normalizedLevel = average / 255;
|
|
517
950
|
|
|
518
|
-
|
|
519
|
-
};
|
|
951
|
+
setAudioLevel(normalizedLevel);
|
|
520
952
|
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
convaiClient.sendInterruptMessage();
|
|
953
|
+
// Draw visualization
|
|
954
|
+
drawVisualizer(dataArray);
|
|
524
955
|
|
|
525
|
-
|
|
526
|
-
|
|
956
|
+
animationId = requestAnimationFrame(animate);
|
|
957
|
+
};
|
|
527
958
|
|
|
528
|
-
|
|
529
|
-
|
|
959
|
+
animate();
|
|
960
|
+
};
|
|
530
961
|
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
962
|
+
const drawVisualizer = (dataArray: Uint8Array) => {
|
|
963
|
+
const canvas = canvasRef.current;
|
|
964
|
+
if (!canvas) return;
|
|
965
|
+
|
|
966
|
+
const ctx = canvas.getContext("2d");
|
|
967
|
+
if (!ctx) return;
|
|
968
|
+
|
|
969
|
+
const width = canvas.width;
|
|
970
|
+
const height = canvas.height;
|
|
971
|
+
|
|
972
|
+
ctx.clearRect(0, 0, width, height);
|
|
973
|
+
|
|
974
|
+
const barWidth = (width / dataArray.length) * 2.5;
|
|
975
|
+
let x = 0;
|
|
976
|
+
|
|
977
|
+
for (let i = 0; i < dataArray.length; i++) {
|
|
978
|
+
const barHeight = (dataArray[i] / 255) * height;
|
|
979
|
+
|
|
980
|
+
ctx.fillStyle = `rgb(${barHeight + 100}, 50, 150)`;
|
|
981
|
+
ctx.fillRect(x, height - barHeight, barWidth, barHeight);
|
|
982
|
+
|
|
983
|
+
x += barWidth + 1;
|
|
984
|
+
}
|
|
985
|
+
};
|
|
986
|
+
|
|
987
|
+
if (convaiClient.state.isConnected) {
|
|
988
|
+
setupAnalyzer();
|
|
537
989
|
}
|
|
538
|
-
|
|
990
|
+
|
|
991
|
+
return () => {
|
|
992
|
+
if (animationId) cancelAnimationFrame(animationId);
|
|
993
|
+
};
|
|
994
|
+
}, [convaiClient.room, convaiClient.state.isConnected]);
|
|
539
995
|
|
|
540
996
|
return (
|
|
541
997
|
<div>
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
998
|
+
<canvas
|
|
999
|
+
ref={canvasRef}
|
|
1000
|
+
width={800}
|
|
1001
|
+
height={200}
|
|
1002
|
+
style={{ border: "1px solid #ccc" }}
|
|
1003
|
+
/>
|
|
1004
|
+
<div>Audio Level: {(audioLevel * 100).toFixed(0)}%</div>
|
|
1005
|
+
<div>
|
|
1006
|
+
Bot is {convaiClient.state.isSpeaking ? "speaking" : "silent"}
|
|
1007
|
+
</div>
|
|
550
1008
|
</div>
|
|
551
1009
|
);
|
|
552
1010
|
}
|
|
553
1011
|
```
|
|
554
1012
|
|
|
555
|
-
|
|
1013
|
+
#### Vanilla Example
|
|
556
1014
|
|
|
557
1015
|
```typescript
|
|
558
|
-
|
|
1016
|
+
import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
|
|
559
1017
|
|
|
560
|
-
const
|
|
561
|
-
|
|
562
|
-
|
|
1018
|
+
const client = new ConvaiClient({
|
|
1019
|
+
apiKey: "your-api-key",
|
|
1020
|
+
characterId: "your-character-id",
|
|
1021
|
+
});
|
|
563
1022
|
|
|
564
|
-
|
|
565
|
-
await client.audioControls.unmuteAudio();
|
|
1023
|
+
await client.connect();
|
|
566
1024
|
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
};
|
|
1025
|
+
// CRITICAL: AudioRenderer for playback
|
|
1026
|
+
const audioRenderer = new AudioRenderer(client.room);
|
|
570
1027
|
|
|
571
|
-
const
|
|
572
|
-
|
|
573
|
-
client.sendInterruptMessage();
|
|
1028
|
+
const canvas = document.getElementById("visualizer") as HTMLCanvasElement;
|
|
1029
|
+
const ctx = canvas.getContext("2d")!;
|
|
574
1030
|
|
|
575
|
-
|
|
576
|
-
|
|
1031
|
+
let analyzer: AnalyserNode | null = null;
|
|
1032
|
+
let dataArray: Uint8Array | null = null;
|
|
1033
|
+
let animationId: number;
|
|
577
1034
|
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
};
|
|
1035
|
+
// Setup analyzer
|
|
1036
|
+
const audioContext = new AudioContext();
|
|
581
1037
|
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
document.getElementById("transcription").textContent = transcription;
|
|
587
|
-
}
|
|
588
|
-
});
|
|
1038
|
+
const remoteParticipants = Array.from(client.room.remoteParticipants.values());
|
|
1039
|
+
const participant = remoteParticipants[0];
|
|
1040
|
+
const audioTracks = Array.from(participant.audioTrackPublications.values());
|
|
1041
|
+
const audioTrack = audioTracks[0].track;
|
|
589
1042
|
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
const transcriptionDiv = document.getElementById("transcription");
|
|
1043
|
+
const mediaStream = new MediaStream([audioTrack.mediaStreamTrack]);
|
|
1044
|
+
const source = audioContext.createMediaStreamSource(mediaStream);
|
|
593
1045
|
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
} else {
|
|
598
|
-
voiceButton.textContent = "Start Voice Mode";
|
|
599
|
-
transcriptionDiv.style.display = "none";
|
|
600
|
-
}
|
|
601
|
-
}
|
|
602
|
-
```
|
|
1046
|
+
analyzer = audioContext.createAnalyser();
|
|
1047
|
+
analyzer.fftSize = 256;
|
|
1048
|
+
source.connect(analyzer);
|
|
603
1049
|
|
|
604
|
-
|
|
1050
|
+
dataArray = new Uint8Array(analyzer.frequencyBinCount);
|
|
605
1051
|
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
if (isVoiceMode) {
|
|
610
|
-
switch (state.agentState) {
|
|
611
|
-
case "listening":
|
|
612
|
-
// User can speak
|
|
613
|
-
console.log("Bot is listening");
|
|
614
|
-
break;
|
|
615
|
-
case "thinking":
|
|
616
|
-
// Bot is processing
|
|
617
|
-
console.log("Bot is thinking");
|
|
618
|
-
break;
|
|
619
|
-
case "speaking":
|
|
620
|
-
// Bot is responding
|
|
621
|
-
console.log("Bot is speaking");
|
|
622
|
-
// Optionally interrupt if user wants to speak
|
|
623
|
-
break;
|
|
624
|
-
}
|
|
625
|
-
}
|
|
626
|
-
});
|
|
627
|
-
```
|
|
1052
|
+
// Animate
|
|
1053
|
+
function animate() {
|
|
1054
|
+
if (!analyzer || !dataArray) return;
|
|
628
1055
|
|
|
629
|
-
|
|
1056
|
+
analyzer.getByteFrequencyData(dataArray);
|
|
630
1057
|
|
|
631
|
-
|
|
1058
|
+
// Clear canvas
|
|
1059
|
+
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
|
632
1060
|
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
const convaiClient = useConvaiClient({
|
|
636
|
-
apiKey: "your-api-key",
|
|
637
|
-
characterId: "your-character-id",
|
|
638
|
-
});
|
|
1061
|
+
const barWidth = (canvas.width / dataArray.length) * 2.5;
|
|
1062
|
+
let x = 0;
|
|
639
1063
|
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
apiKey: "your-api-key",
|
|
643
|
-
characterId: "your-character-id",
|
|
644
|
-
});
|
|
1064
|
+
for (let i = 0; i < dataArray.length; i++) {
|
|
1065
|
+
const barHeight = (dataArray[i] / 255) * canvas.height;
|
|
645
1066
|
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
1067
|
+
ctx.fillStyle = `rgb(${barHeight + 100}, 50, 150)`;
|
|
1068
|
+
ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
|
|
1069
|
+
|
|
1070
|
+
x += barWidth + 1;
|
|
1071
|
+
}
|
|
1072
|
+
|
|
1073
|
+
animationId = requestAnimationFrame(animate);
|
|
1074
|
+
}
|
|
1075
|
+
|
|
1076
|
+
animate();
|
|
1077
|
+
|
|
1078
|
+
// Cleanup
|
|
1079
|
+
// cancelAnimationFrame(animationId);
|
|
1080
|
+
// audioRenderer.destroy();
|
|
1081
|
+
// await client.disconnect();
|
|
652
1082
|
```
|
|
653
1083
|
|
|
654
|
-
|
|
1084
|
+
---
|
|
1085
|
+
|
|
1086
|
+
### Message Types
|
|
1087
|
+
|
|
1088
|
+
All messages from `convaiClient.chatMessages` have a `type` field:
|
|
655
1089
|
|
|
656
1090
|
```typescript
|
|
657
|
-
|
|
1091
|
+
type ChatMessageType =
|
|
1092
|
+
| "user" // User's sent message (raw)
|
|
1093
|
+
| "user-transcription" // Real-time speech-to-text from user
|
|
1094
|
+
| "user-llm-text" // User text processed by LLM (final)
|
|
1095
|
+
| "convai" // Character's response (raw)
|
|
1096
|
+
| "bot-llm-text" // Character's LLM-generated text (final)
|
|
1097
|
+
| "bot-emotion" // Character's emotional state
|
|
1098
|
+
| "emotion" // Generic emotion
|
|
1099
|
+
| "behavior-tree" // Behavior tree response
|
|
1100
|
+
| "action" // Action execution
|
|
1101
|
+
| "interrupt-bot"; // Interrupt message
|
|
658
1102
|
```
|
|
659
1103
|
|
|
660
|
-
**
|
|
1104
|
+
**For Chat UIs, filter to:**
|
|
661
1105
|
|
|
662
1106
|
```typescript
|
|
663
|
-
|
|
1107
|
+
const displayMessages = chatMessages.filter(
|
|
1108
|
+
(msg) => msg.type === "user-llm-text" || msg.type === "bot-llm-text"
|
|
1109
|
+
);
|
|
664
1110
|
```
|
|
665
1111
|
|
|
666
|
-
|
|
1112
|
+
---
|
|
1113
|
+
|
|
1114
|
+
## API Reference
|
|
1115
|
+
|
|
1116
|
+
### Configuration
|
|
667
1117
|
|
|
668
1118
|
```typescript
|
|
669
|
-
|
|
670
|
-
|
|
1119
|
+
interface ConvaiConfig {
|
|
1120
|
+
/** Your Convai API key from convai.com dashboard (required) */
|
|
1121
|
+
apiKey: string;
|
|
1122
|
+
|
|
1123
|
+
/** The Character ID to connect to (required) */
|
|
1124
|
+
characterId: string;
|
|
1125
|
+
|
|
1126
|
+
/**
|
|
1127
|
+
* End user identifier for speaker management (optional).
|
|
1128
|
+
* When provided: enables long-term memory and analytics
|
|
1129
|
+
* When not provided: anonymous mode, no persistent memory
|
|
1130
|
+
*/
|
|
1131
|
+
endUserId?: string;
|
|
1132
|
+
|
|
1133
|
+
/** Custom Convai API URL (optional) */
|
|
1134
|
+
url?: string;
|
|
1135
|
+
|
|
1136
|
+
/**
|
|
1137
|
+
* Enable video capability (default: false).
|
|
1138
|
+
* If true, connection_type will be "video" (supports audio, video, screenshare).
|
|
1139
|
+
* If false, connection_type will be "audio" (audio only).
|
|
1140
|
+
* ⚠️ REQUIRED for video and screen share features.
|
|
1141
|
+
*/
|
|
1142
|
+
enableVideo?: boolean;
|
|
1143
|
+
|
|
1144
|
+
/**
|
|
1145
|
+
* Start with video camera on when connecting (default: false).
|
|
1146
|
+
* Only works if enableVideo is true.
|
|
1147
|
+
*/
|
|
1148
|
+
startWithVideoOn?: boolean;
|
|
1149
|
+
|
|
1150
|
+
/**
|
|
1151
|
+
* Start with microphone on when connecting (default: false).
|
|
1152
|
+
* If false, microphone stays off until user enables it.
|
|
1153
|
+
*/
|
|
1154
|
+
startWithAudioOn?: boolean;
|
|
1155
|
+
|
|
1156
|
+
/**
|
|
1157
|
+
* Enable text-to-speech audio generation (default: true).
|
|
1158
|
+
*/
|
|
1159
|
+
ttsEnabled?: boolean;
|
|
1160
|
+
}
|
|
671
1161
|
```
|
|
672
1162
|
|
|
673
|
-
|
|
1163
|
+
### Connection Management
|
|
674
1164
|
|
|
675
1165
|
```typescript
|
|
676
|
-
//
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
// Core API (event-based)
|
|
683
|
-
convaiClient.on("stateChange", (state) => {
|
|
684
|
-
console.log("State changed:", state);
|
|
1166
|
+
// Connect
|
|
1167
|
+
await convaiClient.connect({
|
|
1168
|
+
apiKey: "your-api-key",
|
|
1169
|
+
characterId: "your-character-id",
|
|
1170
|
+
enableVideo: true,
|
|
685
1171
|
});
|
|
686
1172
|
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
});
|
|
1173
|
+
// Disconnect
|
|
1174
|
+
await convaiClient.disconnect();
|
|
690
1175
|
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
1176
|
+
// Reconnect
|
|
1177
|
+
await convaiClient.reconnect();
|
|
1178
|
+
|
|
1179
|
+
// Reset session (clear conversation history)
|
|
1180
|
+
convaiClient.resetSession();
|
|
1181
|
+
|
|
1182
|
+
// Check connection state
|
|
1183
|
+
console.log(convaiClient.state.isConnected);
|
|
1184
|
+
console.log(convaiClient.state.isConnecting);
|
|
1185
|
+
console.log(convaiClient.state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
|
|
1186
|
+
console.log(convaiClient.isBotReady); // Bot ready to receive messages
|
|
694
1187
|
```
|
|
695
1188
|
|
|
696
1189
|
### Messaging
|
|
697
1190
|
|
|
698
|
-
**Send Text Message:**
|
|
699
|
-
|
|
700
1191
|
```typescript
|
|
1192
|
+
// Send text message
|
|
701
1193
|
convaiClient.sendUserTextMessage("Hello, how are you?");
|
|
702
|
-
```
|
|
703
|
-
|
|
704
|
-
**Send Trigger Message:**
|
|
705
1194
|
|
|
706
|
-
|
|
707
|
-
// Trigger specific character action
|
|
1195
|
+
// Send trigger message (invoke character action)
|
|
708
1196
|
convaiClient.sendTriggerMessage("greet", "User entered the room");
|
|
709
1197
|
|
|
710
|
-
//
|
|
711
|
-
convaiClient.
|
|
712
|
-
```
|
|
1198
|
+
// Interrupt character's current response
|
|
1199
|
+
convaiClient.sendInterruptMessage();
|
|
713
1200
|
|
|
714
|
-
|
|
1201
|
+
// Update context
|
|
1202
|
+
convaiClient.updateTemplateKeys({ user_name: "John" });
|
|
1203
|
+
convaiClient.updateDynamicInfo({ text: "User is browsing products" });
|
|
715
1204
|
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
convaiClient.updateTemplateKeys({
|
|
719
|
-
user_name: "John",
|
|
720
|
-
location: "New York",
|
|
721
|
-
});
|
|
1205
|
+
// Access messages
|
|
1206
|
+
console.log(convaiClient.chatMessages);
|
|
722
1207
|
|
|
723
|
-
//
|
|
724
|
-
convaiClient.
|
|
725
|
-
text: "User is currently browsing the products page",
|
|
726
|
-
});
|
|
1208
|
+
// Access real-time user transcription
|
|
1209
|
+
console.log(convaiClient.userTranscription);
|
|
727
1210
|
```
|
|
728
1211
|
|
|
729
|
-
|
|
1212
|
+
### Audio Controls
|
|
730
1213
|
|
|
731
1214
|
```typescript
|
|
732
|
-
//
|
|
733
|
-
|
|
1215
|
+
// Mute/unmute microphone
|
|
1216
|
+
await convaiClient.audioControls.muteAudio();
|
|
1217
|
+
await convaiClient.audioControls.unmuteAudio();
|
|
1218
|
+
await convaiClient.audioControls.toggleAudio();
|
|
734
1219
|
|
|
735
|
-
//
|
|
736
|
-
convaiClient.
|
|
737
|
-
console.log("New message:", message.content);
|
|
738
|
-
console.log("Message type:", message.type);
|
|
739
|
-
});
|
|
1220
|
+
// Check mute state
|
|
1221
|
+
console.log(convaiClient.isAudioMuted);
|
|
740
1222
|
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
});
|
|
744
|
-
```
|
|
1223
|
+
// Get available microphones
|
|
1224
|
+
const devices = await convaiClient.audioControls.getAudioDevices();
|
|
745
1225
|
|
|
746
|
-
|
|
1226
|
+
// Set microphone
|
|
1227
|
+
await convaiClient.audioControls.setAudioDevice(deviceId);
|
|
747
1228
|
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
| "emotion" // Character's emotional state
|
|
755
|
-
| "behavior-tree" // Behavior tree response
|
|
756
|
-
| "action" // Action execution
|
|
757
|
-
| "bot-emotion" // Bot emotional response
|
|
758
|
-
| "user-llm-text" // User text processed by LLM
|
|
759
|
-
| "interrupt-bot"; // Interrupt message
|
|
1229
|
+
// Monitor audio level
|
|
1230
|
+
convaiClient.audioControls.startAudioLevelMonitoring();
|
|
1231
|
+
convaiClient.audioControls.on("audioLevelChange", (level) => {
|
|
1232
|
+
console.log("Audio level:", level); // 0 to 1
|
|
1233
|
+
});
|
|
1234
|
+
convaiClient.audioControls.stopAudioLevelMonitoring();
|
|
760
1235
|
```
|
|
761
1236
|
|
|
762
|
-
###
|
|
1237
|
+
### Video Controls
|
|
763
1238
|
|
|
764
|
-
|
|
1239
|
+
**⚠️ Requires `enableVideo: true` in config.**
|
|
765
1240
|
|
|
766
1241
|
```typescript
|
|
767
|
-
//
|
|
768
|
-
|
|
1242
|
+
// Enable/disable camera
|
|
1243
|
+
await convaiClient.videoControls.enableVideo();
|
|
1244
|
+
await convaiClient.videoControls.disableVideo();
|
|
1245
|
+
await convaiClient.videoControls.toggleVideo();
|
|
769
1246
|
|
|
770
|
-
// Check
|
|
771
|
-
|
|
772
|
-
console.log("Bot is listening");
|
|
773
|
-
}
|
|
1247
|
+
// Check video state
|
|
1248
|
+
console.log(convaiClient.isVideoEnabled);
|
|
774
1249
|
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
}
|
|
1250
|
+
// Set video quality
|
|
1251
|
+
await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'
|
|
778
1252
|
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
}
|
|
1253
|
+
// Get available cameras
|
|
1254
|
+
const devices = await convaiClient.videoControls.getVideoDevices();
|
|
782
1255
|
|
|
783
|
-
//
|
|
784
|
-
|
|
1256
|
+
// Switch camera
|
|
1257
|
+
await convaiClient.videoControls.setVideoDevice(deviceId);
|
|
785
1258
|
```
|
|
786
1259
|
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
```typescript
|
|
790
|
-
// React
|
|
791
|
-
const { userTranscription } = convaiClient;
|
|
792
|
-
|
|
793
|
-
// Core API (event-based)
|
|
794
|
-
convaiClient.on("userTranscriptionChange", (transcription: string) => {
|
|
795
|
-
console.log("User is saying:", transcription);
|
|
796
|
-
});
|
|
797
|
-
```
|
|
1260
|
+
### Screen Share Controls
|
|
798
1261
|
|
|
799
|
-
|
|
1262
|
+
**⚠️ Requires `enableVideo: true` in config.**
|
|
800
1263
|
|
|
801
1264
|
```typescript
|
|
802
|
-
//
|
|
803
|
-
|
|
1265
|
+
// Start/stop screen share
|
|
1266
|
+
await convaiClient.screenShareControls.enableScreenShare();
|
|
1267
|
+
await convaiClient.screenShareControls.enableScreenShareWithAudio();
|
|
1268
|
+
await convaiClient.screenShareControls.disableScreenShare();
|
|
1269
|
+
await convaiClient.screenShareControls.toggleScreenShare();
|
|
804
1270
|
|
|
805
|
-
//
|
|
806
|
-
convaiClient.
|
|
807
|
-
console.log("Bot is ready to receive messages");
|
|
808
|
-
});
|
|
1271
|
+
// Check screen share state
|
|
1272
|
+
console.log(convaiClient.isScreenShareActive);
|
|
809
1273
|
```
|
|
810
1274
|
|
|
811
|
-
|
|
1275
|
+
---
|
|
1276
|
+
|
|
1277
|
+
## Getting Credentials
|
|
812
1278
|
|
|
813
1279
|
1. Visit [convai.com](https://convai.com) and create an account
|
|
814
1280
|
2. Navigate to your dashboard
|
|
@@ -816,41 +1282,71 @@ convaiClient.on("botReady", () => {
|
|
|
816
1282
|
4. Copy your **API Key** from the dashboard
|
|
817
1283
|
5. Copy your **Character ID** from the character details
|
|
818
1284
|
|
|
819
|
-
|
|
1285
|
+
---
|
|
1286
|
+
|
|
1287
|
+
## TypeScript Support
|
|
1288
|
+
|
|
1289
|
+
All exports are fully typed:
|
|
1290
|
+
|
|
1291
|
+
**React:**
|
|
820
1292
|
|
|
821
1293
|
```typescript
|
|
822
|
-
|
|
823
|
-
|
|
1294
|
+
import type {
|
|
1295
|
+
// Configuration
|
|
1296
|
+
ConvaiConfig,
|
|
824
1297
|
|
|
825
|
-
//
|
|
826
|
-
|
|
1298
|
+
// State
|
|
1299
|
+
ConvaiClientState,
|
|
827
1300
|
|
|
828
|
-
//
|
|
829
|
-
|
|
1301
|
+
// Messages
|
|
1302
|
+
ChatMessage,
|
|
1303
|
+
ChatMessageType,
|
|
830
1304
|
|
|
831
|
-
//
|
|
832
|
-
|
|
833
|
-
|
|
1305
|
+
// Client
|
|
1306
|
+
IConvaiClient,
|
|
1307
|
+
ConvaiClient,
|
|
834
1308
|
|
|
835
|
-
|
|
1309
|
+
// Controls
|
|
1310
|
+
AudioControls,
|
|
1311
|
+
VideoControls,
|
|
1312
|
+
ScreenShareControls,
|
|
1313
|
+
} from "@convai/web-sdk/react";
|
|
1314
|
+
```
|
|
836
1315
|
|
|
837
|
-
|
|
1316
|
+
**Vanilla:**
|
|
838
1317
|
|
|
839
1318
|
```typescript
|
|
840
1319
|
import type {
|
|
841
|
-
|
|
1320
|
+
// Configuration
|
|
842
1321
|
ConvaiConfig,
|
|
1322
|
+
|
|
1323
|
+
// State
|
|
843
1324
|
ConvaiClientState,
|
|
1325
|
+
|
|
1326
|
+
// Messages
|
|
844
1327
|
ChatMessage,
|
|
1328
|
+
ChatMessageType,
|
|
1329
|
+
|
|
1330
|
+
// Client
|
|
1331
|
+
IConvaiClient,
|
|
1332
|
+
ConvaiClient,
|
|
1333
|
+
|
|
1334
|
+
// Controls
|
|
845
1335
|
AudioControls,
|
|
846
1336
|
VideoControls,
|
|
847
1337
|
ScreenShareControls,
|
|
848
|
-
|
|
849
|
-
|
|
1338
|
+
|
|
1339
|
+
// Widget
|
|
1340
|
+
VanillaWidget,
|
|
1341
|
+
VanillaWidgetOptions,
|
|
1342
|
+
} from "@convai/web-sdk/vanilla";
|
|
850
1343
|
```
|
|
851
1344
|
|
|
1345
|
+
---
|
|
1346
|
+
|
|
852
1347
|
## Support
|
|
853
1348
|
|
|
854
|
-
- [
|
|
855
|
-
- [
|
|
856
|
-
- [
|
|
1349
|
+
- **Documentation**: [API Reference](./API_REFERENCE.md)
|
|
1350
|
+
- **Forum**: [Convai Forum](https://forum.convai.com)
|
|
1351
|
+
- **Website**: [convai.com](https://convai.com)
|
|
1352
|
+
- **Issues**: [GitHub Issues](https://github.com/convai/web-sdk/issues)
|