@convai/web-sdk 0.2.4 → 0.3.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +535 -1031
- package/dist/core/AudioManager.d.ts.map +1 -1
- package/dist/core/AudioManager.js +0 -5
- package/dist/core/AudioManager.js.map +1 -1
- package/dist/core/BlendshapeQueue.d.ts +128 -0
- package/dist/core/BlendshapeQueue.d.ts.map +1 -0
- package/dist/core/BlendshapeQueue.js +229 -0
- package/dist/core/BlendshapeQueue.js.map +1 -0
- package/dist/core/ConvaiClient.d.ts +19 -0
- package/dist/core/ConvaiClient.d.ts.map +1 -1
- package/dist/core/ConvaiClient.js +67 -29
- package/dist/core/ConvaiClient.js.map +1 -1
- package/dist/core/MessageHandler.d.ts +7 -0
- package/dist/core/MessageHandler.d.ts.map +1 -1
- package/dist/core/MessageHandler.js +35 -2
- package/dist/core/MessageHandler.js.map +1 -1
- package/dist/core/ScreenShareManager.d.ts.map +1 -1
- package/dist/core/ScreenShareManager.js +0 -3
- package/dist/core/ScreenShareManager.js.map +1 -1
- package/dist/core/VideoManager.d.ts.map +1 -1
- package/dist/core/VideoManager.js +0 -4
- package/dist/core/VideoManager.js.map +1 -1
- package/dist/core/index.d.ts +2 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +2 -0
- package/dist/core/index.js.map +1 -1
- package/dist/core/types.d.ts +19 -0
- package/dist/core/types.d.ts.map +1 -1
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.d.ts +80 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.js +201 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.js.map +1 -0
- package/dist/lipsync-helpers/arkitOrder61.d.ts +121 -0
- package/dist/lipsync-helpers/arkitOrder61.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitOrder61.js +287 -0
- package/dist/lipsync-helpers/arkitOrder61.js.map +1 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.d.ts +155 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.js +362 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.js.map +1 -0
- package/dist/lipsync-helpers/index.d.ts +10 -0
- package/dist/lipsync-helpers/index.d.ts.map +1 -0
- package/dist/lipsync-helpers/index.js +21 -0
- package/dist/lipsync-helpers/index.js.map +1 -0
- package/dist/lipsync-helpers/metahumanOrder251.d.ts +115 -0
- package/dist/lipsync-helpers/metahumanOrder251.d.ts.map +1 -0
- package/dist/lipsync-helpers/metahumanOrder251.js +432 -0
- package/dist/lipsync-helpers/metahumanOrder251.js.map +1 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.d.ts +30 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.d.ts.map +1 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js +315 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js.map +1 -0
- package/dist/react/components/ConvaiWidget.d.ts.map +1 -1
- package/dist/react/components/ConvaiWidget.js +43 -3
- package/dist/react/components/ConvaiWidget.js.map +1 -1
- package/dist/react/hooks/useCharacterInfo.d.ts.map +1 -1
- package/dist/react/hooks/useCharacterInfo.js +1 -1
- package/dist/react/hooks/useCharacterInfo.js.map +1 -1
- package/dist/react/hooks/useConvaiClient.d.ts.map +1 -1
- package/dist/react/hooks/useConvaiClient.js +5 -0
- package/dist/react/hooks/useConvaiClient.js.map +1 -1
- package/dist/utils/speakerManagement.d.ts.map +1 -1
- package/dist/utils/speakerManagement.js +0 -5
- package/dist/utils/speakerManagement.js.map +1 -1
- package/dist/vanilla/AudioRenderer.d.ts +5 -0
- package/dist/vanilla/AudioRenderer.d.ts.map +1 -1
- package/dist/vanilla/AudioRenderer.js +27 -18
- package/dist/vanilla/AudioRenderer.js.map +1 -1
- package/package.json +4 -4
package/README.md CHANGED
@@ -1,47 +1,6 @@
  # @convai/web-sdk

-
-
- JavaScript/TypeScript SDK for building AI voice assistants with real-time audio/video streaming. Drop-in widgets for **React** and **Vanilla JavaScript/TypeScript** with customizable UI components.
-
- ---
-
- ## 📑 Table of Contents
-
- - [Installation](#installation)
- - [Quick Start](#quick-start)
- - [React - ConvaiWidget](#react---convaiwidget)
- - [Vanilla JS/TS - ConvaiWidget](#vanilla-jsts---convaiwidget)
- - [Core Concepts](#core-concepts)
- - [React SDK](#react-sdk)
- - [useConvaiClient Hook](#useconvaiclient-hook)
- - [AudioRenderer Component](#audiorenderer-component)
- - [AudioContext](#audiocontext)
- - [React Exports Reference](#react-exports-reference)
- - [Vanilla SDK](#vanilla-sdk)
- - [ConvaiClient Class](#convaiclient-class)
- - [AudioRenderer Class](#audiorenderer-class)
- - [Vanilla Exports Reference](#vanilla-exports-reference)
- - [Video & Screen Share](#video--screen-share)
- - [Critical Requirements](#critical-requirements)
- - [Enabling Video](#enabling-video)
- - [Enabling Screen Share](#enabling-screen-share)
- - [Building Custom UIs](#building-custom-uis)
- - [Custom Chat Interface](#custom-chat-interface)
- - [Audio Visualizer](#audio-visualizer)
- - [Message Types](#message-types)
- - [API Reference](#api-reference)
- - [Configuration](#configuration)
- - [Connection Management](#connection-management)
- - [Messaging](#messaging)
- - [Audio Controls](#audio-controls)
- - [Video Controls](#video-controls)
- - [Screen Share Controls](#screen-share-controls)
- - [Getting Credentials](#getting-credentials)
- - [TypeScript Support](#typescript-support)
- - [Support](#support)
-
- ---
+ JavaScript/TypeScript SDK for Convai AI voice assistants. Build voice-powered AI interactions for web applications with real-time audio/video streaming. Supports both React and Vanilla JavaScript/TypeScript.

  ## Installation

@@ -49,25 +8,14 @@ JavaScript/TypeScript SDK for building AI voice assistants with real-time audio/
  npm install @convai/web-sdk
  ```

-
+ ## Basic Setup

-
- npm install react@^18.0.0 react-dom@^18.0.0
- ```
-
- ---
-
- ## Quick Start
-
- ### React - ConvaiWidget
-
- The `ConvaiWidget` is a complete, pre-built chat interface with voice/video capabilities.
+ ### React

  ```tsx
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk
+ import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";

  function App() {
-   // Initialize the Convai client
    const convaiClient = useConvaiClient({
      apiKey: "your-api-key",
      characterId: "your-character-id",
@@ -77,166 +25,7 @@ function App() {
  }
  ```

-
-
- ---
-
- ## 🤖 For AI Code Generators (v0, Lovable, Bolt, etc.)
-
- **If you're using an AI coding assistant to add Convai to your project, use this exact template to avoid errors:**
-
- ### Copy-Paste Template (Works Every Time)
-
- ```tsx
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
-
- export default function App() {
-   // Step 1: Create the client with your credentials
-   const convaiClient = useConvaiClient({
-     apiKey: "your-api-key-here",
-     characterId: "your-character-id-here"
-   });
-
-   // Step 2: Pass ONLY the client to the widget
-   return <ConvaiWidget convaiClient={convaiClient} />;
- }
- ```
-
- ### Common Mistakes AI Tools Make
-
- ❌ **DON'T DO THIS:**
- ```tsx
- // Wrong: Passing props directly to ConvaiWidget
- <ConvaiWidget apiKey="..." characterId="..." />
-
- // Wrong: Stringifying the client
- <ConvaiWidget convaiClient={JSON.stringify(convaiClient)} />
-
- // Wrong: Spreading client properties
- <ConvaiWidget {...convaiClient} />
-
- // Wrong: Using client in string context
- const info = `Client: ${convaiClient}`; // "Cannot convert object to primitive value"
-
- // Wrong: Passing client through env vars
- const client = process.env.CONVAI_CLIENT; // This won't work
- ```
-
- ✅ **DO THIS:**
- ```tsx
- // Correct: Client created in component, passed as object
- const convaiClient = useConvaiClient({
-   apiKey: "your-api-key",
-   characterId: "your-character-id"
- });
-
- return <ConvaiWidget convaiClient={convaiClient} />;
- ```
-
- ### If You Get "Cannot convert object to primitive value"
-
- This error means you're using the client object in a primitive context. Check for:
-
- 1. **String concatenation:**
- ```tsx
- // ❌ Wrong
- console.log("Client: " + convaiClient);
-
- // ✅ Correct
- console.log("Connected:", convaiClient.state.isConnected);
- ```
-
- 2. **Template literals:**
- ```tsx
- // ❌ Wrong
- const text = `Client: ${convaiClient}`;
-
- // ✅ Correct
- const text = `Status: ${convaiClient.state.agentState}`;
- ```
-
- 3. **Using as a key:**
- ```tsx
- // ❌ Wrong
- <div key={convaiClient}>...</div>
-
- // ✅ Correct
- <div key="convai-widget">...</div>
- ```
-
- ### Complete Working Example for AI Tools
-
- ```tsx
- import React from 'react';
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
-
- function App() {
-   // Initialize the client
-   const convaiClient = useConvaiClient({
-     apiKey: "your-api-key-here",
-     characterId: "your-character-id-here",
-     enableVideo: false, // Set to true if you need video
-   });
-
-   // Optional: Add debug logging
-   console.log("Convai client initialized:", !!convaiClient);
-   console.log("Connection status:", convaiClient?.state?.isConnected);
-
-   return (
-     <div style={{ width: '100vw', height: '100vh' }}>
-       <ConvaiWidget convaiClient={convaiClient} />
-     </div>
-   );
- }
-
- export default App;
- ```
-
- ### Prompt for AI Code Generators
-
- **Use this prompt to get correct code:**
-
- ```
- Add Convai voice chat to this app using @convai/web-sdk.
-
- CRITICAL RULES:
- 1. Import: import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
- 2. Create client INSIDE component: const convaiClient = useConvaiClient({ apiKey: "...", characterId: "..." });
- 3. Pass ONLY client to widget: <ConvaiWidget convaiClient={convaiClient} />
- 4. DO NOT pass apiKey or characterId directly to ConvaiWidget
- 5. DO NOT stringify, spread, or destructure the client object
- 6. DO NOT use the client object in string contexts
-
- Example:
- const convaiClient = useConvaiClient({ apiKey: "KEY", characterId: "ID" });
- return <ConvaiWidget convaiClient={convaiClient} />;
- ```
-
- ### Video & Screen Share with AI Tools
-
- If you need video/screen share, add these TWO changes:
-
- ```tsx
- // Step 1: Add enableVideo to client config
- const convaiClient = useConvaiClient({
-   apiKey: "your-api-key",
-   characterId: "your-character-id",
-   enableVideo: true // ← Required for video features
- });
-
- // Step 2: Show controls in widget
- <ConvaiWidget
-   convaiClient={convaiClient}
-   showVideo={true} // ← Shows video button
-   showScreenShare={true} // ← Shows screen share button
- />
- ```
-
- **Without `enableVideo: true`, video and screen share will NOT work even if you show the buttons.**
-
-
-
- ### Vanilla JS/TS - ConvaiWidget
+ ### Vanilla TypeScript

  ```typescript
  import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
@@ -247,7 +36,7 @@ const client = new ConvaiClient({
    characterId: "your-character-id",
  });

- // Create
+ // Create widget - auto-connects on first user click
  const widget = createConvaiWidget(document.body, {
    convaiClient: client,
  });
@@ -256,1025 +45,770 @@ const widget = createConvaiWidget(document.body, {
|
|
|
256
45
|
widget.destroy();
|
|
257
46
|
```
|
|
258
47
|
|
|
259
|
-
|
|
48
|
+
## Exports
|
|
260
49
|
|
|
261
|
-
|
|
50
|
+
### React Exports (`@convai/web-sdk` or `@convai/web-sdk/react`)
|
|
262
51
|
|
|
263
|
-
|
|
52
|
+
**Components:**
|
|
264
53
|
|
|
265
|
-
|
|
266
|
-
┌─────────────────────────────────────────────────┐
|
|
267
|
-
│ ConvaiWidget (UI Layer) │
|
|
268
|
-
│ ├─ Chat Interface │
|
|
269
|
-
│ ├─ Voice Mode │
|
|
270
|
-
│ └─ Video/Screen Share UI │
|
|
271
|
-
└─────────────────────────────────────────────────┘
|
|
272
|
-
▼
|
|
273
|
-
┌─────────────────────────────────────────────────┐
|
|
274
|
-
│ ConvaiClient (Core Logic) │
|
|
275
|
-
│ ├─ Connection Management │
|
|
276
|
-
│ ├─ Message Handling │
|
|
277
|
-
│ ├─ State Management │
|
|
278
|
-
│ └─ Audio/Video Controls │
|
|
279
|
-
└─────────────────────────────────────────────────┘
|
|
280
|
-
▼
|
|
281
|
-
┌─────────────────────────────────────────────────┐
|
|
282
|
-
│ WebRTC Room (Communication Layer) │
|
|
283
|
-
│ ├─ Real-time Audio/Video Streaming │
|
|
284
|
-
│ ├─ Track Management │
|
|
285
|
-
│ └─ Network Communication │
|
|
286
|
-
└─────────────────────────────────────────────────┘
|
|
287
|
-
▼
|
|
288
|
-
┌─────────────────────────────────────────────────┐
|
|
289
|
-
│ AudioRenderer (Critical for Playback) │
|
|
290
|
-
│ ├─ Attaches audio tracks to DOM │
|
|
291
|
-
│ ├─ Manages audio elements │
|
|
292
|
-
│ └─ Enables bot voice playback │
|
|
293
|
-
└─────────────────────────────────────────────────┘
|
|
294
|
-
```
|
|
295
|
-
|
|
296
|
-
### Key Principles
|
|
297
|
-
|
|
298
|
-
1. **ConvaiClient** - The brain. Manages connection, state, and communication with Convai servers.
|
|
299
|
-
2. **AudioRenderer** - **CRITICAL**: Without this, you won't hear the bot. It renders audio to the user's speakers.
|
|
300
|
-
3. **ConvaiWidget** - The complete UI. Uses both ConvaiClient and AudioRenderer internally.
|
|
301
|
-
4. **Connection Type** - Determines capabilities:
|
|
302
|
-
- `"audio"` (default) - Audio only
|
|
303
|
-
- `"video"` - Audio + Video + Screen Share
|
|
304
|
-
|
|
305
|
-
---
|
|
306
|
-
|
|
307
|
-
## React SDK
|
|
54
|
+
- `ConvaiWidget` - Main chat widget component
|
|
308
55
|
|
|
309
|
-
|
|
56
|
+
**Hooks:**
|
|
310
57
|
|
|
311
|
-
|
|
58
|
+
- `useConvaiClient(config?)` - Main client hook
|
|
59
|
+
- `useCharacterInfo(characterId, apiKey)` - Fetch character metadata
|
|
60
|
+
- `useLocalCameraTrack()` - Get local camera track
|
|
312
61
|
|
|
313
|
-
**
|
|
62
|
+
**Core Client:**
|
|
314
63
|
|
|
315
|
-
|
|
64
|
+
- `ConvaiClient` - Core client class
|
|
316
65
|
|
|
317
|
-
|
|
318
|
-
- Provides reactive state (connection, messages, activity)
|
|
319
|
-
- Handles connection lifecycle
|
|
320
|
-
- Exposes audio/video/screen share controls
|
|
321
|
-
|
|
322
|
-
```tsx
|
|
323
|
-
import { useConvaiClient } from "@convai/web-sdk/react";
|
|
324
|
-
|
|
325
|
-
function ChatbotWrapper() {
|
|
326
|
-
const convaiClient = useConvaiClient({
|
|
327
|
-
apiKey: "your-api-key",
|
|
328
|
-
characterId: "your-character-id",
|
|
329
|
-
enableVideo: false, // Default: audio only
|
|
330
|
-
startWithAudioOn: false, // Mic starts muted
|
|
331
|
-
});
|
|
66
|
+
**Types:**
|
|
332
67
|
|
|
333
|
-
|
|
334
|
-
|
|
68
|
+
- `ConvaiConfig` - Configuration interface
|
|
69
|
+
- `ConvaiClientState` - Client state interface
|
|
70
|
+
- `ChatMessage` - Message interface
|
|
71
|
+
- `IConvaiClient` - Client interface
|
|
72
|
+
- `AudioControls` - Audio control interface
|
|
73
|
+
- `VideoControls` - Video control interface
|
|
74
|
+
- `ScreenShareControls` - Screen share control interface
|
|
335
75
|
|
|
336
|
-
|
|
337
|
-
const handleMute = () => convaiClient.audioControls.muteAudio();
|
|
338
|
-
const handleSend = () =>
|
|
339
|
-
convaiClient.sendUserTextMessage("Hello, character!");
|
|
76
|
+
**Components:**
|
|
340
77
|
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
<p>Status: {state.agentState}</p>
|
|
344
|
-
<p>Messages: {chatMessages.length}</p>
|
|
345
|
-
<button onClick={handleMute}>Mute</button>
|
|
346
|
-
<button onClick={handleSend}>Send</button>
|
|
347
|
-
</div>
|
|
348
|
-
);
|
|
349
|
-
}
|
|
350
|
-
```
|
|
78
|
+
- `AudioRenderer` - Audio playback component
|
|
79
|
+
- `AudioContext` - Audio context provider
|
|
351
80
|
|
|
352
|
-
###
|
|
81
|
+
### Vanilla Exports (`@convai/web-sdk/vanilla`)
|
|
353
82
|
|
|
354
|
-
**
|
|
83
|
+
**Functions:**
|
|
355
84
|
|
|
356
|
-
|
|
85
|
+
- `createConvaiWidget(container, options)` - Create widget instance
|
|
86
|
+
- `destroyConvaiWidget(widget)` - Destroy widget instance
|
|
357
87
|
|
|
358
|
-
**
|
|
88
|
+
**Classes:**
|
|
359
89
|
|
|
360
|
-
-
|
|
361
|
-
-
|
|
90
|
+
- `ConvaiClient` - Core client class
|
|
91
|
+
- `AudioRenderer` - Audio playback handler
|
|
362
92
|
|
|
363
|
-
**
|
|
93
|
+
**Types:**
|
|
364
94
|
|
|
365
|
-
-
|
|
366
|
-
-
|
|
367
|
-
-
|
|
95
|
+
- `VanillaWidget` - Widget instance interface
|
|
96
|
+
- `VanillaWidgetOptions` - Widget options interface
|
|
97
|
+
- `IConvaiClient` - Client interface
|
|
98
|
+
- `ConvaiConfig` - Configuration interface
|
|
99
|
+
- `ConvaiClientState` - Client state interface
|
|
100
|
+
- `ChatMessage` - Message interface
|
|
368
101
|
|
|
369
|
-
|
|
370
|
-
import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
|
|
102
|
+
### Core Exports (`@convai/web-sdk/core`)
|
|
371
103
|
|
|
372
|
-
|
|
373
|
-
const convaiClient = useConvaiClient({
|
|
374
|
-
apiKey: "your-api-key",
|
|
375
|
-
characterId: "your-character-id",
|
|
376
|
-
});
|
|
104
|
+
**Classes:**
|
|
377
105
|
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
<div>
|
|
385
|
-
{convaiClient.chatMessages.map((msg) => (
|
|
386
|
-
<div key={msg.id}>{msg.content}</div>
|
|
387
|
-
))}
|
|
388
|
-
</div>
|
|
389
|
-
</div>
|
|
390
|
-
);
|
|
391
|
-
}
|
|
392
|
-
```
|
|
106
|
+
- `ConvaiClient` - Main client class
|
|
107
|
+
- `AudioManager` - Audio management
|
|
108
|
+
- `VideoManager` - Video management
|
|
109
|
+
- `ScreenShareManager` - Screen share management
|
|
110
|
+
- `MessageHandler` - Message handling
|
|
111
|
+
- `EventEmitter` - Event emitter base class
|
|
393
112
|
|
|
394
|
-
|
|
113
|
+
**Types:**
|
|
395
114
|
|
|
396
|
-
|
|
115
|
+
- All types from React/Vanilla exports
|
|
116
|
+
- `ConvaiClientType` - Type alias for ConvaiClient
|
|
397
117
|
|
|
398
|
-
|
|
118
|
+
## Props and Configuration
|
|
399
119
|
|
|
400
|
-
|
|
120
|
+
### ConvaiWidget Props (React)
|
|
401
121
|
|
|
402
122
|
```tsx
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
</AudioContext.Provider>
|
|
416
|
-
);
|
|
417
|
-
}
|
|
418
|
-
|
|
419
|
-
function ChildComponent() {
|
|
420
|
-
const room = useContext(AudioContext);
|
|
421
|
-
// Access WebRTC room directly
|
|
422
|
-
console.log("Room state:", room?.state);
|
|
423
|
-
return <div>Child has access to Room</div>;
|
|
123
|
+
interface ConvaiWidgetProps {
|
|
124
|
+
/** Convai client instance (required) */
|
|
125
|
+
convaiClient: IConvaiClient & {
|
|
126
|
+
activity?: string;
|
|
127
|
+
isAudioMuted: boolean;
|
|
128
|
+
isVideoEnabled: boolean;
|
|
129
|
+
isScreenShareActive: boolean;
|
|
130
|
+
};
|
|
131
|
+
/** Show video toggle button in settings (default: true) */
|
|
132
|
+
showVideo?: boolean;
|
|
133
|
+
/** Show screen share toggle button in settings (default: true) */
|
|
134
|
+
showScreenShare?: boolean;
|
|
424
135
|
}
|
|
425
136
|
```
|
|
426
137
|
|
|
427
|
-
###
|
|
428
|
-
|
|
429
|
-
```tsx
|
|
430
|
-
// Components
|
|
431
|
-
import { ConvaiWidget } from "@convai/web-sdk/react";
|
|
432
|
-
|
|
433
|
-
// Hooks
|
|
434
|
-
import { useConvaiClient, useCharacterInfo } from "@convai/web-sdk/react";
|
|
435
|
-
|
|
436
|
-
// Audio Rendering (Critical)
|
|
437
|
-
import { AudioRenderer, AudioContext } from "@convai/web-sdk/react";
|
|
438
|
-
|
|
439
|
-
// Core Client (for advanced usage)
|
|
440
|
-
import { ConvaiClient } from "@convai/web-sdk/react";
|
|
441
|
-
|
|
442
|
-
// Types
|
|
443
|
-
import type {
|
|
444
|
-
ConvaiConfig,
|
|
445
|
-
ConvaiClientState,
|
|
446
|
-
ChatMessage,
|
|
447
|
-
IConvaiClient,
|
|
448
|
-
AudioControls,
|
|
449
|
-
VideoControls,
|
|
450
|
-
ScreenShareControls,
|
|
451
|
-
} from "@convai/web-sdk/react";
|
|
452
|
-
```
|
|
453
|
-
|
|
454
|
-
---
|
|
455
|
-
|
|
456
|
-
## Vanilla SDK
|
|
457
|
-
|
|
458
|
-
### ConvaiClient Class
|
|
459
|
-
|
|
460
|
-
**Purpose**: Core client for managing Convai connections in vanilla JavaScript/TypeScript.
|
|
461
|
-
|
|
462
|
-
**When to Use**: Any non-React application or when you need full control.
|
|
463
|
-
|
|
464
|
-
**What It Provides**:
|
|
465
|
-
|
|
466
|
-
- Connection management
|
|
467
|
-
- Message handling
|
|
468
|
-
- State management (via events)
|
|
469
|
-
- Audio/video/screen share controls
|
|
470
|
-
|
|
471
|
-
```typescript
|
|
472
|
-
import { ConvaiClient } from "@convai/web-sdk/vanilla";
|
|
473
|
-
|
|
474
|
-
const client = new ConvaiClient({
|
|
475
|
-
apiKey: "your-api-key",
|
|
476
|
-
characterId: "your-character-id",
|
|
477
|
-
});
|
|
478
|
-
|
|
479
|
-
// Connect
|
|
480
|
-
await client.connect();
|
|
481
|
-
|
|
482
|
-
// Listen to events
|
|
483
|
-
client.on("stateChange", (state) => {
|
|
484
|
-
console.log("Agent state:", state.agentState);
|
|
485
|
-
});
|
|
486
|
-
|
|
487
|
-
client.on("message", (message) => {
|
|
488
|
-
console.log("New message:", message.content);
|
|
489
|
-
});
|
|
490
|
-
|
|
491
|
-
// Send messages
|
|
492
|
-
client.sendUserTextMessage("Hello!");
|
|
493
|
-
|
|
494
|
-
// Control audio
|
|
495
|
-
await client.audioControls.muteAudio();
|
|
496
|
-
await client.audioControls.unmuteAudio();
|
|
497
|
-
|
|
498
|
-
// Disconnect
|
|
499
|
-
await client.disconnect();
|
|
500
|
-
```
|
|
501
|
-
|
|
502
|
-
### AudioRenderer Class
|
|
503
|
-
|
|
504
|
-
**Purpose**: Manages audio playback for vanilla JavaScript/TypeScript applications.
|
|
505
|
-
|
|
506
|
-
**⚠️ CRITICAL**: Without this, you will NOT hear the bot's voice.
|
|
507
|
-
|
|
508
|
-
**When to Use**:
|
|
509
|
-
|
|
510
|
-
- Always when building custom vanilla UIs
|
|
511
|
-
- Already included in vanilla `ConvaiWidget` (no need to add separately)
|
|
512
|
-
|
|
513
|
-
**How It Works**:
|
|
514
|
-
|
|
515
|
-
- Attaches to the WebRTC room
|
|
516
|
-
- Automatically creates hidden `<audio>` elements
|
|
517
|
-
- Manages audio playback for remote participants (the bot)
|
|
138
|
+
### createConvaiWidget Options (Vanilla)
|
|
518
139
|
|
|
519
140
|
```typescript
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
}
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
// Your custom UI logic...
|
|
533
|
-
|
|
534
|
-
// Cleanup
|
|
535
|
-
audioRenderer.destroy();
|
|
536
|
-
await client.disconnect();
|
|
141
|
+
interface VanillaWidgetOptions {
|
|
142
|
+
/** Convai client instance (required) */
|
|
143
|
+
convaiClient: IConvaiClient & {
|
|
144
|
+
activity?: string;
|
|
145
|
+
chatMessages: ChatMessage[];
|
|
146
|
+
};
|
|
147
|
+
/** Show video toggle button in settings (default: true) */
|
|
148
|
+
showVideo?: boolean;
|
|
149
|
+
/** Show screen share toggle button in settings (default: true) */
|
|
150
|
+
showScreenShare?: boolean;
|
|
151
|
+
}
|
|
537
152
|
```
|
|
538
153
|
|
|
539
|
-
###
|
|
154
|
+
### ConvaiConfig
|
|
540
155
|
|
|
541
156
|
```typescript
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
157
|
+
interface ConvaiConfig {
|
|
158
|
+
/** Your Convai API key from convai.com dashboard (required) */
|
|
159
|
+
apiKey: string;
|
|
160
|
+
/** The Character ID to connect to (required) */
|
|
161
|
+
characterId: string;
|
|
162
|
+
/**
|
|
163
|
+
* End user identifier for speaker management (optional).
|
|
164
|
+
* When provided: enables long-term memory and analytics
|
|
165
|
+
* When not provided: anonymous mode, no persistent memory
|
|
166
|
+
*/
|
|
167
|
+
endUserId?: string;
|
|
168
|
+
/** Custom Convai API URL (optional, defaults to production endpoint) */
|
|
169
|
+
url?: string;
|
|
170
|
+
/**
|
|
171
|
+
* Enable video capability (default: false).
|
|
172
|
+
* If true, connection_type will be "video" (supports audio, video, and screenshare).
|
|
173
|
+
* If false, connection_type will be "audio" (audio only).
|
|
174
|
+
*/
|
|
175
|
+
enableVideo?: boolean;
|
|
176
|
+
/**
|
|
177
|
+
* Start with video camera on when connecting (default: false).
|
|
178
|
+
* Only works if enableVideo is true.
|
|
179
|
+
*/
|
|
180
|
+
startWithVideoOn?: boolean;
|
|
181
|
+
/**
|
|
182
|
+
* Start with microphone on when connecting (default: false).
|
|
183
|
+
* If false, microphone stays off until user enables it.
|
|
184
|
+
*/
|
|
185
|
+
startWithAudioOn?: boolean;
|
|
186
|
+
/** Enable text-to-speech audio generation (default: true) */
|
|
187
|
+
ttsEnabled?: boolean;
|
|
188
|
+
}
|
|
560
189
|
```
|
|
561
190
|
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
## Video & Screen Share
|
|
565
|
-
|
|
566
|
-
### Critical Requirements
|
|
567
|
-
|
|
568
|
-
> ⚠️ **IMPORTANT**: Video and Screen Share features require **TWO** configuration changes:
|
|
569
|
-
|
|
570
|
-
#### 1. Set `enableVideo: true` in Client Configuration
|
|
571
|
-
|
|
572
|
-
This sets the connection type to `"video"` which enables video capabilities.
|
|
573
|
-
|
|
574
|
-
#### 2. Set `showVideo` and/or `showScreenShare` in Widget Props
|
|
575
|
-
|
|
576
|
-
This shows the UI controls for video/screen share.
|
|
191
|
+
## Features
|
|
577
192
|
|
|
578
|
-
|
|
193
|
+
### Video Enabled Chat
|
|
579
194
|
|
|
580
|
-
|
|
195
|
+
To enable video capabilities, set `enableVideo: true` in your configuration. This enables audio, video, and screen sharing.
|
|
581
196
|
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
#### React
|
|
197
|
+
**React:**
|
|
585
198
|
|
|
586
199
|
```tsx
|
|
587
|
-
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk
|
|
200
|
+
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
|
|
588
201
|
|
|
589
202
|
function App() {
|
|
590
|
-
// ✅ STEP 1: Enable video in client config
|
|
591
203
|
const convaiClient = useConvaiClient({
|
|
592
204
|
apiKey: "your-api-key",
|
|
593
205
|
characterId: "your-character-id",
|
|
594
|
-
enableVideo: true,
|
|
206
|
+
enableVideo: true,
|
|
595
207
|
startWithVideoOn: false, // Camera off by default
|
|
596
208
|
});
|
|
597
209
|
|
|
598
210
|
return (
|
|
599
211
|
<ConvaiWidget
|
|
600
212
|
convaiClient={convaiClient}
|
|
601
|
-
showVideo={true}
|
|
602
|
-
showScreenShare={
|
|
213
|
+
showVideo={true}
|
|
214
|
+
showScreenShare={true}
|
|
603
215
|
/>
|
|
604
216
|
);
|
|
605
217
|
}
|
|
606
218
|
```
|
|
607
219
|
|
|
608
|
-
|
|
220
|
+
**Vanilla:**
|
|
609
221
|
|
|
610
222
|
```typescript
|
|
611
223
|
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
|
|
612
224
|
|
|
613
|
-
// ✅ STEP 1: Enable video in client config
|
|
614
225
|
const client = new ConvaiClient({
|
|
615
226
|
apiKey: "your-api-key",
|
|
616
227
|
characterId: "your-character-id",
|
|
617
|
-
enableVideo: true,
|
|
228
|
+
enableVideo: true,
|
|
618
229
|
startWithVideoOn: false,
|
|
619
230
|
});
|
|
620
231
|
|
|
621
232
|
const widget = createConvaiWidget(document.body, {
|
|
622
233
|
convaiClient: client,
|
|
623
|
-
showVideo: true,
|
|
624
|
-
showScreenShare:
|
|
234
|
+
showVideo: true,
|
|
235
|
+
showScreenShare: true,
|
|
625
236
|
});
|
|
626
237
|
```
|
|
627
238
|
|
|
628
|
-
|
|
239
|
+
**Manual Video Controls:**
|
|
629
240
|
|
|
630
241
|
```typescript
|
|
631
|
-
// Enable camera
|
|
242
|
+
// Enable video camera
|
|
632
243
|
await convaiClient.videoControls.enableVideo();
|
|
633
244
|
|
|
634
|
-
// Disable camera
|
|
245
|
+
// Disable video camera
|
|
635
246
|
await convaiClient.videoControls.disableVideo();
|
|
636
247
|
|
|
637
|
-
// Toggle
|
|
248
|
+
// Toggle video
|
|
638
249
|
await convaiClient.videoControls.toggleVideo();
|
|
639
250
|
|
|
640
|
-
// Check state
|
|
641
|
-
|
|
251
|
+
// Check video state
|
|
252
|
+
const isVideoEnabled = convaiClient.videoControls.isVideoEnabled;
|
|
642
253
|
|
|
643
254
|
// Set video quality
|
|
644
255
|
await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'
|
|
645
256
|
|
|
646
|
-
// Get available
|
|
257
|
+
// Get available video devices
|
|
647
258
|
const devices = await convaiClient.videoControls.getVideoDevices();
|
|
648
259
|
|
|
649
|
-
//
|
|
260
|
+
// Set specific video device
|
|
650
261
|
await convaiClient.videoControls.setVideoDevice(deviceId);
|
|
651
262
|
```
|
|
652
263
|
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
### Enabling Screen Share
|
|
656
|
-
|
|
657
|
-
Screen sharing **requires** `enableVideo: true` (connection type must be `"video"`).
|
|
658
|
-
|
|
659
|
-
#### React
|
|
660
|
-
|
|
661
|
-
```tsx
|
|
662
|
-
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
|
|
663
|
-
|
|
664
|
-
function App() {
|
|
665
|
-
// ✅ STEP 1: Enable video (required for screen share)
|
|
666
|
-
const convaiClient = useConvaiClient({
|
|
667
|
-
apiKey: "your-api-key",
|
|
668
|
-
characterId: "your-character-id",
|
|
669
|
-
enableVideo: true, // ← REQUIRED for screen share
|
|
670
|
-
});
|
|
671
|
-
|
|
672
|
-
return (
|
|
673
|
-
<ConvaiWidget
|
|
674
|
-
convaiClient={convaiClient}
|
|
675
|
-
showVideo={true} // Optional: show video controls
|
|
676
|
-
showScreenShare={true} // ← STEP 2: Show screen share controls
|
|
677
|
-
/>
|
|
678
|
-
);
|
|
679
|
-
}
|
|
680
|
-
```
|
|
681
|
-
|
|
682
|
-
#### Vanilla
|
|
264
|
+
**Screen Sharing:**
|
|
683
265
|
|
|
684
266
|
```typescript
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
// ✅ STEP 1: Enable video (required for screen share)
|
|
688
|
-
const client = new ConvaiClient({
|
|
689
|
-
apiKey: "your-api-key",
|
|
690
|
-
characterId: "your-character-id",
|
|
691
|
-
enableVideo: true, // ← REQUIRED for screen share
|
|
692
|
-
});
|
|
693
|
-
|
|
694
|
-
const widget = createConvaiWidget(document.body, {
|
|
695
|
-
convaiClient: client,
|
|
696
|
-
showVideo: true,
|
|
697
|
-
showScreenShare: true, // ← STEP 2: Show screen share controls
|
|
698
|
-
});
|
|
699
|
-
```
|
|
700
|
-
|
|
701
|
-
#### Manual Screen Share Control
|
|
702
|
-
|
|
703
|
-
```typescript
|
|
704
|
-
// Start screen share
|
|
267
|
+
// Enable screen share
|
|
705
268
|
await convaiClient.screenShareControls.enableScreenShare();
|
|
706
269
|
|
|
707
|
-
//
|
|
270
|
+
// Enable screen share with audio
|
|
708
271
|
await convaiClient.screenShareControls.enableScreenShareWithAudio();
|
|
709
272
|
|
|
710
|
-
//
|
|
273
|
+
// Disable screen share
|
|
711
274
|
await convaiClient.screenShareControls.disableScreenShare();
|
|
712
275
|
|
|
713
276
|
// Toggle screen share
|
|
714
277
|
await convaiClient.screenShareControls.toggleScreenShare();
|
|
715
278
|
|
|
716
|
-
// Check state
|
|
717
|
-
|
|
718
|
-
```
|
|
719
|
-
|
|
720
|
-
---
|
|
721
|
-
|
|
722
|
-
## Building Custom UIs
|
|
723
|
-
|
|
724
|
-
### Custom Chat Interface
|
|
725
|
-
|
|
726
|
-
Use the `chatMessages` array from ConvaiClient to build your own chat UI.
|
|
727
|
-
|
|
728
|
-
#### React Example
|
|
729
|
-
|
|
730
|
-
```tsx
|
|
731
|
-
import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
|
|
732
|
-
import { useState } from "react";
|
|
733
|
-
|
|
734
|
-
function CustomChatUI() {
|
|
735
|
-
const convaiClient = useConvaiClient({
|
|
736
|
-
apiKey: "your-api-key",
|
|
737
|
-
characterId: "your-character-id",
|
|
738
|
-
});
|
|
739
|
-
|
|
740
|
-
const { chatMessages, state } = convaiClient;
|
|
741
|
-
const [inputValue, setInputValue] = useState("");
|
|
742
|
-
|
|
743
|
-
const handleSend = () => {
|
|
744
|
-
if (inputValue.trim() && state.isConnected) {
|
|
745
|
-
convaiClient.sendUserTextMessage(inputValue);
|
|
746
|
-
setInputValue("");
|
|
747
|
-
}
|
|
748
|
-
};
|
|
749
|
-
|
|
750
|
-
return (
|
|
751
|
-
<div>
|
|
752
|
-
{/* CRITICAL: AudioRenderer for bot voice */}
|
|
753
|
-
<AudioRenderer />
|
|
754
|
-
|
|
755
|
-
{/* Chat Messages */}
|
|
756
|
-
<div className="chat-container">
|
|
757
|
-
{chatMessages.map((msg) => {
|
|
758
|
-
const isUser = msg.type.includes("user");
|
|
759
|
-
const displayMessage =
|
|
760
|
-
msg.type === "user-llm-text" || msg.type === "bot-llm-text";
|
|
761
|
-
|
|
762
|
-
if (!displayMessage) return null;
|
|
763
|
-
|
|
764
|
-
return (
|
|
765
|
-
<div
|
|
766
|
-
key={msg.id}
|
|
767
|
-
className={isUser ? "user-message" : "bot-message"}
|
|
768
|
-
>
|
|
769
|
-
<span className="sender">
|
|
770
|
-
{isUser ? "You" : "Character"}
|
|
771
|
-
</span>
|
|
772
|
-
<p>{msg.content}</p>
|
|
773
|
-
<span className="timestamp">
|
|
774
|
-
{new Date(msg.timestamp).toLocaleTimeString()}
|
|
775
|
-
</span>
|
|
776
|
-
</div>
|
|
777
|
-
);
|
|
778
|
-
})}
|
|
779
|
-
</div>
|
|
780
|
-
|
|
781
|
-
{/* Input */}
|
|
782
|
-
<div className="input-container">
|
|
783
|
-
<input
|
|
784
|
-
type="text"
|
|
785
|
-
value={inputValue}
|
|
786
|
-
onChange={(e) => setInputValue(e.target.value)}
|
|
787
|
-
onKeyPress={(e) => e.key === "Enter" && handleSend()}
|
|
788
|
-
placeholder="Type a message..."
|
|
789
|
-
disabled={!state.isConnected}
|
|
790
|
-
/>
|
|
791
|
-
<button onClick={handleSend} disabled={!state.isConnected}>
|
|
792
|
-
Send
|
|
793
|
-
</button>
|
|
794
|
-
</div>
|
|
795
|
-
|
|
796
|
-
{/* Status Indicator */}
|
|
797
|
-
<div className="status">
|
|
798
|
-
{state.isConnecting && "Connecting..."}
|
|
799
|
-
{state.isConnected && state.agentState}
|
|
800
|
-
{!state.isConnected && "Disconnected"}
|
|
801
|
-
</div>
|
|
802
|
-
</div>
|
|
803
|
-
);
|
|
804
|
-
}
|
|
279
|
+
// Check screen share state
|
|
280
|
+
const isActive = convaiClient.screenShareControls.isScreenShareActive;
|
|
805
281
|
```
|
|
806
282
|
|
|
807
|
-
|
|
283
|
+
**Video State Monitoring:**
|
|
808
284
|
|
|
809
285
|
```typescript
|
|
810
|
-
|
|
286
|
+
// React
|
|
287
|
+
const { isVideoEnabled } = convaiClient;
|
|
811
288
|
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
289
|
+
// Core API (event-based)
|
|
290
|
+
convaiClient.videoControls.on("videoStateChange", (state) => {
|
|
291
|
+
console.log("Video enabled:", state.isVideoEnabled);
|
|
292
|
+
console.log("Video hidden:", state.isVideoHidden);
|
|
815
293
|
});
|
|
294
|
+
```
|
|
816
295
|
|
|
817
|
-
|
|
296
|
+
### Interruption
|
|
818
297
|
|
|
819
|
-
|
|
820
|
-
const audioRenderer = new AudioRenderer(client.room);
|
|
298
|
+
Interrupt the character's current response to allow the user to speak immediately.
|
|
821
299
|
|
|
822
|
-
|
|
823
|
-
const inputElement = document.getElementById("message-input");
|
|
824
|
-
const sendButton = document.getElementById("send-button");
|
|
300
|
+
**React:**
|
|
825
301
|
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
302
|
+
```tsx
|
|
303
|
+
function ChatInterface() {
|
|
304
|
+
const convaiClient = useConvaiClient({
|
|
305
|
+
/* config */
|
|
306
|
+
});
|
|
829
307
|
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
308
|
+
const handleInterrupt = () => {
|
|
309
|
+
// Interrupt the bot's current response
|
|
310
|
+
convaiClient.sendInterruptMessage();
|
|
311
|
+
};
|
|
834
312
|
|
|
835
|
-
|
|
313
|
+
return <button onClick={handleInterrupt}>Interrupt</button>;
|
|
314
|
+
}
|
|
315
|
+
```
|
|
836
316
|
|
|
837
|
-
|
|
838
|
-
messageDiv.className = isUser ? "user-message" : "bot-message";
|
|
317
|
+
**Vanilla:**
|
|
839
318
|
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
sender.className = "sender";
|
|
319
|
+
```typescript
|
|
320
|
+
const interruptButton = document.getElementById("interrupt-btn");
|
|
843
321
|
|
|
844
|
-
|
|
845
|
-
|
|
322
|
+
interruptButton.addEventListener("click", () => {
|
|
323
|
+
client.sendInterruptMessage();
|
|
324
|
+
});
|
|
325
|
+
```
|
|
846
326
|
|
|
847
|
-
|
|
848
|
-
timestamp.textContent = new Date(msg.timestamp).toLocaleTimeString();
|
|
849
|
-
timestamp.className = "timestamp";
|
|
327
|
+
**Voice Mode Interruption Pattern:**
|
|
850
328
|
|
|
851
|
-
|
|
852
|
-
messageDiv.appendChild(content);
|
|
853
|
-
messageDiv.appendChild(timestamp);
|
|
854
|
-
chatContainer.appendChild(messageDiv);
|
|
855
|
-
});
|
|
329
|
+
When implementing voice mode, interrupt the bot when the user starts speaking:
|
|
856
330
|
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
331
|
+
```typescript
|
|
332
|
+
// When user enters voice mode
|
|
333
|
+
const enterVoiceMode = async () => {
|
|
334
|
+
// Interrupt any ongoing bot response
|
|
335
|
+
convaiClient.sendInterruptMessage();
|
|
860
336
|
|
|
861
|
-
//
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
if (text && client.state.isConnected) {
|
|
865
|
-
client.sendUserTextMessage(text);
|
|
866
|
-
inputElement.value = "";
|
|
867
|
-
}
|
|
868
|
-
});
|
|
337
|
+
// Unmute microphone
|
|
338
|
+
await convaiClient.audioControls.unmuteAudio();
|
|
339
|
+
};
|
|
869
340
|
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
});
|
|
341
|
+
// When user exits voice mode
|
|
342
|
+
const exitVoiceMode = async () => {
|
|
343
|
+
// Interrupt any ongoing bot response
|
|
344
|
+
convaiClient.sendInterruptMessage();
|
|
875
345
|
|
|
876
|
-
//
|
|
877
|
-
|
|
878
|
-
|
|
346
|
+
// Mute microphone
|
|
347
|
+
await convaiClient.audioControls.muteAudio();
|
|
348
|
+
};
|
|
879
349
|
```
|
|
880
350
|
|
|
881
|
-
|
|
882
|
-
|
|
883
|
-
### Audio Visualizer
|
|
351
|
+
### User Microphone Mute/Unmute
|
|
884
352
|
|
|
885
|
-
|
|
353
|
+
Control the user's microphone input.
|
|
886
354
|
|
|
887
|
-
|
|
355
|
+
**React:**
|
|
888
356
|
|
|
889
357
|
```tsx
|
|
890
|
-
|
|
891
|
-
import { useEffect, useRef, useState } from "react";
|
|
892
|
-
|
|
893
|
-
function AudioVisualizer() {
|
|
358
|
+
function AudioControls() {
|
|
894
359
|
const convaiClient = useConvaiClient({
|
|
895
|
-
|
|
896
|
-
characterId: "your-character-id",
|
|
360
|
+
/* config */
|
|
897
361
|
});
|
|
898
362
|
|
|
899
|
-
const
|
|
900
|
-
|
|
901
|
-
|
|
902
|
-
useEffect(() => {
|
|
903
|
-
if (!convaiClient.room) return;
|
|
904
|
-
|
|
905
|
-
let animationId: number;
|
|
906
|
-
let analyzer: AnalyserNode | null = null;
|
|
907
|
-
let dataArray: Uint8Array | null = null;
|
|
908
|
-
|
|
909
|
-
const setupAnalyzer = async () => {
|
|
910
|
-
const audioContext = new AudioContext();
|
|
911
|
-
|
|
912
|
-
// Get remote participant (bot)
|
|
913
|
-
const remoteParticipants = Array.from(
|
|
914
|
-
convaiClient.room.remoteParticipants.values()
|
|
915
|
-
);
|
|
916
|
-
|
|
917
|
-
if (remoteParticipants.length === 0) return;
|
|
363
|
+
const handleMute = async () => {
|
|
364
|
+
await convaiClient.audioControls.muteAudio();
|
|
365
|
+
};
|
|
918
366
|
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
|
|
922
|
-
);
|
|
367
|
+
const handleUnmute = async () => {
|
|
368
|
+
await convaiClient.audioControls.unmuteAudio();
|
|
369
|
+
};
|
|
923
370
|
|
|
924
|
-
|
|
371
|
+
const handleToggle = async () => {
|
|
372
|
+
await convaiClient.audioControls.toggleAudio();
|
|
373
|
+
};
|
|
925
374
|
|
|
926
|
-
|
|
927
|
-
|
|
375
|
+
return (
|
|
376
|
+
<div>
|
|
377
|
+
<button onClick={handleMute}>Mute</button>
|
|
378
|
+
<button onClick={handleUnmute}>Unmute</button>
|
|
379
|
+
<button onClick={handleToggle}>Toggle</button>
|
|
380
|
+
<p>Muted: {convaiClient.audioControls.isAudioMuted ? "Yes" : "No"}</p>
|
|
381
|
+
</div>
|
|
382
|
+
);
|
|
383
|
+
}
|
|
384
|
+
```
|
|
928
385
|
|
|
929
|
-
|
|
930
|
-
const mediaStream = new MediaStream([audioTrack.mediaStreamTrack]);
|
|
386
|
+
**Vanilla:**
|
|
931
387
|
|
|
932
|
-
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
analyzer.fftSize = 256;
|
|
388
|
+
```typescript
|
|
389
|
+
// Mute microphone
|
|
390
|
+
await client.audioControls.muteAudio();
|
|
936
391
|
|
|
937
|
-
|
|
938
|
-
|
|
392
|
+
// Unmute microphone
|
|
393
|
+
await client.audioControls.unmuteAudio();
|
|
939
394
|
|
|
940
|
-
|
|
941
|
-
|
|
942
|
-
if (!analyzer || !dataArray) return;
|
|
395
|
+
// Toggle mute state
|
|
396
|
+
await client.audioControls.toggleAudio();
|
|
943
397
|
|
|
944
|
-
|
|
398
|
+
// Check mute state
|
|
399
|
+
const isMuted = client.audioControls.isAudioMuted;
|
|
945
400
|
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
const average = sum / dataArray.length;
|
|
949
|
-
const normalizedLevel = average / 255;
|
|
401
|
+
// Enable audio (request permissions if needed)
|
|
402
|
+
await client.audioControls.enableAudio();
|
|
950
403
|
|
|
951
|
-
|
|
404
|
+
// Disable audio
|
|
405
|
+
await client.audioControls.disableAudio();
|
|
406
|
+
```
|
|
952
407
|
|
|
953
|
-
|
|
954
|
-
drawVisualizer(dataArray);
|
|
408
|
+
**Audio Device Management:**
|
|
955
409
|
|
|
956
|
-
|
|
957
|
-
|
|
410
|
+
```typescript
|
|
411
|
+
// Get available audio devices
|
|
412
|
+
const devices = await convaiClient.audioControls.getAudioDevices();
|
|
958
413
|
|
|
959
|
-
|
|
960
|
-
|
|
414
|
+
// Set specific audio device
|
|
415
|
+
await convaiClient.audioControls.setAudioDevice(deviceId);
|
|
961
416
|
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
if (!canvas) return;
|
|
417
|
+
// Monitor audio level
|
|
418
|
+
convaiClient.audioControls.startAudioLevelMonitoring();
|
|
965
419
|
|
|
966
|
-
|
|
967
|
-
|
|
420
|
+
convaiClient.audioControls.on("audioLevelChange", (level) => {
|
|
421
|
+
console.log("Audio level:", level);
|
|
422
|
+
// level is a number between 0 and 1
|
|
423
|
+
});
|
|
968
424
|
|
|
969
|
-
|
|
970
|
-
|
|
425
|
+
convaiClient.audioControls.stopAudioLevelMonitoring();
|
|
426
|
+
```
|
|
971
427
|
|
|
972
|
-
|
|
428
|
+
**Audio State Monitoring:**
|
|
973
429
|
|
|
974
|
-
|
|
975
|
-
|
|
430
|
+
```typescript
|
|
431
|
+
// React
|
|
432
|
+
const { isAudioMuted } = convaiClient;
|
|
433
|
+
|
|
434
|
+
// Core API (event-based)
|
|
435
|
+
convaiClient.audioControls.on("audioStateChange", (state) => {
|
|
436
|
+
console.log("Audio enabled:", state.isAudioEnabled);
|
|
437
|
+
console.log("Audio muted:", state.isAudioMuted);
|
|
438
|
+
console.log("Audio level:", state.audioLevel);
|
|
439
|
+
});
|
|
440
|
+
```
|
|
976
441
|
|
|
977
|
-
|
|
978
|
-
const barHeight = (dataArray[i] / 255) * height;
|
|
442
|
+
### Character TTS Mute/Unmute
|
|
979
443
|
|
|
980
|
-
|
|
981
|
-
ctx.fillRect(x, height - barHeight, barWidth, barHeight);
|
|
444
|
+
Control whether the character's responses are spoken aloud (text-to-speech).
|
|
982
445
|
|
|
983
|
-
|
|
984
|
-
}
|
|
985
|
-
};
|
|
446
|
+
**React:**
|
|
986
447
|
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
448
|
+
```tsx
|
|
449
|
+
function TTSControls() {
|
|
450
|
+
const convaiClient = useConvaiClient({
|
|
451
|
+
/* config */
|
|
452
|
+
});
|
|
990
453
|
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
}, [convaiClient.room, convaiClient.state.isConnected]);
|
|
454
|
+
const handleToggleTTS = (enabled: boolean) => {
|
|
455
|
+
convaiClient.toggleTts(enabled);
|
|
456
|
+
};
|
|
995
457
|
|
|
996
458
|
return (
|
|
997
459
|
<div>
|
|
998
|
-
<
|
|
999
|
-
|
|
1000
|
-
width={800}
|
|
1001
|
-
height={200}
|
|
1002
|
-
style={{ border: "1px solid #ccc" }}
|
|
1003
|
-
/>
|
|
1004
|
-
<div>Audio Level: {(audioLevel * 100).toFixed(0)}%</div>
|
|
1005
|
-
<div>
|
|
1006
|
-
Bot is {convaiClient.state.isSpeaking ? "speaking" : "silent"}
|
|
1007
|
-
</div>
|
|
460
|
+
<button onClick={() => handleToggleTTS(true)}>Enable TTS</button>
|
|
461
|
+
<button onClick={() => handleToggleTTS(false)}>Disable TTS</button>
|
|
1008
462
|
</div>
|
|
1009
463
|
);
|
|
1010
464
|
}
|
|
1011
465
|
```
|
|
1012
466
|
|
|
1013
|
-
|
|
467
|
+
**Vanilla:**
|
|
1014
468
|
|
|
1015
469
|
```typescript
|
|
1016
|
-
|
|
470
|
+
// Enable text-to-speech (character will speak responses)
|
|
471
|
+
client.toggleTts(true);
|
|
472
|
+
|
|
473
|
+
// Disable text-to-speech (character will only send text, no audio)
|
|
474
|
+
client.toggleTts(false);
|
|
475
|
+
```
|
|
476
|
+
|
|
477
|
+
**Initial TTS Configuration:**
|
|
1017
478
|
|
|
479
|
+
```typescript
|
|
480
|
+
// Set TTS state during connection
|
|
1018
481
|
const client = new ConvaiClient({
|
|
1019
482
|
apiKey: "your-api-key",
|
|
1020
483
|
characterId: "your-character-id",
|
|
484
|
+
ttsEnabled: true, // Enable TTS by default
|
|
1021
485
|
});
|
|
1022
486
|
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
|
|
1030
|
-
|
|
1031
|
-
let analyzer: AnalyserNode | null = null;
|
|
1032
|
-
let dataArray: Uint8Array | null = null;
|
|
1033
|
-
let animationId: number;
|
|
487
|
+
// Or disable initially
|
|
488
|
+
const client = new ConvaiClient({
|
|
489
|
+
apiKey: "your-api-key",
|
|
490
|
+
characterId: "your-character-id",
|
|
491
|
+
ttsEnabled: false, // Disable TTS
|
|
492
|
+
});
|
|
493
|
+
```
|
|
1034
494
|
|
|
1035
|
-
|
|
1036
|
-
const audioContext = new AudioContext();
|
|
495
|
+
### Voice Mode Implementation
|
|
1037
496
|
|
|
1038
|
-
|
|
1039
|
-
const participant = remoteParticipants[0];
|
|
1040
|
-
const audioTracks = Array.from(participant.audioTrackPublications.values());
|
|
1041
|
-
const audioTrack = audioTracks[0].track;
|
|
497
|
+
Voice mode allows users to speak instead of typing. The widget automatically handles voice mode, but you can implement it manually.
|
|
1042
498
|
|
|
1043
|
-
|
|
1044
|
-
const source = audioContext.createMediaStreamSource(mediaStream);
|
|
499
|
+
**React - Manual Voice Mode:**
|
|
1045
500
|
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
|
|
501
|
+
```tsx
|
|
502
|
+
import { useConvaiClient } from "@convai/web-sdk";
|
|
503
|
+
import { useState, useEffect } from "react";
|
|
1049
504
|
|
|
1050
|
-
|
|
505
|
+
function CustomChatInterface() {
|
|
506
|
+
const convaiClient = useConvaiClient({
|
|
507
|
+
/* config */
|
|
508
|
+
});
|
|
509
|
+
const [isVoiceMode, setIsVoiceMode] = useState(false);
|
|
1051
510
|
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
511
|
+
const enterVoiceMode = async () => {
|
|
512
|
+
// Interrupt any ongoing bot response
|
|
513
|
+
convaiClient.sendInterruptMessage();
|
|
1055
514
|
|
|
1056
|
-
|
|
515
|
+
// Unmute microphone
|
|
516
|
+
await convaiClient.audioControls.unmuteAudio();
|
|
1057
517
|
|
|
1058
|
-
|
|
1059
|
-
|
|
518
|
+
setIsVoiceMode(true);
|
|
519
|
+
};
|
|
1060
520
|
|
|
1061
|
-
const
|
|
1062
|
-
|
|
521
|
+
const exitVoiceMode = async () => {
|
|
522
|
+
// Interrupt any ongoing bot response
|
|
523
|
+
convaiClient.sendInterruptMessage();
|
|
1063
524
|
|
|
1064
|
-
|
|
1065
|
-
|
|
525
|
+
// Mute microphone
|
|
526
|
+
await convaiClient.audioControls.muteAudio();
|
|
1066
527
|
|
|
1067
|
-
|
|
1068
|
-
|
|
528
|
+
setIsVoiceMode(false);
|
|
529
|
+
};
|
|
1069
530
|
|
|
1070
|
-
|
|
1071
|
-
|
|
531
|
+
// Monitor user transcription for voice input
|
|
532
|
+
useEffect(() => {
|
|
533
|
+
const transcription = convaiClient.userTranscription;
|
|
534
|
+
if (transcription && isVoiceMode) {
|
|
535
|
+
// Display real-time transcription
|
|
536
|
+
console.log("User is saying:", transcription);
|
|
537
|
+
}
|
|
538
|
+
}, [convaiClient.userTranscription, isVoiceMode]);
|
|
1072
539
|
|
|
1073
|
-
|
|
540
|
+
return (
|
|
541
|
+
<div>
|
|
542
|
+
{isVoiceMode ? (
|
|
543
|
+
<div>
|
|
544
|
+
<p>Listening: {convaiClient.userTranscription}</p>
|
|
545
|
+
<button onClick={exitVoiceMode}>Stop Voice Mode</button>
|
|
546
|
+
</div>
|
|
547
|
+
) : (
|
|
548
|
+
<button onClick={enterVoiceMode}>Start Voice Mode</button>
|
|
549
|
+
)}
|
|
550
|
+
</div>
|
|
551
|
+
);
|
|
1074
552
|
}
|
|
1075
|
-
|
|
1076
|
-
animate();
|
|
1077
|
-
|
|
1078
|
-
// Cleanup
|
|
1079
|
-
// cancelAnimationFrame(animationId);
|
|
1080
|
-
// audioRenderer.destroy();
|
|
1081
|
-
// await client.disconnect();
|
|
1082
|
-
```
|
|
1083
|
-
|
|
1084
|
-
---
|
|
1085
|
-
|
|
1086
|
-
### Message Types
|
|
1087
|
-
|
|
1088
|
-
All messages from `convaiClient.chatMessages` have a `type` field:
|
|
1089
|
-
|
|
1090
|
-
```typescript
|
|
1091
|
-
type ChatMessageType =
|
|
1092
|
-
| "user" // User's sent message (raw)
|
|
1093
|
-
| "user-transcription" // Real-time speech-to-text from user
|
|
1094
|
-
| "user-llm-text" // User text processed by LLM (final)
|
|
1095
|
-
| "convai" // Character's response (raw)
|
|
1096
|
-
| "bot-llm-text" // Character's LLM-generated text (final)
|
|
1097
|
-
| "bot-emotion" // Character's emotional state
|
|
1098
|
-
| "emotion" // Generic emotion
|
|
1099
|
-
| "behavior-tree" // Behavior tree response
|
|
1100
|
-
| "action" // Action execution
|
|
1101
|
-
| "interrupt-bot"; // Interrupt message
|
|
1102
553
|
```
|
|
1103
554
|
|
|
1104
|
-
**
|
|
555
|
+
**Vanilla - Manual Voice Mode:**
|
|
1105
556
|
|
|
1106
557
|
```typescript
|
|
1107
|
-
|
|
1108
|
-
(msg) => msg.type === "user-llm-text" || msg.type === "bot-llm-text"
|
|
1109
|
-
);
|
|
1110
|
-
```
|
|
558
|
+
let isVoiceMode = false;
|
|
1111
559
|
|
|
1112
|
-
|
|
560
|
+
const enterVoiceMode = async () => {
|
|
561
|
+
// Interrupt any ongoing bot response
|
|
562
|
+
client.sendInterruptMessage();
|
|
1113
563
|
|
|
1114
|
-
|
|
564
|
+
// Unmute microphone
|
|
565
|
+
await client.audioControls.unmuteAudio();
|
|
1115
566
|
|
|
1116
|
-
|
|
567
|
+
isVoiceMode = true;
|
|
568
|
+
updateUI();
|
|
569
|
+
};
|
|
1117
570
|
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
apiKey: string;
|
|
571
|
+
const exitVoiceMode = async () => {
|
|
572
|
+
// Interrupt any ongoing bot response
|
|
573
|
+
client.sendInterruptMessage();
|
|
1122
574
|
|
|
1123
|
-
|
|
1124
|
-
|
|
575
|
+
// Mute microphone
|
|
576
|
+
await client.audioControls.muteAudio();
|
|
1125
577
|
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
* When not provided: anonymous mode, no persistent memory
|
|
1130
|
-
*/
|
|
1131
|
-
endUserId?: string;
|
|
578
|
+
isVoiceMode = false;
|
|
579
|
+
updateUI();
|
|
580
|
+
};
|
|
1132
581
|
|
|
1133
|
-
|
|
1134
|
-
|
|
582
|
+
// Monitor user transcription
|
|
583
|
+
client.on("userTranscriptionChange", (transcription) => {
|
|
584
|
+
if (isVoiceMode && transcription) {
|
|
585
|
+
// Display real-time transcription
|
|
586
|
+
document.getElementById("transcription").textContent = transcription;
|
|
587
|
+
}
|
|
588
|
+
});
|
|
1135
589
|
|
|
1136
|
-
|
|
1137
|
-
|
|
1138
|
-
|
|
1139
|
-
* If false, connection_type will be "audio" (audio only).
|
|
1140
|
-
* ⚠️ REQUIRED for video and screen share features.
|
|
1141
|
-
*/
|
|
1142
|
-
enableVideo?: boolean;
|
|
590
|
+
function updateUI() {
|
|
591
|
+
const voiceButton = document.getElementById("voice-btn");
|
|
592
|
+
const transcriptionDiv = document.getElementById("transcription");
|
|
1143
593
|
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
594
|
+
if (isVoiceMode) {
|
|
595
|
+
voiceButton.textContent = "Stop Voice Mode";
|
|
596
|
+
transcriptionDiv.style.display = "block";
|
|
597
|
+
} else {
|
|
598
|
+
voiceButton.textContent = "Start Voice Mode";
|
|
599
|
+
transcriptionDiv.style.display = "none";
|
|
600
|
+
}
|
|
601
|
+
}
|
|
602
|
+
```
|
|
1149
603
|
|
|
1150
|
-
|
|
1151
|
-
* Start with microphone on when connecting (default: false).
|
|
1152
|
-
* If false, microphone stays off until user enables it.
|
|
1153
|
-
*/
|
|
1154
|
-
startWithAudioOn?: boolean;
|
|
604
|
+
**Voice Mode with State Monitoring:**
|
|
1155
605
|
|
|
1156
|
-
|
|
1157
|
-
|
|
1158
|
-
|
|
1159
|
-
|
|
1160
|
-
|
|
606
|
+
```typescript
|
|
607
|
+
// Monitor agent state to handle voice mode transitions
|
|
608
|
+
convaiClient.on("stateChange", (state) => {
|
|
609
|
+
if (isVoiceMode) {
|
|
610
|
+
switch (state.agentState) {
|
|
611
|
+
case "listening":
|
|
612
|
+
// User can speak
|
|
613
|
+
console.log("Bot is listening");
|
|
614
|
+
break;
|
|
615
|
+
case "thinking":
|
|
616
|
+
// Bot is processing
|
|
617
|
+
console.log("Bot is thinking");
|
|
618
|
+
break;
|
|
619
|
+
case "speaking":
|
|
620
|
+
// Bot is responding
|
|
621
|
+
console.log("Bot is speaking");
|
|
622
|
+
// Optionally interrupt if user wants to speak
|
|
623
|
+
break;
|
|
624
|
+
}
|
|
625
|
+
}
|
|
626
|
+
});
|
|
1161
627
|
```
|
|
1162
628
|
|
|
1163
629
|
### Connection Management
|
|
1164
630
|
|
|
631
|
+
**Connect:**
|
|
632
|
+
|
|
1165
633
|
```typescript
|
|
1166
|
-
//
|
|
634
|
+
// React - config passed to hook
|
|
635
|
+
const convaiClient = useConvaiClient({
|
|
636
|
+
apiKey: "your-api-key",
|
|
637
|
+
characterId: "your-character-id",
|
|
638
|
+
});
|
|
639
|
+
|
|
640
|
+
// Or connect manually
|
|
1167
641
|
await convaiClient.connect({
|
|
1168
642
|
apiKey: "your-api-key",
|
|
1169
643
|
characterId: "your-character-id",
|
|
1170
|
-
enableVideo: true,
|
|
1171
644
|
});
|
|
1172
645
|
|
|
1173
|
-
//
|
|
646
|
+
// Vanilla
|
|
647
|
+
const client = new ConvaiClient();
|
|
648
|
+
await client.connect({
|
|
649
|
+
apiKey: "your-api-key",
|
|
650
|
+
characterId: "your-character-id",
|
|
651
|
+
});
|
|
652
|
+
```
|
|
653
|
+
|
|
654
|
+
**Disconnect:**
|
|
655
|
+
|
|
656
|
+
```typescript
|
|
1174
657
|
await convaiClient.disconnect();
|
|
658
|
+
```
|
|
1175
659
|
|
|
1176
|
-
|
|
660
|
+
**Reconnect:**
|
|
661
|
+
|
|
662
|
+
```typescript
|
|
1177
663
|
await convaiClient.reconnect();
|
|
664
|
+
```
|
|
665
|
+
|
|
666
|
+
**Reset Session:**
|
|
1178
667
|
|
|
1179
|
-
|
|
668
|
+
```typescript
|
|
669
|
+
// Clear conversation history and start new session
|
|
1180
670
|
convaiClient.resetSession();
|
|
671
|
+
```
|
|
1181
672
|
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
673
|
+
**Connection State:**
|
|
674
|
+
|
|
675
|
+
```typescript
|
|
676
|
+
// React
|
|
677
|
+
const { state } = convaiClient;
|
|
678
|
+
console.log("Connected:", state.isConnected);
|
|
679
|
+
console.log("Connecting:", state.isConnecting);
|
|
680
|
+
console.log("Agent state:", state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
|
|
681
|
+
|
|
682
|
+
// Core API (event-based)
|
|
683
|
+
convaiClient.on("stateChange", (state) => {
|
|
684
|
+
console.log("State changed:", state);
|
|
685
|
+
});
|
|
686
|
+
|
|
687
|
+
convaiClient.on("connect", () => {
|
|
688
|
+
console.log("Connected");
|
|
689
|
+
});
|
|
690
|
+
|
|
691
|
+
convaiClient.on("disconnect", () => {
|
|
692
|
+
console.log("Disconnected");
|
|
693
|
+
});
|
|
1187
694
|
```
 
 ### Messaging
 
+**Send Text Message:**
+
 ```typescript
-// Send text message
 convaiClient.sendUserTextMessage("Hello, how are you?");
+```
+
+**Send Trigger Message:**
 
-
+```typescript
+// Trigger specific character action
 convaiClient.sendTriggerMessage("greet", "User entered the room");
 
-//
-convaiClient.
+// Trigger without message
+convaiClient.sendTriggerMessage("wave");
+```
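
As a usage note, triggers pair naturally with UI events. A minimal sketch; the `#wave-btn` element is hypothetical:

```typescript
// Fire a character trigger from a (hypothetical) button on the page.
document.querySelector("#wave-btn")?.addEventListener("click", () => {
  convaiClient.sendTriggerMessage("wave");
});
```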
 
-
-convaiClient.updateTemplateKeys({ user_name: "John" });
-convaiClient.updateDynamicInfo({ text: "User is browsing products" });
+**Update Context:**
 
-
-
+```typescript
+// Update template keys (e.g., user name, location)
+convaiClient.updateTemplateKeys({
+  user_name: "John",
+  location: "New York",
+});
 
-//
-
+// Update dynamic information
+convaiClient.updateDynamicInfo({
+  text: "User is currently browsing the products page",
+});
 ```
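
`updateDynamicInfo` lends itself to being called whenever the user's context changes. A sketch using only the documented call; the route-to-description mapping and the `onRouteChange` hook are assumptions:

```typescript
// Illustrative mapping from app routes to context strings for the character.
const pageContext: Record<string, string> = {
  "/products": "User is currently browsing the products page",
  "/checkout": "User is on the checkout page",
};

// Hypothetical hook the app would call on navigation.
function onRouteChange(path: string): void {
  const text = pageContext[path];
  if (text) {
    convaiClient.updateDynamicInfo({ text });
  }
}
```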
 
-
+**Message History:**
 
 ```typescript
-//
-
-await convaiClient.audioControls.unmuteAudio();
-await convaiClient.audioControls.toggleAudio();
+// React
+const { chatMessages } = convaiClient;
 
-//
-
+// Core API (event-based)
+convaiClient.on("message", (message: ChatMessage) => {
+  console.log("New message:", message.content);
+  console.log("Message type:", message.type);
+});
 
-
-
+convaiClient.on("messagesChange", (messages: ChatMessage[]) => {
+  console.log("All messages:", messages);
+});
+```
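
For a plain-DOM transcript, the `messagesChange` event can mirror the full history into the page. A sketch; the `#chat-log` element is hypothetical, and only the `type` and `content` fields shown above are used:

```typescript
import type { ChatMessage } from "@convai/web-sdk";

// Mirror the conversation into a (hypothetical) #chat-log element.
const chatLog = document.querySelector("#chat-log");

convaiClient.on("messagesChange", (messages: ChatMessage[]) => {
  if (!chatLog) return;
  chatLog.textContent = messages
    .map((m) => `${m.type}: ${m.content}`)
    .join("\n");
});
```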
 
-
-await convaiClient.audioControls.setAudioDevice(deviceId);
+**Message Types:**
 
-
-
-
-
-
-
+```typescript
+type ChatMessageType =
+  | "user" // User's sent message
+  | "convai" // Character's response
+  | "user-transcription" // Real-time speech-to-text from user
+  | "bot-llm-text" // Character's LLM-generated text
+  | "emotion" // Character's emotional state
+  | "behavior-tree" // Behavior tree response
+  | "action" // Action execution
+  | "bot-emotion" // Bot emotional response
+  | "user-llm-text" // User text processed by LLM
+  | "interrupt-bot"; // Interrupt message
 ```
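
Given this union, a display layer typically keeps only the conversational types and ignores bookkeeping messages. A sketch (the helper and the choice of types to keep are illustrative):

```typescript
import type { ChatMessage } from "@convai/web-sdk";

// Keep only the types a chat transcript usually renders.
const DISPLAY_TYPES = new Set(["user", "convai"]);

function displayableMessages(messages: ChatMessage[]): ChatMessage[] {
  return messages.filter((m) => DISPLAY_TYPES.has(m.type));
}
```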
 
-###
+### State Monitoring
 
-
+**Agent State:**
 
 ```typescript
-//
-
-await convaiClient.videoControls.disableVideo();
-await convaiClient.videoControls.toggleVideo();
+// React
+const { state } = convaiClient;
 
-// Check
-
+// Check specific states
+if (state.isListening) {
+  console.log("Bot is listening");
+}
 
-
-
+if (state.isThinking) {
+  console.log("Bot is thinking");
+}
 
-
-
+if (state.isSpeaking) {
+  console.log("Bot is speaking");
+}
 
-//
-
+// Combined state
+console.log(state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
 ```
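
A common use of these flags is gating input while the bot is busy. A sketch wiring `stateChange` to a hypothetical send button:

```typescript
// Disable a (hypothetical) send button while the bot is thinking or speaking.
const sendButton = document.querySelector<HTMLButtonElement>("#send-btn");

convaiClient.on("stateChange", (state) => {
  if (sendButton) {
    sendButton.disabled = state.isThinking || state.isSpeaking;
  }
});
```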
 
-
-
-**⚠️ Requires `enableVideo: true` in config.**
+**User Transcription:**
 
 ```typescript
-//
-
-await convaiClient.screenShareControls.enableScreenShareWithAudio();
-await convaiClient.screenShareControls.disableScreenShare();
-await convaiClient.screenShareControls.toggleScreenShare();
+// React
+const { userTranscription } = convaiClient;
 
-//
-
+// Core API (event-based)
+convaiClient.on("userTranscriptionChange", (transcription: string) => {
+  console.log("User is saying:", transcription);
+});
 ```
 
-
+**Bot Ready State:**
+
+```typescript
+// React
+const { isBotReady } = convaiClient;
+
+// Core API (event-based)
+convaiClient.on("botReady", () => {
+  console.log("Bot is ready to receive messages");
+});
+```
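
Since the bot only accepts messages once ready, an opening line can be queued behind this event; the greeting text is illustrative:

```typescript
// Send an opening message only after the bot signals readiness.
convaiClient.on("botReady", () => {
  convaiClient.sendUserTextMessage("Hi! Can you introduce yourself?");
});
```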
 
-## Getting Credentials
+## Getting Convai Credentials
 
 1. Visit [convai.com](https://convai.com) and create an account
 2. Navigate to your dashboard
@@ -1282,71 +816,41 @@ console.log(convaiClient.isScreenShareActive);
 4. Copy your **API Key** from the dashboard
 5. Copy your **Character ID** from the character details
 
-
-
-## TypeScript Support
-
-All exports are fully typed:
-
-**React:**
+## Import Paths
 
 ```typescript
-
-
-  ConvaiConfig,
+// Default: React version (backward compatible)
+import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
 
-
-
-
-  // Messages
-  ChatMessage,
-  ChatMessageType,
+// Explicit React import
+import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
 
-
-
-  ConvaiClient,
+// Vanilla JS/TS
+import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
 
-
-
-  VideoControls,
-  ScreenShareControls,
-} from "@convai/web-sdk/react";
+// Core only (no UI, framework agnostic)
+import { ConvaiClient } from "@convai/web-sdk/core";
 ```
 
-
+## TypeScript Support
+
+All exports are fully typed:
 
 ```typescript
 import type {
-
+  ConvaiClient,
   ConvaiConfig,
-
-  // State
   ConvaiClientState,
-
-  // Messages
   ChatMessage,
-  ChatMessageType,
-
-  // Client
-  IConvaiClient,
-  ConvaiClient,
-
-  // Controls
   AudioControls,
   VideoControls,
   ScreenShareControls,
-
-
-  VanillaWidget,
-  VanillaWidgetOptions,
-} from "@convai/web-sdk/vanilla";
+  IConvaiClient,
+} from "@convai/web-sdk";
 ```
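
These types can annotate application code directly. A small sketch, assuming `ConvaiClientState` describes the object passed to `stateChange` handlers (consistent with the state examples above):

```typescript
import type { ConvaiClientState } from "@convai/web-sdk";

// Standalone, typed state handler.
const logState = (state: ConvaiClientState): void => {
  console.log("agentState:", state.agentState);
};

convaiClient.on("stateChange", logState);
```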
 
----
-
 ## Support
 
--
--
--
-- **Issues**: [GitHub Issues](https://github.com/convai/web-sdk/issues)
+- [Convai Forum](https://forum.convai.com)
+- [API Reference](./API_REFERENCE.md)
+- [Convai Website](https://convai.com)