@convai/web-sdk 0.1.1-beta.5 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +720 -213
- package/dist/core/AudioManager.d.ts.map +1 -1
- package/dist/core/AudioManager.js +10 -0
- package/dist/core/AudioManager.js.map +1 -1
- package/dist/core/ConvaiClient.d.ts +4 -0
- package/dist/core/ConvaiClient.d.ts.map +1 -1
- package/dist/core/ConvaiClient.js +28 -0
- package/dist/core/ConvaiClient.js.map +1 -1
- package/dist/core/MessageHandler.d.ts.map +1 -1
- package/dist/core/MessageHandler.js +1 -0
- package/dist/core/MessageHandler.js.map +1 -1
- package/dist/core/ScreenShareManager.d.ts.map +1 -1
- package/dist/core/ScreenShareManager.js +4 -0
- package/dist/core/ScreenShareManager.js.map +1 -1
- package/dist/core/VideoManager.d.ts.map +1 -1
- package/dist/core/VideoManager.js +2 -0
- package/dist/core/VideoManager.js.map +1 -1
- package/dist/core/types.d.ts +13 -1
- package/dist/core/types.d.ts.map +1 -1
- package/dist/react/components/ConvaiWidget.d.ts +3 -0
- package/dist/react/components/ConvaiWidget.d.ts.map +1 -1
- package/dist/react/components/ConvaiWidget.js +20 -12
- package/dist/react/components/ConvaiWidget.js.map +1 -1
- package/dist/react/components/index.d.ts +0 -1
- package/dist/react/components/index.d.ts.map +1 -1
- package/dist/react/components/index.js +0 -2
- package/dist/react/components/index.js.map +1 -1
- package/dist/react/hooks/useConvaiClient.d.ts +3 -0
- package/dist/react/hooks/useConvaiClient.d.ts.map +1 -1
- package/dist/react/hooks/useConvaiClient.js +34 -0
- package/dist/react/hooks/useConvaiClient.js.map +1 -1
- package/dist/vanilla/ConvaiWidget.d.ts +1 -1
- package/dist/vanilla/ConvaiWidget.d.ts.map +1 -1
- package/dist/vanilla/ConvaiWidget.js +625 -367
- package/dist/vanilla/ConvaiWidget.js.map +1 -1
- package/dist/vanilla/index.d.ts +0 -2
- package/dist/vanilla/index.d.ts.map +1 -1
- package/dist/vanilla/index.js +0 -2
- package/dist/vanilla/index.js.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
# @convai/web-sdk

JavaScript/TypeScript SDK for Convai AI voice assistants. Build voice-powered AI interactions for web applications with real-time audio/video streaming. Supports both React and Vanilla JavaScript/TypeScript.

## Installation

```bash
npm install @convai/web-sdk
```

## Basic Setup

### React

```tsx
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";

function App() {
  const convaiClient = useConvaiClient({
    apiKey: "your-api-key",
    characterId: "your-character-id",
  });

  return <ConvaiWidget convaiClient={convaiClient} />;
}
```

### Vanilla TypeScript

```typescript
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";

// Create client with configuration
const client = new ConvaiClient({
  apiKey: "your-api-key",
  characterId: "your-character-id",
});

// Create widget - auto-connects on first user click
const widget = createConvaiWidget(document.body, {
  convaiClient: client,
});

// Cleanup when done
widget.destroy();
```

## Exports

### React Exports (`@convai/web-sdk` or `@convai/web-sdk/react`)

**Components:**

- `ConvaiWidget` - Main chat widget component

**Hooks:**

- `useConvaiClient(config?)` - Main client hook
- `useCharacterInfo(characterId, apiKey)` - Fetch character metadata
- `useLocalCameraTrack()` - Get local camera track

**Core Client:**

- `ConvaiClient` - Core client class

**Types:**

- `ConvaiConfig` - Configuration interface
- `ConvaiClientState` - Client state interface
- `ChatMessage` - Message interface
- `IConvaiClient` - Client interface
- `AudioControls` - Audio control interface
- `VideoControls` - Video control interface
- `ScreenShareControls` - Screen share control interface

**Components:**

- `AudioRenderer` - Audio playback component
- `AudioContext` - Audio context provider

### Vanilla Exports (`@convai/web-sdk/vanilla`)

**Functions:**

- `createConvaiWidget(container, options)` - Create widget instance
- `destroyConvaiWidget(widget)` - Destroy widget instance

**Classes:**

- `ConvaiClient` - Core client class
- `AudioRenderer` - Audio playback handler

**Types:**

- `VanillaWidget` - Widget instance interface
- `VanillaWidgetOptions` - Widget options interface
- `IConvaiClient` - Client interface
- `ConvaiConfig` - Configuration interface
- `ConvaiClientState` - Client state interface
- `ChatMessage` - Message interface

### Core Exports (`@convai/web-sdk/core`)

**Classes:**

- `ConvaiClient` - Main client class
- `AudioManager` - Audio management
- `VideoManager` - Video management
- `ScreenShareManager` - Screen share management
- `MessageHandler` - Message handling
- `EventEmitter` - Event emitter base class

**Types:**

- All types from React/Vanilla exports
- `ConvaiClientType` - Type alias for ConvaiClient

## Props and Configuration

### ConvaiWidget Props (React)

```tsx
interface ConvaiWidgetProps {
  /** Convai client instance (required) */
  convaiClient: IConvaiClient & {
    activity?: string;
    isAudioMuted: boolean;
    isVideoEnabled: boolean;
    isScreenShareActive: boolean;
  };
  /** Show video toggle button in settings (default: true) */
  showVideo?: boolean;
  /** Show screen share toggle button in settings (default: true) */
  showScreenShare?: boolean;
}
```

### createConvaiWidget Options (Vanilla)

```typescript
interface VanillaWidgetOptions {
  /** Convai client instance (required) */
  convaiClient: IConvaiClient & {
    activity?: string;
    chatMessages: ChatMessage[];
  };
  /** Show video toggle button in settings (default: true) */
  showVideo?: boolean;
  /** Show screen share toggle button in settings (default: true) */
  showScreenShare?: boolean;
}
```

### ConvaiConfig

```typescript
interface ConvaiConfig {
  /** Your Convai API key from convai.com dashboard (required) */
  apiKey: string;
  /** The Character ID to connect to (required) */
  characterId: string;
  /**
   * End user identifier for speaker management (optional).
   * When provided: enables long-term memory and analytics
   * When not provided: anonymous mode, no persistent memory
   */
  endUserId?: string;
  /** Custom Convai API URL (optional, defaults to production endpoint) */
  url?: string;
  /**
   * Enable video capability (default: false).
   * If true, connection_type will be "video" (supports audio, video, and screenshare).
   * If false, connection_type will be "audio" (audio only).
   */
  enableVideo?: boolean;
  /**
   * Start with video camera on when connecting (default: false).
   * Only works if enableVideo is true.
   */
  startWithVideoOn?: boolean;
  /**
   * Start with microphone on when connecting (default: false).
   * If false, microphone stays off until user enables it.
   */
  startWithAudioOn?: boolean;
  /** Enable text-to-speech audio generation (default: true) */
  ttsEnabled?: boolean;
}
```

## Features

### Video Enabled Chat

To enable video capabilities, set `enableVideo: true` in your configuration. This enables audio, video, and screen sharing.

**React:**

```tsx
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";

function App() {
  const convaiClient = useConvaiClient({
    apiKey: "your-api-key",
    characterId: "your-character-id",
    enableVideo: true,
    startWithVideoOn: false, // Camera off by default
  });

  return (
    <ConvaiWidget
      convaiClient={convaiClient}
      showVideo={true}
      showScreenShare={true}
    />
  );
}
```

**Vanilla:**

```typescript
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";

const client = new ConvaiClient({
  apiKey: "your-api-key",
  characterId: "your-character-id",
  enableVideo: true,
  startWithVideoOn: false,
});

const widget = createConvaiWidget(document.body, {
  convaiClient: client,
  showVideo: true,
  showScreenShare: true,
});
```

**Manual Video Controls:**

```typescript
// Enable video camera
await convaiClient.videoControls.enableVideo();

// Disable video camera
await convaiClient.videoControls.disableVideo();

// Toggle video
await convaiClient.videoControls.toggleVideo();

// Check video state
const isVideoEnabled = convaiClient.videoControls.isVideoEnabled;

// Set video quality
await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'

// Get available video devices
const devices = await convaiClient.videoControls.getVideoDevices();

// Set specific video device
await convaiClient.videoControls.setVideoDevice(deviceId);
```

**Screen Sharing:**

```typescript
// Enable screen share
await convaiClient.screenShareControls.enableScreenShare();

// Enable screen share with audio
await convaiClient.screenShareControls.enableScreenShareWithAudio();

// Disable screen share
await convaiClient.screenShareControls.disableScreenShare();

// Toggle screen share
await convaiClient.screenShareControls.toggleScreenShare();

// Check screen share state
const isActive = convaiClient.screenShareControls.isScreenShareActive;
```

**Video State Monitoring:**

```typescript
// React
const { isVideoEnabled } = convaiClient;

// Core API (event-based)
convaiClient.videoControls.on("videoStateChange", (state) => {
  console.log("Video enabled:", state.isVideoEnabled);
  console.log("Video hidden:", state.isVideoHidden);
});
```

### Interruption

Interrupt the character's current response to allow the user to speak immediately.

**React:**

```tsx
function ChatInterface() {
  const convaiClient = useConvaiClient({
    /* config */
  });

  const handleInterrupt = () => {
    // Interrupt the bot's current response
    convaiClient.sendInterruptMessage();
  };

  return <button onClick={handleInterrupt}>Interrupt</button>;
}
```

**Vanilla:**

```typescript
const interruptButton = document.getElementById("interrupt-btn");

interruptButton.addEventListener("click", () => {
  client.sendInterruptMessage();
});
```

**Voice Mode Interruption Pattern:**

When implementing voice mode, interrupt the bot when the user starts speaking:

```typescript
// When user enters voice mode
const enterVoiceMode = async () => {
  // Interrupt any ongoing bot response
  convaiClient.sendInterruptMessage();

  // Unmute microphone
  await convaiClient.audioControls.unmuteAudio();
};

// When user exits voice mode
const exitVoiceMode = async () => {
  // Interrupt any ongoing bot response
  convaiClient.sendInterruptMessage();

  // Mute microphone
  await convaiClient.audioControls.muteAudio();
};
```

### User Microphone Mute/Unmute

Control the user's microphone input.

**React:**

```tsx
function AudioControls() {
  const convaiClient = useConvaiClient({
    /* config */
  });

  const handleMute = async () => {
    await convaiClient.audioControls.muteAudio();
  };

  const handleUnmute = async () => {
    await convaiClient.audioControls.unmuteAudio();
  };

  const handleToggle = async () => {
    await convaiClient.audioControls.toggleAudio();
  };

  return (
    <div>
      <button onClick={handleMute}>Mute</button>
      <button onClick={handleUnmute}>Unmute</button>
      <button onClick={handleToggle}>Toggle</button>
      <p>Muted: {convaiClient.audioControls.isAudioMuted ? "Yes" : "No"}</p>
    </div>
  );
}
```

**Vanilla:**

```typescript
// Mute microphone
await client.audioControls.muteAudio();

// Unmute microphone
await client.audioControls.unmuteAudio();

// Toggle mute state
await client.audioControls.toggleAudio();

// Check mute state
const isMuted = client.audioControls.isAudioMuted;

// Enable audio (request permissions if needed)
await client.audioControls.enableAudio();

// Disable audio
await client.audioControls.disableAudio();
```

**Audio Device Management:**

```typescript
// Get available audio devices
const devices = await convaiClient.audioControls.getAudioDevices();

// Set specific audio device
await convaiClient.audioControls.setAudioDevice(deviceId);

// Monitor audio level
convaiClient.audioControls.startAudioLevelMonitoring();

convaiClient.audioControls.on("audioLevelChange", (level) => {
  console.log("Audio level:", level);
  // level is a number between 0 and 1
});

convaiClient.audioControls.stopAudioLevelMonitoring();
```

**Audio State Monitoring:**

```typescript
// React
const { isAudioMuted } = convaiClient;

// Core API (event-based)
convaiClient.audioControls.on("audioStateChange", (state) => {
  console.log("Audio enabled:", state.isAudioEnabled);
  console.log("Audio muted:", state.isAudioMuted);
  console.log("Audio level:", state.audioLevel);
});
```

### Character TTS Mute/Unmute

Control whether the character's responses are spoken aloud (text-to-speech).

**React:**

```tsx
function TTSControls() {
  const convaiClient = useConvaiClient({
    /* config */
  });

  const handleToggleTTS = (enabled: boolean) => {
    convaiClient.toggleTts(enabled);
  };

  return (
    <div>
      <button onClick={() => handleToggleTTS(true)}>Enable TTS</button>
      <button onClick={() => handleToggleTTS(false)}>Disable TTS</button>
    </div>
  );
}
```

**Vanilla:**

```typescript
// Enable text-to-speech (character will speak responses)
client.toggleTts(true);

// Disable text-to-speech (character will only send text, no audio)
client.toggleTts(false);
```

**Initial TTS Configuration:**

```typescript
// Set TTS state during connection
const client = new ConvaiClient({
  apiKey: "your-api-key",
  characterId: "your-character-id",
  ttsEnabled: true, // Enable TTS by default
});

// Or disable initially
const client = new ConvaiClient({
  apiKey: "your-api-key",
  characterId: "your-character-id",
  ttsEnabled: false, // Disable TTS
});
```

### Voice Mode Implementation

Voice mode allows users to speak instead of typing. The widget automatically handles voice mode, but you can implement it manually.

**React - Manual Voice Mode:**

```tsx
import { useConvaiClient } from "@convai/web-sdk";
import { useState, useEffect } from "react";

function CustomChatInterface() {
  const convaiClient = useConvaiClient({
    /* config */
  });
  const [isVoiceMode, setIsVoiceMode] = useState(false);

  const enterVoiceMode = async () => {
    // Interrupt any ongoing bot response
    convaiClient.sendInterruptMessage();

    // Unmute microphone
    await convaiClient.audioControls.unmuteAudio();

    setIsVoiceMode(true);
  };

  const exitVoiceMode = async () => {
    // Interrupt any ongoing bot response
    convaiClient.sendInterruptMessage();

    // Mute microphone
    await convaiClient.audioControls.muteAudio();

    setIsVoiceMode(false);
  };

  // Monitor user transcription for voice input
  useEffect(() => {
    const transcription = convaiClient.userTranscription;
    if (transcription && isVoiceMode) {
      // Display real-time transcription
      console.log("User is saying:", transcription);
    }
  }, [convaiClient.userTranscription, isVoiceMode]);

  return (
    <div>
      {isVoiceMode ? (
        <div>
          <p>Listening: {convaiClient.userTranscription}</p>
          <button onClick={exitVoiceMode}>Stop Voice Mode</button>
        </div>
      ) : (
        <button onClick={enterVoiceMode}>Start Voice Mode</button>
      )}
    </div>
  );
}
```

**Vanilla - Manual Voice Mode:**

```typescript
let isVoiceMode = false;

const enterVoiceMode = async () => {
  // Interrupt any ongoing bot response
  client.sendInterruptMessage();

  // Unmute microphone
  await client.audioControls.unmuteAudio();

  isVoiceMode = true;
  updateUI();
};

const exitVoiceMode = async () => {
  // Interrupt any ongoing bot response
  client.sendInterruptMessage();

  // Mute microphone
  await client.audioControls.muteAudio();

  isVoiceMode = false;
  updateUI();
};

// Monitor user transcription
client.on("userTranscriptionChange", (transcription) => {
  if (isVoiceMode && transcription) {
    // Display real-time transcription
    document.getElementById("transcription").textContent = transcription;
  }
});

function updateUI() {
  const voiceButton = document.getElementById("voice-btn");
  const transcriptionDiv = document.getElementById("transcription");

  if (isVoiceMode) {
    voiceButton.textContent = "Stop Voice Mode";
    transcriptionDiv.style.display = "block";
  } else {
    voiceButton.textContent = "Start Voice Mode";
    transcriptionDiv.style.display = "none";
  }
}
```

**Voice Mode with State Monitoring:**

```typescript
// Monitor agent state to handle voice mode transitions
convaiClient.on("stateChange", (state) => {
  if (isVoiceMode) {
    switch (state.agentState) {
      case "listening":
        // User can speak
        console.log("Bot is listening");
        break;
      case "thinking":
        // Bot is processing
        console.log("Bot is thinking");
        break;
      case "speaking":
        // Bot is responding
        console.log("Bot is speaking");
        // Optionally interrupt if user wants to speak
        break;
    }
  }
});
```

### Connection Management

**Connect:**

```typescript
// React - config passed to hook
const convaiClient = useConvaiClient({
  apiKey: "your-api-key",
  characterId: "your-character-id",
});

// Or connect manually
await convaiClient.connect({
  apiKey: "your-api-key",
  characterId: "your-character-id",
});

// Vanilla
const client = new ConvaiClient();
await client.connect({
  apiKey: "your-api-key",
  characterId: "your-character-id",
});
```

**Disconnect:**

```typescript
await convaiClient.disconnect();
```

**Reconnect:**

```typescript
await convaiClient.reconnect();
```

**Reset Session:**

```typescript
// Clear conversation history and start new session
convaiClient.resetSession();
```

**Connection State:**

```typescript
// React
const { state } = convaiClient;
console.log("Connected:", state.isConnected);
console.log("Connecting:", state.isConnecting);
console.log("Agent state:", state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'

// Core API (event-based)
convaiClient.on("stateChange", (state) => {
  console.log("State changed:", state);
});

convaiClient.on("connect", () => {
  console.log("Connected");
});

convaiClient.on("disconnect", () => {
  console.log("Disconnected");
});
```

### Messaging

**Send Text Message:**

```typescript
convaiClient.sendUserTextMessage("Hello, how are you?");
```

**Send Trigger Message:**

```typescript
// Trigger specific character action
convaiClient.sendTriggerMessage("greet", "User entered the room");

// Trigger without message
convaiClient.sendTriggerMessage("wave");
```

**Update Context:**

```typescript
// Update template keys (e.g., user name, location)
convaiClient.updateTemplateKeys({
  user_name: "John",
  location: "New York",
});

// Update dynamic information
convaiClient.updateDynamicInfo({
  text: "User is currently browsing the products page",
});
```

**Message History:**

```typescript
// React
const { chatMessages } = convaiClient;

// Core API (event-based)
convaiClient.on("message", (message: ChatMessage) => {
  console.log("New message:", message.content);
  console.log("Message type:", message.type);
});

convaiClient.on("messagesChange", (messages: ChatMessage[]) => {
  console.log("All messages:", messages);
});
```

**Message Types:**

```typescript
type ChatMessageType =
  | "user" // User's sent message
  | "convai" // Character's response
  | "user-transcription" // Real-time speech-to-text from user
  | "bot-llm-text" // Character's LLM-generated text
  | "emotion" // Character's emotional state
  | "behavior-tree" // Behavior tree response
  | "action" // Action execution
  | "bot-emotion" // Bot emotional response
  | "user-llm-text" // User text processed by LLM
  | "interrupt-bot"; // Interrupt message
```

### State Monitoring

**Agent State:**

```typescript
// React
const { state } = convaiClient;

// Check specific states
if (state.isListening) {
  console.log("Bot is listening");
}

if (state.isThinking) {
  console.log("Bot is thinking");
}

if (state.isSpeaking) {
  console.log("Bot is speaking");
}

// Combined state
console.log(state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
```

**User Transcription:**

```typescript
// React
const { userTranscription } = convaiClient;

// Core API (event-based)
convaiClient.on("userTranscriptionChange", (transcription: string) => {
  console.log("User is saying:", transcription);
});
```

**Bot Ready State:**

```typescript
// React
const { isBotReady } = convaiClient;

// Core API (event-based)
convaiClient.on("botReady", () => {
  console.log("Bot is ready to receive messages");
});
```

## Getting Convai Credentials

1. Visit [convai.com](https://convai.com) and create an account
2. Navigate to your dashboard
3. Create a new character or use an existing one
4. Copy your **API Key** from the dashboard
5. Copy your **Character ID** from the character details

## Import Paths

```typescript
// Default: React version (backward compatible)
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";

// Explicit React import
import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";

// Vanilla JS/TS
import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";

// Core only (no UI, framework agnostic)
import { ConvaiClient } from "@convai/web-sdk/core";
```

## TypeScript Support

All exports are fully typed:

```typescript
import type {
  // …
  AudioControls,
  VideoControls,
  ScreenShareControls,
  IConvaiClient,
} from "@convai/web-sdk";
```

## Browser Support

- Chrome/Edge: Full support
- Firefox: Full support
- Safari: Full support
- Mobile browsers: Supported

## Dependencies

### Peer Dependencies (React)

- `react ^18.0.0`
- `react-dom ^18.0.0`

### Included Dependencies

- `styled-components` - Styling
- `framer-motion` - Animations
- `react-icons` - Icon components

## License

MIT

## Support

- [GitHub Issues](https://github.com/convai/web-sdk/issues)
- [API Reference](./API_REFERENCE.md)
- [Convai Website](https://convai.com)
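Since the `message` event delivers every entry in this union, handlers typically branch on `message.type`. A minimal sketch that surfaces only the conversational text messages (which types to treat as user-visible is a judgment call, not something this README prescribes):

```typescript
convaiClient.on("message", (message: ChatMessage) => {
  switch (message.type) {
    case "user":
    case "user-transcription":
      console.log("User:", message.content);
      break;
    case "convai":
    case "bot-llm-text":
      console.log("Character:", message.content);
      break;
    case "interrupt-bot":
      console.log("Response was interrupted");
      break;
    default:
      // emotion, behavior-tree, action, etc. are ignored in this sketch.
      break;
  }
});
```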