@convai/web-sdk 0.3.1-beta.3 → 0.3.2-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (255) hide show
  1. package/README.md +535 -1077
  2. package/dist/core/AudioManager.d.ts.map +1 -0
  3. package/dist/core/AudioManager.js +262 -0
  4. package/dist/core/AudioManager.js.map +1 -0
  5. package/dist/core/BlendshapeQueue.d.ts +128 -0
  6. package/dist/core/BlendshapeQueue.d.ts.map +1 -0
  7. package/dist/core/BlendshapeQueue.js +229 -0
  8. package/dist/core/BlendshapeQueue.js.map +1 -0
  9. package/dist/{types/core → core}/ConvaiClient.d.ts +19 -15
  10. package/dist/core/ConvaiClient.d.ts.map +1 -0
  11. package/dist/core/ConvaiClient.js +623 -0
  12. package/dist/core/ConvaiClient.js.map +1 -0
  13. package/dist/core/EventEmitter.d.ts.map +1 -0
  14. package/dist/core/EventEmitter.js +68 -0
  15. package/dist/core/EventEmitter.js.map +1 -0
  16. package/dist/{types/core → core}/MessageHandler.d.ts +7 -0
  17. package/dist/core/MessageHandler.d.ts.map +1 -0
  18. package/dist/core/MessageHandler.js +333 -0
  19. package/dist/core/MessageHandler.js.map +1 -0
  20. package/dist/core/ScreenShareManager.d.ts.map +1 -0
  21. package/dist/core/ScreenShareManager.js +207 -0
  22. package/dist/core/ScreenShareManager.js.map +1 -0
  23. package/dist/core/VideoManager.d.ts.map +1 -0
  24. package/dist/core/VideoManager.js +205 -0
  25. package/dist/core/VideoManager.js.map +1 -0
  26. package/dist/{types/core → core}/index.d.ts +2 -0
  27. package/dist/core/index.d.ts.map +1 -0
  28. package/dist/core/index.js +14 -1970
  29. package/dist/core/index.js.map +1 -0
  30. package/dist/{types/core → core}/types.d.ts +12 -21
  31. package/dist/core/types.d.ts.map +1 -0
  32. package/dist/core/types.js +2 -0
  33. package/dist/core/types.js.map +1 -0
  34. package/dist/dev.d.ts +12 -0
  35. package/dist/dev.d.ts.map +1 -0
  36. package/dist/dev.js +12 -0
  37. package/dist/dev.js.map +1 -0
  38. package/dist/index.d.ts +4 -0
  39. package/dist/index.d.ts.map +1 -0
  40. package/dist/index.js +6 -0
  41. package/dist/index.js.map +1 -0
  42. package/dist/lipsync-helpers/arkitBlendshapeHelpers.d.ts.map +1 -0
  43. package/dist/lipsync-helpers/arkitBlendshapeHelpers.js +201 -0
  44. package/dist/lipsync-helpers/arkitBlendshapeHelpers.js.map +1 -0
  45. package/dist/lipsync-helpers/arkitOrder61.d.ts.map +1 -0
  46. package/dist/lipsync-helpers/arkitOrder61.js +287 -0
  47. package/dist/lipsync-helpers/arkitOrder61.js.map +1 -0
  48. package/dist/lipsync-helpers/arkitPhonemeReference.d.ts.map +1 -0
  49. package/dist/lipsync-helpers/arkitPhonemeReference.js +362 -0
  50. package/dist/lipsync-helpers/arkitPhonemeReference.js.map +1 -0
  51. package/dist/{types/lipsync-helpers → lipsync-helpers}/index.d.ts +1 -0
  52. package/dist/lipsync-helpers/index.d.ts.map +1 -0
  53. package/dist/lipsync-helpers/index.js +20 -1165
  54. package/dist/lipsync-helpers/index.js.map +1 -0
  55. package/dist/lipsync-helpers/metahumanOrder251.d.ts +115 -0
  56. package/dist/lipsync-helpers/metahumanOrder251.d.ts.map +1 -0
  57. package/dist/lipsync-helpers/metahumanOrder251.js +432 -0
  58. package/dist/lipsync-helpers/metahumanOrder251.js.map +1 -0
  59. package/dist/lipsync-helpers/neurosyncBlendshapeMapper.d.ts.map +1 -0
  60. package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js +315 -0
  61. package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js.map +1 -0
  62. package/dist/react/components/ConvaiWidget.d.ts +68 -0
  63. package/dist/react/components/ConvaiWidget.d.ts.map +1 -0
  64. package/dist/react/components/ConvaiWidget.js +505 -0
  65. package/dist/react/components/ConvaiWidget.js.map +1 -0
  66. package/dist/react/components/index.d.ts +2 -0
  67. package/dist/react/components/index.d.ts.map +1 -0
  68. package/dist/react/components/index.js +3 -0
  69. package/dist/react/components/index.js.map +1 -0
  70. package/dist/react/components/rtc-widget/components/AudioSettingsPanel.d.ts +10 -0
  71. package/dist/react/components/rtc-widget/components/AudioSettingsPanel.d.ts.map +1 -0
  72. package/dist/react/components/rtc-widget/components/AudioSettingsPanel.js +316 -0
  73. package/dist/react/components/rtc-widget/components/AudioSettingsPanel.js.map +1 -0
  74. package/dist/react/components/rtc-widget/components/AudioVisualizer.d.ts +36 -0
  75. package/dist/react/components/rtc-widget/components/AudioVisualizer.d.ts.map +1 -0
  76. package/dist/react/components/rtc-widget/components/AudioVisualizer.js +259 -0
  77. package/dist/react/components/rtc-widget/components/AudioVisualizer.js.map +1 -0
  78. package/dist/react/components/rtc-widget/components/ConviMessage.d.ts +10 -0
  79. package/dist/react/components/rtc-widget/components/ConviMessage.d.ts.map +1 -0
  80. package/dist/react/components/rtc-widget/components/ConviMessage.js +14 -0
  81. package/dist/react/components/rtc-widget/components/ConviMessage.js.map +1 -0
  82. package/dist/react/components/rtc-widget/components/FloatingVideo.d.ts +9 -0
  83. package/dist/react/components/rtc-widget/components/FloatingVideo.d.ts.map +1 -0
  84. package/dist/react/components/rtc-widget/components/FloatingVideo.js +122 -0
  85. package/dist/react/components/rtc-widget/components/FloatingVideo.js.map +1 -0
  86. package/dist/react/components/rtc-widget/components/MarkdownRenderer.d.ts +7 -0
  87. package/dist/react/components/rtc-widget/components/MarkdownRenderer.d.ts.map +1 -0
  88. package/dist/react/components/rtc-widget/components/MarkdownRenderer.js +68 -0
  89. package/dist/react/components/rtc-widget/components/MarkdownRenderer.js.map +1 -0
  90. package/dist/react/components/rtc-widget/components/MessageBubble.d.ts +10 -0
  91. package/dist/react/components/rtc-widget/components/MessageBubble.d.ts.map +1 -0
  92. package/dist/react/components/rtc-widget/components/MessageBubble.js +23 -0
  93. package/dist/react/components/rtc-widget/components/MessageBubble.js.map +1 -0
  94. package/dist/react/components/rtc-widget/components/MessageList.d.ts +11 -0
  95. package/dist/react/components/rtc-widget/components/MessageList.d.ts.map +1 -0
  96. package/dist/react/components/rtc-widget/components/MessageList.js +89 -0
  97. package/dist/react/components/rtc-widget/components/MessageList.js.map +1 -0
  98. package/dist/react/components/rtc-widget/components/UserMessage.d.ts +9 -0
  99. package/dist/react/components/rtc-widget/components/UserMessage.d.ts.map +1 -0
  100. package/dist/react/components/rtc-widget/components/UserMessage.js +15 -0
  101. package/dist/react/components/rtc-widget/components/UserMessage.js.map +1 -0
  102. package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.d.ts +6 -0
  103. package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.d.ts.map +1 -0
  104. package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.js +15 -0
  105. package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.js.map +1 -0
  106. package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.d.ts +25 -0
  107. package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.d.ts.map +1 -0
  108. package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.js +172 -0
  109. package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.js.map +1 -0
  110. package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.d.ts +17 -0
  111. package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.d.ts.map +1 -0
  112. package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.js +66 -0
  113. package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.js.map +1 -0
  114. package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.d.ts +12 -0
  115. package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.d.ts.map +1 -0
  116. package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.js +68 -0
  117. package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.js.map +1 -0
  118. package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.d.ts +12 -0
  119. package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.d.ts.map +1 -0
  120. package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.js +255 -0
  121. package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.js.map +1 -0
  122. package/dist/react/components/rtc-widget/components/conviComponents/index.d.ts +6 -0
  123. package/dist/react/components/rtc-widget/components/conviComponents/index.d.ts.map +1 -0
  124. package/dist/react/components/rtc-widget/components/conviComponents/index.js +6 -0
  125. package/dist/react/components/rtc-widget/components/conviComponents/index.js.map +1 -0
  126. package/dist/react/components/rtc-widget/components/index.d.ts +9 -0
  127. package/dist/react/components/rtc-widget/components/index.d.ts.map +1 -0
  128. package/dist/react/components/rtc-widget/components/index.js +15 -0
  129. package/dist/react/components/rtc-widget/components/index.js.map +1 -0
  130. package/dist/react/components/rtc-widget/index.d.ts +6 -0
  131. package/dist/react/components/rtc-widget/index.d.ts.map +1 -0
  132. package/dist/react/components/rtc-widget/index.js +9 -0
  133. package/dist/react/components/rtc-widget/index.js.map +1 -0
  134. package/dist/react/components/rtc-widget/styles/framerConfig.d.ts +116 -0
  135. package/dist/react/components/rtc-widget/styles/framerConfig.d.ts.map +1 -0
  136. package/dist/react/components/rtc-widget/styles/framerConfig.js +73 -0
  137. package/dist/react/components/rtc-widget/styles/framerConfig.js.map +1 -0
  138. package/dist/react/components/rtc-widget/styles/icons.d.ts +28 -0
  139. package/dist/react/components/rtc-widget/styles/icons.d.ts.map +1 -0
  140. package/dist/react/components/rtc-widget/styles/icons.js +257 -0
  141. package/dist/react/components/rtc-widget/styles/icons.js.map +1 -0
  142. package/dist/react/components/rtc-widget/styles/index.d.ts +6 -0
  143. package/dist/react/components/rtc-widget/styles/index.d.ts.map +1 -0
  144. package/dist/react/components/rtc-widget/styles/index.js +9 -0
  145. package/dist/react/components/rtc-widget/styles/index.js.map +1 -0
  146. package/dist/react/components/rtc-widget/styles/styledComponents.d.ts +90 -0
  147. package/dist/react/components/rtc-widget/styles/styledComponents.d.ts.map +1 -0
  148. package/dist/react/components/rtc-widget/styles/styledComponents.js +663 -0
  149. package/dist/react/components/rtc-widget/styles/styledComponents.js.map +1 -0
  150. package/dist/react/components/rtc-widget/styles/theme.d.ts +188 -0
  151. package/dist/react/components/rtc-widget/styles/theme.d.ts.map +1 -0
  152. package/dist/react/components/rtc-widget/styles/theme.js +290 -0
  153. package/dist/react/components/rtc-widget/styles/theme.js.map +1 -0
  154. package/dist/react/components/rtc-widget/types/index.d.ts +60 -0
  155. package/dist/react/components/rtc-widget/types/index.d.ts.map +1 -0
  156. package/dist/react/components/rtc-widget/types/index.js +2 -0
  157. package/dist/react/components/rtc-widget/types/index.js.map +1 -0
  158. package/dist/react/hooks/index.d.ts +4 -0
  159. package/dist/react/hooks/index.d.ts.map +1 -0
  160. package/dist/react/hooks/index.js +6 -0
  161. package/dist/react/hooks/index.js.map +1 -0
  162. package/dist/react/hooks/useCharacterInfo.d.ts +17 -0
  163. package/dist/react/hooks/useCharacterInfo.d.ts.map +1 -0
  164. package/dist/react/hooks/useCharacterInfo.js +60 -0
  165. package/dist/react/hooks/useCharacterInfo.js.map +1 -0
  166. package/dist/react/hooks/useConvaiClient.d.ts +35 -0
  167. package/dist/react/hooks/useConvaiClient.d.ts.map +1 -0
  168. package/dist/react/hooks/useConvaiClient.js +183 -0
  169. package/dist/react/hooks/useConvaiClient.js.map +1 -0
  170. package/dist/react/hooks/useLocalCameraTrack.d.ts +22 -0
  171. package/dist/react/hooks/useLocalCameraTrack.d.ts.map +1 -0
  172. package/dist/react/hooks/useLocalCameraTrack.js +34 -0
  173. package/dist/react/hooks/useLocalCameraTrack.js.map +1 -0
  174. package/dist/react/index.d.ts +7 -0
  175. package/dist/react/index.d.ts.map +1 -0
  176. package/dist/react/index.js +13 -0
  177. package/dist/react/index.js.map +1 -0
  178. package/dist/types/index.d.ts.map +1 -0
  179. package/dist/types/index.js +2 -0
  180. package/dist/types/index.js.map +1 -0
  181. package/dist/utils/LatencyMonitor.d.ts.map +1 -0
  182. package/dist/utils/LatencyMonitor.js +136 -0
  183. package/dist/utils/LatencyMonitor.js.map +1 -0
  184. package/dist/utils/logger.d.ts.map +1 -0
  185. package/dist/utils/logger.js +96 -0
  186. package/dist/utils/logger.js.map +1 -0
  187. package/dist/utils/speakerManagement.d.ts.map +1 -0
  188. package/dist/utils/speakerManagement.js +64 -0
  189. package/dist/utils/speakerManagement.js.map +1 -0
  190. package/dist/{types/vanilla → vanilla}/AudioRenderer.d.ts +5 -0
  191. package/dist/vanilla/AudioRenderer.d.ts.map +1 -0
  192. package/dist/vanilla/AudioRenderer.js +135 -0
  193. package/dist/vanilla/AudioRenderer.js.map +1 -0
  194. package/dist/vanilla/ConvaiWidget.d.ts.map +1 -0
  195. package/dist/vanilla/ConvaiWidget.js +1786 -0
  196. package/dist/vanilla/ConvaiWidget.js.map +1 -0
  197. package/dist/vanilla/icons.d.ts.map +1 -0
  198. package/dist/vanilla/icons.js +222 -0
  199. package/dist/vanilla/icons.js.map +1 -0
  200. package/dist/{types/vanilla → vanilla}/index.d.ts +1 -3
  201. package/dist/vanilla/index.d.ts.map +1 -0
  202. package/dist/vanilla/index.js +20 -5509
  203. package/dist/vanilla/index.js.map +1 -0
  204. package/dist/vanilla/styles.d.ts.map +1 -0
  205. package/dist/vanilla/styles.js +287 -0
  206. package/dist/vanilla/styles.js.map +1 -0
  207. package/dist/vanilla/types.d.ts +43 -0
  208. package/dist/vanilla/types.d.ts.map +1 -0
  209. package/dist/vanilla/types.js +2 -0
  210. package/dist/vanilla/types.js.map +1 -0
  211. package/package.json +33 -38
  212. package/CHANGELOG.md +0 -165
  213. package/dist/core/index.cjs +0 -1977
  214. package/dist/lipsync-helpers/index.cjs +0 -1195
  215. package/dist/types/core/AudioManager.d.ts.map +0 -1
  216. package/dist/types/core/ConvaiClient.d.ts.map +0 -1
  217. package/dist/types/core/EventEmitter.d.ts.map +0 -1
  218. package/dist/types/core/MessageHandler.d.ts.map +0 -1
  219. package/dist/types/core/ScreenShareManager.d.ts.map +0 -1
  220. package/dist/types/core/VideoManager.d.ts.map +0 -1
  221. package/dist/types/core/index.d.ts.map +0 -1
  222. package/dist/types/core/types.d.ts.map +0 -1
  223. package/dist/types/lipsync-helpers/arkitBlendshapeHelpers.d.ts.map +0 -1
  224. package/dist/types/lipsync-helpers/arkitOrder61.d.ts.map +0 -1
  225. package/dist/types/lipsync-helpers/arkitPhonemeReference.d.ts.map +0 -1
  226. package/dist/types/lipsync-helpers/index.d.ts.map +0 -1
  227. package/dist/types/lipsync-helpers/neurosyncBlendshapeMapper.d.ts.map +0 -1
  228. package/dist/types/types/index.d.ts.map +0 -1
  229. package/dist/types/utils/LatencyMonitor.d.ts.map +0 -1
  230. package/dist/types/utils/logger.d.ts.map +0 -1
  231. package/dist/types/utils/speakerManagement.d.ts.map +0 -1
  232. package/dist/types/vanilla/AudioRenderer.d.ts.map +0 -1
  233. package/dist/types/vanilla/ConvaiWidget.d.ts.map +0 -1
  234. package/dist/types/vanilla/icons.d.ts.map +0 -1
  235. package/dist/types/vanilla/index.d.ts.map +0 -1
  236. package/dist/types/vanilla/styles.d.ts.map +0 -1
  237. package/dist/types/vanilla/types.d.ts +0 -106
  238. package/dist/types/vanilla/types.d.ts.map +0 -1
  239. package/dist/umd/convai.umd.js +0 -1
  240. package/dist/vanilla/index.cjs +0 -5559
  241. /package/dist/{types/core → core}/AudioManager.d.ts +0 -0
  242. /package/dist/{types/core → core}/EventEmitter.d.ts +0 -0
  243. /package/dist/{types/core → core}/ScreenShareManager.d.ts +0 -0
  244. /package/dist/{types/core → core}/VideoManager.d.ts +0 -0
  245. /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitBlendshapeHelpers.d.ts +0 -0
  246. /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitOrder61.d.ts +0 -0
  247. /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitPhonemeReference.d.ts +0 -0
  248. /package/dist/{types/lipsync-helpers → lipsync-helpers}/neurosyncBlendshapeMapper.d.ts +0 -0
  249. /package/dist/types/{types/index.d.ts → index.d.ts} +0 -0
  250. /package/dist/{types/utils → utils}/LatencyMonitor.d.ts +0 -0
  251. /package/dist/{types/utils → utils}/logger.d.ts +0 -0
  252. /package/dist/{types/utils → utils}/speakerManagement.d.ts +0 -0
  253. /package/dist/{types/vanilla → vanilla}/ConvaiWidget.d.ts +0 -0
  254. /package/dist/{types/vanilla → vanilla}/icons.d.ts +0 -0
  255. /package/dist/{types/vanilla → vanilla}/styles.d.ts +0 -0
package/README.md CHANGED
@@ -1,47 +1,6 @@
1
1
  # @convai/web-sdk
2
2
 
3
- [![npm version](https://badge.fury.io/js/%40convai%2Fweb-sdk.svg)](https://www.npmjs.com/package/@convai/web-sdk)
4
-
5
- JavaScript/TypeScript SDK for building AI voice assistants with real-time audio/video streaming. Drop-in widgets for **React** and **Vanilla JavaScript/TypeScript** with customizable UI components.
6
-
7
- ---
8
-
9
- ## 📑 Table of Contents
10
-
11
- - [Installation](#installation)
12
- - [Quick Start](#quick-start)
13
- - [React - ConvaiWidget](#react---convaiwidget)
14
- - [Vanilla JS/TS - ConvaiWidget](#vanilla-jsts---convaiwidget)
15
- - [Core Concepts](#core-concepts)
16
- - [React SDK](#react-sdk)
17
- - [useConvaiClient Hook](#useconvaiclient-hook)
18
- - [AudioRenderer Component](#audiorenderer-component)
19
- - [AudioContext](#audiocontext)
20
- - [React Exports Reference](#react-exports-reference)
21
- - [Vanilla SDK](#vanilla-sdk)
22
- - [ConvaiClient Class](#convaiclient-class)
23
- - [AudioRenderer Class](#audiorenderer-class)
24
- - [Vanilla Exports Reference](#vanilla-exports-reference)
25
- - [Video & Screen Share](#video--screen-share)
26
- - [Critical Requirements](#critical-requirements)
27
- - [Enabling Video](#enabling-video)
28
- - [Enabling Screen Share](#enabling-screen-share)
29
- - [Building Custom UIs](#building-custom-uis)
30
- - [Custom Chat Interface](#custom-chat-interface)
31
- - [Audio Visualizer](#audio-visualizer)
32
- - [Message Types](#message-types)
33
- - [API Reference](#api-reference)
34
- - [Configuration](#configuration)
35
- - [Connection Management](#connection-management)
36
- - [Messaging](#messaging)
37
- - [Audio Controls](#audio-controls)
38
- - [Video Controls](#video-controls)
39
- - [Screen Share Controls](#screen-share-controls)
40
- - [Getting Credentials](#getting-credentials)
41
- - [TypeScript Support](#typescript-support)
42
- - [Support](#support)
43
-
44
- ---
3
+ JavaScript/TypeScript SDK for Convai AI voice assistants. Build voice-powered AI interactions for web applications with real-time audio/video streaming. Supports both React and Vanilla JavaScript/TypeScript.
45
4
 
46
5
  ## Installation
47
6
 
@@ -49,25 +8,14 @@ JavaScript/TypeScript SDK for building AI voice assistants with real-time audio/
49
8
  npm install @convai/web-sdk
50
9
  ```
51
10
 
52
- **Peer Dependencies (React only):**
11
+ ## Basic Setup
53
12
 
54
- ```bash
55
- npm install react@^18.0.0 react-dom@^18.0.0
56
- ```
57
-
58
- ---
59
-
60
- ## Quick Start
61
-
62
- ### React - ConvaiWidget
63
-
64
- The `ConvaiWidget` is a complete, pre-built chat interface with voice/video capabilities.
13
+ ### React
65
14
 
66
15
  ```tsx
67
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
16
+ import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
68
17
 
69
18
  function App() {
70
- // Initialize the Convai client
71
19
  const convaiClient = useConvaiClient({
72
20
  apiKey: "your-api-key",
73
21
  characterId: "your-character-id",
@@ -77,169 +25,7 @@ function App() {
77
25
  }
78
26
  ```
79
27
 
80
- **That's it!** The widget auto-connects on first user interaction and handles all UI/audio for you.
81
-
82
- ---
83
-
84
- ## 🤖 For AI Code Generators (v0, Lovable, Bolt, etc.)
85
-
86
- **If you're using an AI coding assistant to add Convai to your project, use this exact template to avoid errors:**
87
-
88
- ### Copy-Paste Template (Works Every Time)
89
-
90
- ```tsx
91
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
92
-
93
- export default function App() {
94
- // Step 1: Create the client with your credentials
95
- const convaiClient = useConvaiClient({
96
- apiKey: "your-api-key-here",
97
- characterId: "your-character-id-here",
98
- });
99
-
100
- // Step 2: Pass ONLY the client to the widget
101
- return <ConvaiWidget convaiClient={convaiClient} />;
102
- }
103
- ```
104
-
105
- ### Common Mistakes AI Tools Make
106
-
107
- ❌ **DON'T DO THIS:**
108
-
109
- ```tsx
110
- // Wrong: Passing props directly to ConvaiWidget
111
- <ConvaiWidget apiKey="..." characterId="..." />
112
-
113
- // Wrong: Stringifying the client
114
- <ConvaiWidget convaiClient={JSON.stringify(convaiClient)} />
115
-
116
- // Wrong: Spreading client properties
117
- <ConvaiWidget {...convaiClient} />
118
-
119
- // Wrong: Using client in string context
120
- const info = `Client: ${convaiClient}`; // "Cannot convert object to primitive value"
121
-
122
- // Wrong: Passing client through env vars
123
- const client = process.env.CONVAI_CLIENT; // This won't work
124
- ```
125
-
126
- ✅ **DO THIS:**
127
-
128
- ```tsx
129
- // Correct: Client created in component, passed as object
130
- const convaiClient = useConvaiClient({
131
- apiKey: "your-api-key",
132
- characterId: "your-character-id",
133
- });
134
-
135
- return <ConvaiWidget convaiClient={convaiClient} />;
136
- ```
137
-
138
- ### If You Get "Cannot convert object to primitive value"
139
-
140
- This error means you're using the client object in a primitive context. Check for:
141
-
142
- 1. **String concatenation:**
143
-
144
- ```tsx
145
- // ❌ Wrong
146
- console.log("Client: " + convaiClient);
147
-
148
- // ✅ Correct
149
- console.log("Connected:", convaiClient.state.isConnected);
150
- ```
151
-
152
- 2. **Template literals:**
153
-
154
- ```tsx
155
- // ❌ Wrong
156
- const text = `Client: ${convaiClient}`;
157
-
158
- // ✅ Correct
159
- const text = `Status: ${convaiClient.state.agentState}`;
160
- ```
161
-
162
- 3. **Using as a key:**
163
-
164
- ```tsx
165
- // ❌ Wrong
166
- <div key={convaiClient}>...</div>
167
-
168
- // ✅ Correct
169
- <div key="convai-widget">...</div>
170
- ```
171
-
172
- ### Complete Working Example for AI Tools
173
-
174
- ```tsx
175
- import React from "react";
176
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
177
-
178
- function App() {
179
- // Initialize the client
180
- const convaiClient = useConvaiClient({
181
- apiKey: "your-api-key-here",
182
- characterId: "your-character-id-here",
183
- enableVideo: false, // Set to true if you need video
184
- });
185
-
186
- // Optional: Add debug logging
187
- console.log("Convai client initialized:", !!convaiClient);
188
- console.log("Connection status:", convaiClient?.state?.isConnected);
189
-
190
- return (
191
- <div style={{ width: "100vw", height: "100vh" }}>
192
- <ConvaiWidget convaiClient={convaiClient} />
193
- </div>
194
- );
195
- }
196
-
197
- export default App;
198
- ```
199
-
200
- ### Prompt for AI Code Generators
201
-
202
- **Use this prompt to get correct code:**
203
-
204
- ```
205
- Add Convai voice chat to this app using @convai/web-sdk.
206
-
207
- CRITICAL RULES:
208
- 1. Import: import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
209
- 2. Create client INSIDE component: const convaiClient = useConvaiClient({ apiKey: "...", characterId: "..." });
210
- 3. Pass ONLY client to widget: <ConvaiWidget convaiClient={convaiClient} />
211
- 4. DO NOT pass apiKey or characterId directly to ConvaiWidget
212
- 5. DO NOT stringify, spread, or destructure the client object
213
- 6. DO NOT use the client object in string contexts
214
-
215
- Example:
216
- const convaiClient = useConvaiClient({ apiKey: "KEY", characterId: "ID" });
217
- return <ConvaiWidget convaiClient={convaiClient} />;
218
- ```
219
-
220
- ### Video & Screen Share with AI Tools
221
-
222
- If you need video/screen share, add these TWO changes:
223
-
224
- ```tsx
225
- // Step 1: Add enableVideo to client config
226
- const convaiClient = useConvaiClient({
227
- apiKey: "your-api-key",
228
- characterId: "your-character-id",
229
- enableVideo: true, // ← Required for video features
230
- });
231
-
232
- // Step 2: Show controls in widget
233
- <ConvaiWidget
234
- convaiClient={convaiClient}
235
- showVideo={true} // ← Shows video button
236
- showScreenShare={true} // ← Shows screen share button
237
- />;
238
- ```
239
-
240
- **Without `enableVideo: true`, video and screen share will NOT work even if you show the buttons.**
241
-
242
- ### Vanilla JS/TS - ConvaiWidget
28
+ ### Vanilla TypeScript
243
29
 
244
30
  ```typescript
245
31
  import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
@@ -250,7 +36,7 @@ const client = new ConvaiClient({
250
36
  characterId: "your-character-id",
251
37
  });
252
38
 
253
- // Create and mount widget - auto-connects on first user click
39
+ // Create widget - auto-connects on first user click
254
40
  const widget = createConvaiWidget(document.body, {
255
41
  convaiClient: client,
256
42
  });
@@ -259,1068 +45,770 @@ const widget = createConvaiWidget(document.body, {
259
45
  widget.destroy();
260
46
  ```
261
47
 
262
- ---
48
+ ## Exports
263
49
 
264
- ## Core Concepts
50
+ ### React Exports (`@convai/web-sdk` or `@convai/web-sdk/react`)
265
51
 
266
- ### The Architecture
52
+ **Components:**
267
53
 
268
- ```
269
- ┌─────────────────────────────────────────────────┐
270
- │ ConvaiWidget (UI Layer) │
271
- │ ├─ Chat Interface │
272
- │ ├─ Voice Mode │
273
- │ └─ Video/Screen Share UI │
274
- └─────────────────────────────────────────────────┘
275
-
276
- ┌─────────────────────────────────────────────────┐
277
- │ ConvaiClient (Core Logic) │
278
- │ ├─ Connection Management │
279
- │ ├─ Message Handling │
280
- │ ├─ State Management │
281
- │ └─ Audio/Video Controls │
282
- └─────────────────────────────────────────────────┘
283
-
284
- ┌─────────────────────────────────────────────────┐
285
- │ WebRTC Room (Communication Layer) │
286
- │ ├─ Real-time Audio/Video Streaming │
287
- │ ├─ Track Management │
288
- │ └─ Network Communication │
289
- └─────────────────────────────────────────────────┘
290
-
291
- ┌─────────────────────────────────────────────────┐
292
- │ AudioRenderer (Critical for Playback) │
293
- │ ├─ Attaches audio tracks to DOM │
294
- │ ├─ Manages audio elements │
295
- │ └─ Enables bot voice playback │
296
- └─────────────────────────────────────────────────┘
297
- ```
54
+ - `ConvaiWidget` - Main chat widget component
298
55
 
299
- ### Key Principles
56
+ **Hooks:**
300
57
 
301
- 1. **ConvaiClient** - The brain. Manages connection, state, and communication with Convai servers.
302
- 2. **AudioRenderer** - **CRITICAL**: Without this, you won't hear the bot. It renders audio to the user's speakers.
303
- 3. **ConvaiWidget** - The complete UI. Uses both ConvaiClient and AudioRenderer internally.
304
- 4. **Connection Type** - Determines capabilities:
305
- - `"audio"` (default) - Audio only
306
- - `"video"` - Audio + Video + Screen Share
58
+ - `useConvaiClient(config?)` - Main client hook
59
+ - `useCharacterInfo(characterId, apiKey)` - Fetch character metadata
60
+ - `useLocalCameraTrack()` - Get local camera track
307
61
 
308
- ---
62
+ **Core Client:**
309
63
 
310
- ## React SDK
64
+ - `ConvaiClient` - Core client class
311
65
 
312
- ### useConvaiClient Hook
66
+ **Types:**
313
67
 
314
- **Purpose**: Returns a fully configured `ConvaiClient` instance with reactive state updates.
315
-
316
- **When to Use**: Every React app using Convai needs this hook.
317
-
318
- **What It Does**:
319
-
320
- - Creates and manages a ConvaiClient instance
321
- - Provides reactive state (connection, messages, activity)
322
- - Handles connection lifecycle
323
- - Exposes audio/video/screen share controls
324
-
325
- ```tsx
326
- import { useConvaiClient } from "@convai/web-sdk/react";
327
-
328
- function ChatbotWrapper() {
329
- const convaiClient = useConvaiClient({
330
- apiKey: "your-api-key",
331
- characterId: "your-character-id",
332
- enableVideo: false, // Default: audio only
333
- startWithAudioOn: false, // Mic starts muted
334
- });
68
+ - `ConvaiConfig` - Configuration interface
69
+ - `ConvaiClientState` - Client state interface
70
+ - `ChatMessage` - Message interface
71
+ - `IConvaiClient` - Client interface
72
+ - `AudioControls` - Audio control interface
73
+ - `VideoControls` - Video control interface
74
+ - `ScreenShareControls` - Screen share control interface
335
75
 
336
- // Access reactive state
337
- const { state, chatMessages, userTranscription, isBotReady } = convaiClient;
76
+ **Audio Components:**
338
77
 
339
- // Use controls
340
- const handleMute = () => convaiClient.audioControls.muteAudio();
341
- const handleSend = () =>
342
- convaiClient.sendUserTextMessage("Hello, character!");
78
+ - `AudioRenderer` - Audio playback component
79
+ - `AudioContext` - Audio context provider
343
80
 
344
- return (
345
- <div>
346
- <p>Status: {state.agentState}</p>
347
- <p>Messages: {chatMessages.length}</p>
348
- <button onClick={handleMute}>Mute</button>
349
- <button onClick={handleSend}>Send</button>
350
- </div>
351
- );
352
- }
353
- ```
81
+ ### Vanilla Exports (`@convai/web-sdk/vanilla`)
354
82
 
355
- ### AudioRenderer Component
83
+ **Functions:**
356
84
 
357
- **Purpose**: Renders remote audio tracks to the user's speakers.
85
+ - `createConvaiWidget(container, options)` - Create widget instance
86
+ - `destroyConvaiWidget(widget)` - Destroy widget instance
358
87
 
359
- **⚠️ CRITICAL**: Without `AudioRenderer`, you will NOT hear the bot's voice.
88
+ **Classes:**
360
89
 
361
- **When to Use**:
90
+ - `ConvaiClient` - Core client class
91
+ - `AudioRenderer` - Audio playback handler
362
92
 
363
- - Always when building custom UIs
364
- - Already included in `ConvaiWidget` (no need to add separately)
93
+ **Types:**
365
94
 
366
- **How It Works**:
95
+ - `VanillaWidget` - Widget instance interface
96
+ - `VanillaWidgetOptions` - Widget options interface
97
+ - `IConvaiClient` - Client interface
98
+ - `ConvaiConfig` - Configuration interface
99
+ - `ConvaiClientState` - Client state interface
100
+ - `ChatMessage` - Message interface
367
101
 
368
- - Attaches to the WebRTC room
369
- - Automatically creates `<audio>` elements for remote participants (the bot)
370
- - Manages audio playback lifecycle
102
+ ### Core Exports (`@convai/web-sdk/core`)
371
103
 
372
- ```tsx
373
- import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
104
+ **Classes:**
374
105
 
375
- function CustomChatUI() {
376
- const convaiClient = useConvaiClient({
377
- apiKey: "your-api-key",
378
- characterId: "your-character-id",
379
- });
106
+ - `ConvaiClient` - Main client class
107
+ - `AudioManager` - Audio management
108
+ - `VideoManager` - Video management
109
+ - `ScreenShareManager` - Screen share management
110
+ - `MessageHandler` - Message handling
111
+ - `EventEmitter` - Event emitter base class
380
112
 
381
- return (
382
- <div>
383
- {/* CRITICAL: This component renders bot audio to speakers */}
384
- <AudioRenderer />
385
-
386
- {/* Your custom UI */}
387
- <div>
388
- {convaiClient.chatMessages.map((msg) => (
389
- <div key={msg.id}>{msg.content}</div>
390
- ))}
391
- </div>
392
- </div>
393
- );
394
- }
395
- ```
113
+ **Types:**
396
114
 
397
- ### AudioContext
115
+ - All types from React/Vanilla exports
116
+ - `ConvaiClientType` - Type alias for ConvaiClient
398
117
 
399
- **Purpose**: Provides the WebRTC Room to child components.
118
+ ## Props and Configuration
400
119
 
401
- **When to Use**: When building deeply nested custom UIs that need direct access to the audio room.
402
-
403
- **How It Works**: React Context that holds the active WebRTC room.
120
+ ### ConvaiWidget Props (React)
404
121
 
405
122
  ```tsx
406
- import {
407
- useConvaiClient,
408
- AudioRenderer,
409
- AudioContext,
410
- } from "@convai/web-sdk/react";
411
- import { useContext } from "react";
412
-
413
- function ChatbotWrapper() {
414
- const convaiClient = useConvaiClient({
415
- /* config */
416
- });
417
-
418
- return (
419
- <AudioContext.Provider value={convaiClient.room}>
420
- <AudioRenderer />
421
- <ChildComponent />
422
- </AudioContext.Provider>
423
- );
424
- }
425
-
426
- function ChildComponent() {
427
- const room = useContext(AudioContext);
428
- // Access WebRTC room directly
429
- console.log("Room state:", room?.state);
430
- return <div>Child has access to Room</div>;
123
+ interface ConvaiWidgetProps {
124
+ /** Convai client instance (required) */
125
+ convaiClient: IConvaiClient & {
126
+ activity?: string;
127
+ isAudioMuted: boolean;
128
+ isVideoEnabled: boolean;
129
+ isScreenShareActive: boolean;
130
+ };
131
+ /** Show video toggle button in settings (default: true) */
132
+ showVideo?: boolean;
133
+ /** Show screen share toggle button in settings (default: true) */
134
+ showScreenShare?: boolean;
431
135
  }
432
136
  ```
433
137
 
434
- ### React Exports Reference
435
-
436
- ```tsx
437
- // Components
438
- import { ConvaiWidget } from "@convai/web-sdk/react";
439
-
440
- // Hooks
441
- import { useConvaiClient, useCharacterInfo } from "@convai/web-sdk/react";
442
-
443
- // Audio Rendering (Critical)
444
- import { AudioRenderer, AudioContext } from "@convai/web-sdk/react";
445
-
446
- // Core Client (for advanced usage)
447
- import { ConvaiClient } from "@convai/web-sdk/react";
448
-
449
- // Types
450
- import type {
451
- ConvaiConfig,
452
- ConvaiClientState,
453
- ChatMessage,
454
- IConvaiClient,
455
- AudioControls,
456
- VideoControls,
457
- ScreenShareControls,
458
- } from "@convai/web-sdk/react";
459
- ```
460
-
461
- ---
462
-
463
- ## Vanilla SDK
464
-
465
- ### ConvaiClient Class
466
-
467
- **Purpose**: Core client for managing Convai connections in vanilla JavaScript/TypeScript.
468
-
469
- **When to Use**: Any non-React application or when you need full control.
470
-
471
- **What It Provides**:
472
-
473
- - Connection management
474
- - Message handling
475
- - State management (via events)
476
- - Audio/video/screen share controls
477
-
478
- ```typescript
479
- import { ConvaiClient } from "@convai/web-sdk/vanilla";
480
-
481
- const client = new ConvaiClient({
482
- apiKey: "your-api-key",
483
- characterId: "your-character-id",
484
- });
485
-
486
- // Connect
487
- await client.connect();
488
-
489
- // Listen to events
490
- client.on("stateChange", (state) => {
491
- console.log("Agent state:", state.agentState);
492
- });
493
-
494
- client.on("message", (message) => {
495
- console.log("New message:", message.content);
496
- });
497
-
498
- // Send messages
499
- client.sendUserTextMessage("Hello!");
500
-
501
- // Control audio
502
- await client.audioControls.muteAudio();
503
- await client.audioControls.unmuteAudio();
504
-
505
- // Disconnect
506
- await client.disconnect();
507
- ```
508
-
509
- ### AudioRenderer Class
510
-
511
- **Purpose**: Manages audio playback for vanilla JavaScript/TypeScript applications.
512
-
513
- **⚠️ CRITICAL**: Without this, you will NOT hear the bot's voice.
514
-
515
- **When to Use**:
516
-
517
- - Always when building custom vanilla UIs
518
- - Already included in vanilla `ConvaiWidget` (no need to add separately)
519
-
520
- **How It Works**:
521
-
522
- - Attaches to the WebRTC room
523
- - Automatically creates hidden `<audio>` elements
524
- - Manages audio playback for remote participants (the bot)
525
-
526
- ```typescript
527
- import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
528
-
529
- const client = new ConvaiClient({
530
- apiKey: "your-api-key",
531
- characterId: "your-character-id",
532
- });
533
-
534
- await client.connect();
535
-
536
- // CRITICAL: Create AudioRenderer to hear bot audio
537
- const audioRenderer = new AudioRenderer(client.room);
538
-
539
- // Your custom UI logic...
540
-
541
- // Cleanup
542
- audioRenderer.destroy();
543
- await client.disconnect();
544
- ```
545
-
546
- ### ConvaiClient Returns
547
-
548
- The vanilla `ConvaiClient` returns the same interface as React's `useConvaiClient`:
138
+ ### createConvaiWidget Options (Vanilla)
549
139
 
550
140
  ```typescript
551
- const client = new ConvaiClient({ apiKey: "...", characterId: "..." });
552
- await client.connect();
553
-
554
- // State & Connection
555
- client.state.isConnected; // boolean
556
- client.state.agentState; // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
557
- client.isBotReady; // boolean - ready for messages
558
-
559
- // Convenience Properties (match React)
560
- client.isAudioMuted; // boolean - mic muted
561
- client.isVideoEnabled; // boolean - camera on
562
- client.isScreenShareActive; // boolean - sharing screen
563
-
564
- // Messages
565
- client.chatMessages; // ChatMessage[]
566
- client.userTranscription; // string
567
-
568
- // Controls
569
- client.audioControls.muteAudio();
570
- client.videoControls.enableVideo();
571
- client.screenShareControls.toggleScreenShare();
141
+ interface VanillaWidgetOptions {
142
+ /** Convai client instance (required) */
143
+ convaiClient: IConvaiClient & {
144
+ activity?: string;
145
+ chatMessages: ChatMessage[];
146
+ };
147
+ /** Show video toggle button in settings (default: true) */
148
+ showVideo?: boolean;
149
+ /** Show screen share toggle button in settings (default: true) */
150
+ showScreenShare?: boolean;
151
+ }
572
152
  ```
573
153
 
574
- ### Vanilla Exports Reference
154
+ ### ConvaiConfig
575
155
 
576
156
  ```typescript
577
- // Widget
578
- import {
579
- createConvaiWidget,
580
- destroyConvaiWidget,
581
- } from "@convai/web-sdk/vanilla";
582
-
583
- // Core Client
584
- import { ConvaiClient } from "@convai/web-sdk/vanilla";
585
-
586
- // Audio Rendering (Critical)
587
- import { AudioRenderer } from "@convai/web-sdk/vanilla";
588
-
589
- // Lipsync Helpers (3D animation)
590
- import { ARKIT_BLENDSHAPE_NAMES } from "@convai/web-sdk/vanilla";
591
-
592
- // Types
593
- import type {
594
- VanillaWidget,
595
- VanillaWidgetOptions,
596
- VanillaConvaiClient,
597
- ConvaiConfig,
598
- ConvaiClientState,
599
- ChatMessage,
600
- IConvaiClient,
601
- AudioControls,
602
- VideoControls,
603
- ScreenShareControls,
604
- BlendshapeFrame,
605
- BlendshapeControls,
606
- } from "@convai/web-sdk/vanilla";
157
+ interface ConvaiConfig {
158
+ /** Your Convai API key from convai.com dashboard (required) */
159
+ apiKey: string;
160
+ /** The Character ID to connect to (required) */
161
+ characterId: string;
162
+ /**
163
+ * End user identifier for speaker management (optional).
164
+ * When provided: enables long-term memory and analytics
165
+ * When not provided: anonymous mode, no persistent memory
166
+ */
167
+ endUserId?: string;
168
+ /** Custom Convai API URL (optional, defaults to production endpoint) */
169
+ url?: string;
170
+ /**
171
+ * Enable video capability (default: false).
172
+ * If true, connection_type will be "video" (supports audio, video, and screen share).
173
+ * If false, connection_type will be "audio" (audio only).
174
+ */
175
+ enableVideo?: boolean;
176
+ /**
177
+ * Start with video camera on when connecting (default: false).
178
+ * Only works if enableVideo is true.
179
+ */
180
+ startWithVideoOn?: boolean;
181
+ /**
182
+ * Start with microphone on when connecting (default: false).
183
+ * If false, microphone stays off until user enables it.
184
+ */
185
+ startWithAudioOn?: boolean;
186
+ /** Enable text-to-speech audio generation (default: true) */
187
+ ttsEnabled?: boolean;
188
+ }
607
189
  ```
608
190
 
609
- ---
610
-
611
- ## Video & Screen Share
612
-
613
- ### Critical Requirements
614
-
615
- > ⚠️ **IMPORTANT**: Video and Screen Share features require **TWO** configuration changes:
616
-
617
- #### 1. Set `enableVideo: true` in Client Configuration
618
-
619
- This sets the connection type to `"video"` which enables video capabilities.
620
-
621
- #### 2. Set `showVideo` and/or `showScreenShare` in Widget Props
622
-
623
- This shows the UI controls for video/screen share.
191
+ ## Features
624
192
 
625
- **Without both, video features will NOT work.**
193
+ ### Video Enabled Chat
626
194
 
627
- ---
195
+ To enable video capabilities, set `enableVideo: true` in your configuration. This enables audio, video, and screen sharing.
628
196
 
629
- ### Enabling Video
630
-
631
- #### React
197
+ **React:**
632
198
 
633
199
  ```tsx
634
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
200
+ import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
635
201
 
636
202
  function App() {
637
- // ✅ STEP 1: Enable video in client config
638
203
  const convaiClient = useConvaiClient({
639
204
  apiKey: "your-api-key",
640
205
  characterId: "your-character-id",
641
- enableVideo: true, // ← REQUIRED for video
206
+ enableVideo: true,
642
207
  startWithVideoOn: false, // Camera off by default
643
208
  });
644
209
 
645
210
  return (
646
211
  <ConvaiWidget
647
212
  convaiClient={convaiClient}
648
- showVideo={true} // ← STEP 2: Show video controls
649
- showScreenShare={false} // Optional: hide screen share
213
+ showVideo={true}
214
+ showScreenShare={true}
650
215
  />
651
216
  );
652
217
  }
653
218
  ```
654
219
 
655
- #### Vanilla
220
+ **Vanilla:**
656
221
 
657
222
  ```typescript
658
223
  import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
659
224
 
660
- // ✅ STEP 1: Enable video in client config
661
225
  const client = new ConvaiClient({
662
226
  apiKey: "your-api-key",
663
227
  characterId: "your-character-id",
664
- enableVideo: true, // ← REQUIRED for video
228
+ enableVideo: true,
665
229
  startWithVideoOn: false,
666
230
  });
667
231
 
668
232
  const widget = createConvaiWidget(document.body, {
669
233
  convaiClient: client,
670
- showVideo: true, // ← STEP 2: Show video controls
671
- showScreenShare: false,
234
+ showVideo: true,
235
+ showScreenShare: true,
672
236
  });
673
237
  ```
674
238
 
675
- #### Manual Video Control
239
+ **Manual Video Controls:**
676
240
 
677
241
  ```typescript
678
- // Enable camera
242
+ // Enable video camera
679
243
  await convaiClient.videoControls.enableVideo();
680
244
 
681
- // Disable camera
245
+ // Disable video camera
682
246
  await convaiClient.videoControls.disableVideo();
683
247
 
684
- // Toggle camera
248
+ // Toggle video
685
249
  await convaiClient.videoControls.toggleVideo();
686
250
 
687
- // Check state
688
- console.log(convaiClient.isVideoEnabled);
251
+ // Check video state
252
+ const isVideoEnabled = convaiClient.videoControls.isVideoEnabled;
689
253
 
690
254
  // Set video quality
691
255
  await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'
692
256
 
693
- // Get available cameras
257
+ // Get available video devices
694
258
  const devices = await convaiClient.videoControls.getVideoDevices();
695
259
 
696
- // Switch camera
260
+ // Set specific video device
697
261
  await convaiClient.videoControls.setVideoDevice(deviceId);
698
262
  ```
699
263
 
700
- ---
701
-
702
- ### Enabling Screen Share
703
-
704
- Screen sharing **requires** `enableVideo: true` (connection type must be `"video"`).
705
-
706
- #### React
707
-
708
- ```tsx
709
- import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
710
-
711
- function App() {
712
- // ✅ STEP 1: Enable video (required for screen share)
713
- const convaiClient = useConvaiClient({
714
- apiKey: "your-api-key",
715
- characterId: "your-character-id",
716
- enableVideo: true, // ← REQUIRED for screen share
717
- });
718
-
719
- return (
720
- <ConvaiWidget
721
- convaiClient={convaiClient}
722
- showVideo={true} // Optional: show video controls
723
- showScreenShare={true} // ← STEP 2: Show screen share controls
724
- />
725
- );
726
- }
727
- ```
728
-
729
- #### Vanilla
264
+ **Screen Sharing:**
730
265
 
731
266
  ```typescript
732
- import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
733
-
734
- // ✅ STEP 1: Enable video (required for screen share)
735
- const client = new ConvaiClient({
736
- apiKey: "your-api-key",
737
- characterId: "your-character-id",
738
- enableVideo: true, // ← REQUIRED for screen share
739
- });
740
-
741
- const widget = createConvaiWidget(document.body, {
742
- convaiClient: client,
743
- showVideo: true,
744
- showScreenShare: true, // ← STEP 2: Show screen share controls
745
- });
746
- ```
747
-
748
- #### Manual Screen Share Control
749
-
750
- ```typescript
751
- // Start screen share
267
+ // Enable screen share
752
268
  await convaiClient.screenShareControls.enableScreenShare();
753
269
 
754
- // Start screen share with audio
270
+ // Enable screen share with audio
755
271
  await convaiClient.screenShareControls.enableScreenShareWithAudio();
756
272
 
757
- // Stop screen share
273
+ // Disable screen share
758
274
  await convaiClient.screenShareControls.disableScreenShare();
759
275
 
760
276
  // Toggle screen share
761
277
  await convaiClient.screenShareControls.toggleScreenShare();
762
278
 
763
- // Check state
764
- console.log(convaiClient.isScreenShareActive);
765
- ```
766
-
767
- ---
768
-
769
- ## Building Custom UIs
770
-
771
- ### Custom Chat Interface
772
-
773
- Use the `chatMessages` array from ConvaiClient to build your own chat UI.
774
-
775
- #### React Example
776
-
777
- ```tsx
778
- import { useConvaiClient, AudioRenderer } from "@convai/web-sdk/react";
779
- import { useState } from "react";
780
-
781
- function CustomChatUI() {
782
- const convaiClient = useConvaiClient({
783
- apiKey: "your-api-key",
784
- characterId: "your-character-id",
785
- });
786
-
787
- const { chatMessages, state } = convaiClient;
788
- const [inputValue, setInputValue] = useState("");
789
-
790
- const handleSend = () => {
791
- if (inputValue.trim() && state.isConnected) {
792
- convaiClient.sendUserTextMessage(inputValue);
793
- setInputValue("");
794
- }
795
- };
796
-
797
- return (
798
- <div>
799
- {/* CRITICAL: AudioRenderer for bot voice */}
800
- <AudioRenderer />
801
-
802
- {/* Chat Messages */}
803
- <div className="chat-container">
804
- {chatMessages.map((msg) => {
805
- const isUser = msg.type.includes("user");
806
- const displayMessage =
807
- msg.type === "user-llm-text" || msg.type === "bot-llm-text";
808
-
809
- if (!displayMessage) return null;
810
-
811
- return (
812
- <div
813
- key={msg.id}
814
- className={isUser ? "user-message" : "bot-message"}
815
- >
816
- <span className="sender">{isUser ? "You" : "Character"}</span>
817
- <p>{msg.content}</p>
818
- <span className="timestamp">
819
- {new Date(msg.timestamp).toLocaleTimeString()}
820
- </span>
821
- </div>
822
- );
823
- })}
824
- </div>
825
-
826
- {/* Input */}
827
- <div className="input-container">
828
- <input
829
- type="text"
830
- value={inputValue}
831
- onChange={(e) => setInputValue(e.target.value)}
832
- onKeyPress={(e) => e.key === "Enter" && handleSend()}
833
- placeholder="Type a message..."
834
- disabled={!state.isConnected}
835
- />
836
- <button onClick={handleSend} disabled={!state.isConnected}>
837
- Send
838
- </button>
839
- </div>
840
-
841
- {/* Status Indicator */}
842
- <div className="status">
843
- {state.isConnecting && "Connecting..."}
844
- {state.isConnected && state.agentState}
845
- {!state.isConnected && "Disconnected"}
846
- </div>
847
- </div>
848
- );
849
- }
279
+ // Check screen share state
280
+ const isActive = convaiClient.screenShareControls.isScreenShareActive;
850
281
  ```
851
282
 
852
- #### Vanilla Example
283
+ **Video State Monitoring:**
853
284
 
854
285
  ```typescript
855
- import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
286
+ // React
287
+ const { isVideoEnabled } = convaiClient;
856
288
 
857
- const client = new ConvaiClient({
858
- apiKey: "your-api-key",
859
- characterId: "your-character-id",
289
+ // Core API (event-based)
290
+ convaiClient.videoControls.on("videoStateChange", (state) => {
291
+ console.log("Video enabled:", state.isVideoEnabled);
292
+ console.log("Video hidden:", state.isVideoHidden);
860
293
  });
294
+ ```
861
295
 
862
- await client.connect();
296
+ ### Interruption
863
297
 
864
- // CRITICAL: Create AudioRenderer for bot voice
865
- const audioRenderer = new AudioRenderer(client.room);
298
+ Interrupt the character's current response to allow the user to speak immediately.
866
299
 
867
- const chatContainer = document.getElementById("chat-container");
868
- const inputElement = document.getElementById("message-input");
869
- const sendButton = document.getElementById("send-button");
300
+ **React:**
870
301
 
871
- // Render messages
872
- client.on("messagesChange", (messages) => {
873
- chatContainer.innerHTML = "";
302
+ ```tsx
303
+ function ChatInterface() {
304
+ const convaiClient = useConvaiClient({
305
+ /* config */
306
+ });
874
307
 
875
- messages.forEach((msg) => {
876
- const isUser = msg.type.includes("user");
877
- const displayMessage =
878
- msg.type === "user-llm-text" || msg.type === "bot-llm-text";
308
+ const handleInterrupt = () => {
309
+ // Interrupt the bot's current response
310
+ convaiClient.sendInterruptMessage();
311
+ };
879
312
 
880
- if (!displayMessage) return;
313
+ return <button onClick={handleInterrupt}>Interrupt</button>;
314
+ }
315
+ ```
881
316
 
882
- const messageDiv = document.createElement("div");
883
- messageDiv.className = isUser ? "user-message" : "bot-message";
317
+ **Vanilla:**
884
318
 
885
- const sender = document.createElement("span");
886
- sender.textContent = isUser ? "You" : "Character";
887
- sender.className = "sender";
319
+ ```typescript
320
+ const interruptButton = document.getElementById("interrupt-btn");
888
321
 
889
- const content = document.createElement("p");
890
- content.textContent = msg.content;
322
+ interruptButton.addEventListener("click", () => {
323
+ client.sendInterruptMessage();
324
+ });
325
+ ```
891
326
 
892
- const timestamp = document.createElement("span");
893
- timestamp.textContent = new Date(msg.timestamp).toLocaleTimeString();
894
- timestamp.className = "timestamp";
327
+ **Voice Mode Interruption Pattern:**
895
328
 
896
- messageDiv.appendChild(sender);
897
- messageDiv.appendChild(content);
898
- messageDiv.appendChild(timestamp);
899
- chatContainer.appendChild(messageDiv);
900
- });
329
+ When implementing voice mode, interrupt the bot when the user starts speaking:
901
330
 
902
- // Auto-scroll
903
- chatContainer.scrollTop = chatContainer.scrollHeight;
904
- });
331
+ ```typescript
332
+ // When user enters voice mode
333
+ const enterVoiceMode = async () => {
334
+ // Interrupt any ongoing bot response
335
+ convaiClient.sendInterruptMessage();
905
336
 
906
- // Send message
907
- sendButton.addEventListener("click", () => {
908
- const text = inputElement.value.trim();
909
- if (text && client.state.isConnected) {
910
- client.sendUserTextMessage(text);
911
- inputElement.value = "";
912
- }
913
- });
337
+ // Unmute microphone
338
+ await convaiClient.audioControls.unmuteAudio();
339
+ };
914
340
 
915
- inputElement.addEventListener("keypress", (e) => {
916
- if (e.key === "Enter") {
917
- sendButton.click();
918
- }
919
- });
341
+ // When user exits voice mode
342
+ const exitVoiceMode = async () => {
343
+ // Interrupt any ongoing bot response
344
+ convaiClient.sendInterruptMessage();
920
345
 
921
- // Cleanup
922
- // audioRenderer.destroy();
923
- // await client.disconnect();
346
+ // Mute microphone
347
+ await convaiClient.audioControls.muteAudio();
348
+ };
924
349
  ```
925
350
 
926
- ---
927
-
928
- ### Audio Visualizer
351
+ ### User Microphone Mute/Unmute
929
352
 
930
- Create real-time audio visualizers using the WebRTC room's audio tracks.
353
+ Control the user's microphone input.
931
354
 
932
- #### React Example
355
+ **React:**
933
356
 
934
357
  ```tsx
935
- import { useConvaiClient } from "@convai/web-sdk/react";
936
- import { useEffect, useRef, useState } from "react";
937
-
938
- function AudioVisualizer() {
358
+ function AudioControls() {
939
359
  const convaiClient = useConvaiClient({
940
- apiKey: "your-api-key",
941
- characterId: "your-character-id",
360
+ /* config */
942
361
  });
943
362
 
944
- const canvasRef = useRef<HTMLCanvasElement>(null);
945
- const [audioLevel, setAudioLevel] = useState(0);
946
-
947
- useEffect(() => {
948
- if (!convaiClient.room) return;
949
-
950
- let animationId: number;
951
- let analyzer: AnalyserNode | null = null;
952
- let dataArray: Uint8Array | null = null;
953
-
954
- const setupAnalyzer = async () => {
955
- const audioContext = new AudioContext();
956
-
957
- // Get remote participant (bot)
958
- const remoteParticipants = Array.from(
959
- convaiClient.room.remoteParticipants.values(),
960
- );
961
-
962
- if (remoteParticipants.length === 0) return;
363
+ const handleMute = async () => {
364
+ await convaiClient.audioControls.muteAudio();
365
+ };
963
366
 
964
- const participant = remoteParticipants[0];
965
- const audioTracks = Array.from(
966
- participant.audioTrackPublications.values(),
967
- );
367
+ const handleUnmute = async () => {
368
+ await convaiClient.audioControls.unmuteAudio();
369
+ };
968
370
 
969
- if (audioTracks.length === 0) return;
371
+ const handleToggle = async () => {
372
+ await convaiClient.audioControls.toggleAudio();
373
+ };
970
374
 
971
- const audioTrack = audioTracks[0].track;
972
- if (!audioTrack) return;
375
+ return (
376
+ <div>
377
+ <button onClick={handleMute}>Mute</button>
378
+ <button onClick={handleUnmute}>Unmute</button>
379
+ <button onClick={handleToggle}>Toggle</button>
380
+ <p>Muted: {convaiClient.audioControls.isAudioMuted ? "Yes" : "No"}</p>
381
+ </div>
382
+ );
383
+ }
384
+ ```
973
385
 
974
- // Get MediaStream from track
975
- const mediaStream = new MediaStream([audioTrack.mediaStreamTrack]);
386
+ **Vanilla:**
976
387
 
977
- // Create analyzer
978
- const source = audioContext.createMediaStreamSource(mediaStream);
979
- analyzer = audioContext.createAnalyser();
980
- analyzer.fftSize = 256;
388
+ ```typescript
389
+ // Mute microphone
390
+ await client.audioControls.muteAudio();
981
391
 
982
- source.connect(analyzer);
983
- dataArray = new Uint8Array(analyzer.frequencyBinCount);
392
+ // Unmute microphone
393
+ await client.audioControls.unmuteAudio();
984
394
 
985
- // Animate
986
- const animate = () => {
987
- if (!analyzer || !dataArray) return;
395
+ // Toggle mute state
396
+ await client.audioControls.toggleAudio();
988
397
 
989
- analyzer.getByteFrequencyData(dataArray);
398
+ // Check mute state
399
+ const isMuted = client.audioControls.isAudioMuted;
990
400
 
991
- // Calculate average volume
992
- const sum = dataArray.reduce((a, b) => a + b, 0);
993
- const average = sum / dataArray.length;
994
- const normalizedLevel = average / 255;
401
+ // Enable audio (request permissions if needed)
402
+ await client.audioControls.enableAudio();
995
403
 
996
- setAudioLevel(normalizedLevel);
404
+ // Disable audio
405
+ await client.audioControls.disableAudio();
406
+ ```
997
407
 
998
- // Draw visualization
999
- drawVisualizer(dataArray);
408
+ **Audio Device Management:**
1000
409
 
1001
- animationId = requestAnimationFrame(animate);
1002
- };
410
+ ```typescript
411
+ // Get available audio devices
412
+ const devices = await convaiClient.audioControls.getAudioDevices();
1003
413
 
1004
- animate();
1005
- };
414
+ // Set specific audio device
415
+ await convaiClient.audioControls.setAudioDevice(deviceId);
1006
416
 
1007
- const drawVisualizer = (dataArray: Uint8Array) => {
1008
- const canvas = canvasRef.current;
1009
- if (!canvas) return;
417
+ // Monitor audio level
418
+ convaiClient.audioControls.startAudioLevelMonitoring();
1010
419
 
1011
- const ctx = canvas.getContext("2d");
1012
- if (!ctx) return;
420
+ convaiClient.audioControls.on("audioLevelChange", (level) => {
421
+ console.log("Audio level:", level);
422
+ // level is a number between 0 and 1
423
+ });
1013
424
 
1014
- const width = canvas.width;
1015
- const height = canvas.height;
425
+ convaiClient.audioControls.stopAudioLevelMonitoring();
426
+ ```
1016
427
 
1017
- ctx.clearRect(0, 0, width, height);
428
+ **Audio State Monitoring:**
1018
429
 
1019
- const barWidth = (width / dataArray.length) * 2.5;
1020
- let x = 0;
430
+ ```typescript
431
+ // React
432
+ const { isAudioMuted } = convaiClient;
433
+
434
+ // Core API (event-based)
435
+ convaiClient.audioControls.on("audioStateChange", (state) => {
436
+ console.log("Audio enabled:", state.isAudioEnabled);
437
+ console.log("Audio muted:", state.isAudioMuted);
438
+ console.log("Audio level:", state.audioLevel);
439
+ });
440
+ ```
1021
441
 
1022
- for (let i = 0; i < dataArray.length; i++) {
1023
- const barHeight = (dataArray[i] / 255) * height;
442
+ ### Character TTS Mute/Unmute
1024
443
 
1025
- ctx.fillStyle = `rgb(${barHeight + 100}, 50, 150)`;
1026
- ctx.fillRect(x, height - barHeight, barWidth, barHeight);
444
+ Control whether the character's responses are spoken aloud (text-to-speech).
1027
445
 
1028
- x += barWidth + 1;
1029
- }
1030
- };
446
+ **React:**
1031
447
 
1032
- if (convaiClient.state.isConnected) {
1033
- setupAnalyzer();
1034
- }
448
+ ```tsx
449
+ function TTSControls() {
450
+ const convaiClient = useConvaiClient({
451
+ /* config */
452
+ });
1035
453
 
1036
- return () => {
1037
- if (animationId) cancelAnimationFrame(animationId);
1038
- };
1039
- }, [convaiClient.room, convaiClient.state.isConnected]);
454
+ const handleToggleTTS = (enabled: boolean) => {
455
+ convaiClient.toggleTts(enabled);
456
+ };
1040
457
 
1041
458
  return (
1042
459
  <div>
1043
- <canvas
1044
- ref={canvasRef}
1045
- width={800}
1046
- height={200}
1047
- style={{ border: "1px solid #ccc" }}
1048
- />
1049
- <div>Audio Level: {(audioLevel * 100).toFixed(0)}%</div>
1050
- <div>Bot is {convaiClient.state.isSpeaking ? "speaking" : "silent"}</div>
460
+ <button onClick={() => handleToggleTTS(true)}>Enable TTS</button>
461
+ <button onClick={() => handleToggleTTS(false)}>Disable TTS</button>
1051
462
  </div>
1052
463
  );
1053
464
  }
1054
465
  ```
1055
466
 
1056
- #### Vanilla Example
467
+ **Vanilla:**
1057
468
 
1058
469
  ```typescript
1059
- import { ConvaiClient, AudioRenderer } from "@convai/web-sdk/vanilla";
470
+ // Enable text-to-speech (character will speak responses)
471
+ client.toggleTts(true);
472
+
473
+ // Disable text-to-speech (character will only send text, no audio)
474
+ client.toggleTts(false);
475
+ ```
1060
476
 
477
+ **Initial TTS Configuration:**
478
+
479
+ ```typescript
480
+ // Set TTS state during connection
1061
481
  const client = new ConvaiClient({
1062
482
  apiKey: "your-api-key",
1063
483
  characterId: "your-character-id",
484
+ ttsEnabled: true, // Enable TTS by default
1064
485
  });
1065
486
 
1066
- await client.connect();
1067
-
1068
- // CRITICAL: AudioRenderer for playback
1069
- const audioRenderer = new AudioRenderer(client.room);
1070
-
1071
- const canvas = document.getElementById("visualizer") as HTMLCanvasElement;
1072
- const ctx = canvas.getContext("2d")!;
1073
-
1074
- let analyzer: AnalyserNode | null = null;
1075
- let dataArray: Uint8Array | null = null;
1076
- let animationId: number;
487
+ // Or disable initially
488
+ const client = new ConvaiClient({
489
+ apiKey: "your-api-key",
490
+ characterId: "your-character-id",
491
+ ttsEnabled: false, // Disable TTS
492
+ });
493
+ ```
1077
494
 
1078
- // Setup analyzer
1079
- const audioContext = new AudioContext();
495
+ ### Voice Mode Implementation
1080
496
 
1081
- const remoteParticipants = Array.from(client.room.remoteParticipants.values());
1082
- const participant = remoteParticipants[0];
1083
- const audioTracks = Array.from(participant.audioTrackPublications.values());
1084
- const audioTrack = audioTracks[0].track;
497
+ Voice mode allows users to speak instead of typing. The widget automatically handles voice mode, but you can implement it manually.
1085
498
 
1086
- const mediaStream = new MediaStream([audioTrack.mediaStreamTrack]);
1087
- const source = audioContext.createMediaStreamSource(mediaStream);
499
+ **React - Manual Voice Mode:**
1088
500
 
1089
- analyzer = audioContext.createAnalyser();
1090
- analyzer.fftSize = 256;
1091
- source.connect(analyzer);
501
+ ```tsx
502
+ import { useConvaiClient } from "@convai/web-sdk";
503
+ import { useState, useEffect } from "react";
1092
504
 
1093
- dataArray = new Uint8Array(analyzer.frequencyBinCount);
505
+ function CustomChatInterface() {
506
+ const convaiClient = useConvaiClient({
507
+ /* config */
508
+ });
509
+ const [isVoiceMode, setIsVoiceMode] = useState(false);
1094
510
 
1095
- // Animate
1096
- function animate() {
1097
- if (!analyzer || !dataArray) return;
511
+ const enterVoiceMode = async () => {
512
+ // Interrupt any ongoing bot response
513
+ convaiClient.sendInterruptMessage();
1098
514
 
1099
- analyzer.getByteFrequencyData(dataArray);
515
+ // Unmute microphone
516
+ await convaiClient.audioControls.unmuteAudio();
1100
517
 
1101
- // Clear canvas
1102
- ctx.clearRect(0, 0, canvas.width, canvas.height);
518
+ setIsVoiceMode(true);
519
+ };
1103
520
 
1104
- const barWidth = (canvas.width / dataArray.length) * 2.5;
1105
- let x = 0;
521
+ const exitVoiceMode = async () => {
522
+ // Interrupt any ongoing bot response
523
+ convaiClient.sendInterruptMessage();
1106
524
 
1107
- for (let i = 0; i < dataArray.length; i++) {
1108
- const barHeight = (dataArray[i] / 255) * canvas.height;
525
+ // Mute microphone
526
+ await convaiClient.audioControls.muteAudio();
1109
527
 
1110
- ctx.fillStyle = `rgb(${barHeight + 100}, 50, 150)`;
1111
- ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
528
+ setIsVoiceMode(false);
529
+ };
1112
530
 
1113
- x += barWidth + 1;
1114
- }
531
+ // Monitor user transcription for voice input
532
+ useEffect(() => {
533
+ const transcription = convaiClient.userTranscription;
534
+ if (transcription && isVoiceMode) {
535
+ // Display real-time transcription
536
+ console.log("User is saying:", transcription);
537
+ }
538
+ }, [convaiClient.userTranscription, isVoiceMode]);
1115
539
 
1116
- animationId = requestAnimationFrame(animate);
540
+ return (
541
+ <div>
542
+ {isVoiceMode ? (
543
+ <div>
544
+ <p>Listening: {convaiClient.userTranscription}</p>
545
+ <button onClick={exitVoiceMode}>Stop Voice Mode</button>
546
+ </div>
547
+ ) : (
548
+ <button onClick={enterVoiceMode}>Start Voice Mode</button>
549
+ )}
550
+ </div>
551
+ );
1117
552
  }
1118
-
1119
- animate();
1120
-
1121
- // Cleanup
1122
- // cancelAnimationFrame(animationId);
1123
- // audioRenderer.destroy();
1124
- // await client.disconnect();
1125
- ```
1126
-
1127
- ---
1128
-
1129
- ### Message Types
1130
-
1131
- All messages from `convaiClient.chatMessages` have a `type` field:
1132
-
1133
- ```typescript
1134
- type ChatMessageType =
1135
- | "user" // User's sent message (raw)
1136
- | "user-transcription" // Real-time speech-to-text from user
1137
- | "user-llm-text" // User text processed by LLM (final)
1138
- | "convai" // Character's response (raw)
1139
- | "bot-llm-text" // Character's LLM-generated text (final)
1140
- | "bot-emotion" // Character's emotional state
1141
- | "emotion" // Generic emotion
1142
- | "behavior-tree" // Behavior tree response
1143
- | "action" // Action execution
1144
- | "interrupt-bot"; // Interrupt message
1145
553
  ```
1146
554
 
1147
- **For Chat UIs, filter to:**
555
+ **Vanilla - Manual Voice Mode:**
1148
556
 
1149
557
  ```typescript
1150
- const displayMessages = chatMessages.filter(
1151
- (msg) => msg.type === "user-llm-text" || msg.type === "bot-llm-text",
1152
- );
1153
- ```
558
+ let isVoiceMode = false;
1154
559
 
1155
- ---
560
+ const enterVoiceMode = async () => {
561
+ // Interrupt any ongoing bot response
562
+ client.sendInterruptMessage();
1156
563
 
1157
- ## API Reference
564
+ // Unmute microphone
565
+ await client.audioControls.unmuteAudio();
1158
566
 
1159
- ### Configuration
567
+ isVoiceMode = true;
568
+ updateUI();
569
+ };
1160
570
 
1161
- ```typescript
1162
- interface ConvaiConfig {
1163
- /** Your Convai API key from convai.com dashboard (required) */
1164
- apiKey: string;
571
+ const exitVoiceMode = async () => {
572
+ // Interrupt any ongoing bot response
573
+ client.sendInterruptMessage();
1165
574
 
1166
- /** The Character ID to connect to (required) */
1167
- characterId: string;
575
+ // Mute microphone
576
+ await client.audioControls.muteAudio();
1168
577
 
1169
- /**
1170
- * End user identifier for speaker management (optional).
1171
- * When provided: enables long-term memory and analytics
1172
- * When not provided: anonymous mode, no persistent memory
1173
- */
1174
- endUserId?: string;
578
+ isVoiceMode = false;
579
+ updateUI();
580
+ };
1175
581
 
1176
- /** Custom Convai API URL (optional) */
1177
- url?: string;
582
+ // Monitor user transcription
583
+ client.on("userTranscriptionChange", (transcription) => {
584
+ if (isVoiceMode && transcription) {
585
+ // Display real-time transcription
586
+ document.getElementById("transcription").textContent = transcription;
587
+ }
588
+ });
1178
589
 
1179
- /**
1180
- * Enable video capability (default: false).
1181
- * If true, connection_type will be "video" (supports audio, video, screenshare).
1182
- * If false, connection_type will be "audio" (audio only).
1183
- * ⚠️ REQUIRED for video and screen share features.
1184
- */
1185
- enableVideo?: boolean;
590
+ function updateUI() {
591
+ const voiceButton = document.getElementById("voice-btn");
592
+ const transcriptionDiv = document.getElementById("transcription");
1186
593
 
1187
- /**
1188
- * Start with video camera on when connecting (default: false).
1189
- * Only works if enableVideo is true.
1190
- */
1191
- startWithVideoOn?: boolean;
594
+ if (isVoiceMode) {
595
+ voiceButton.textContent = "Stop Voice Mode";
596
+ transcriptionDiv.style.display = "block";
597
+ } else {
598
+ voiceButton.textContent = "Start Voice Mode";
599
+ transcriptionDiv.style.display = "none";
600
+ }
601
+ }
602
+ ```
1192
603
 
1193
- /**
1194
- * Start with microphone on when connecting (default: false).
1195
- * If false, microphone stays off until user enables it.
1196
- */
1197
- startWithAudioOn?: boolean;
604
+ **Voice Mode with State Monitoring:**
1198
605
 
1199
- /**
1200
- * Enable text-to-speech audio generation (default: true).
1201
- */
1202
- ttsEnabled?: boolean;
1203
- }
606
+ ```typescript
607
+ // Monitor agent state to handle voice mode transitions
608
+ convaiClient.on("stateChange", (state) => {
609
+ if (isVoiceMode) {
610
+ switch (state.agentState) {
611
+ case "listening":
612
+ // User can speak
613
+ console.log("Bot is listening");
614
+ break;
615
+ case "thinking":
616
+ // Bot is processing
617
+ console.log("Bot is thinking");
618
+ break;
619
+ case "speaking":
620
+ // Bot is responding
621
+ console.log("Bot is speaking");
622
+ // Optionally interrupt if user wants to speak
623
+ break;
624
+ }
625
+ }
626
+ });
1204
627
  ```
1205
628
 
1206
629
  ### Connection Management
1207
630
 
631
+ **Connect:**
632
+
1208
633
  ```typescript
1209
- // Connect
634
+ // React - config passed to hook
635
+ const convaiClient = useConvaiClient({
636
+ apiKey: "your-api-key",
637
+ characterId: "your-character-id",
638
+ });
639
+
640
+ // Or connect manually
1210
641
  await convaiClient.connect({
1211
642
  apiKey: "your-api-key",
1212
643
  characterId: "your-character-id",
1213
- enableVideo: true,
1214
644
  });
1215
645
 
1216
- // Disconnect
646
+ // Vanilla
647
+ const client = new ConvaiClient();
648
+ await client.connect({
649
+ apiKey: "your-api-key",
650
+ characterId: "your-character-id",
651
+ });
652
+ ```
653
+
654
+ **Disconnect:**
655
+
656
+ ```typescript
1217
657
  await convaiClient.disconnect();
658
+ ```
659
+
660
+ **Reconnect:**
1218
661
 
1219
- // Reconnect
662
+ ```typescript
1220
663
  await convaiClient.reconnect();
664
+ ```
1221
665
 
1222
- // Reset session (clear conversation history)
666
+ **Reset Session:**
667
+
668
+ ```typescript
669
+ // Clear conversation history and start new session
1223
670
  convaiClient.resetSession();
671
+ ```
672
+
673
+ **Connection State:**
674
+
675
+ ```typescript
676
+ // React
677
+ const { state } = convaiClient;
678
+ console.log("Connected:", state.isConnected);
679
+ console.log("Connecting:", state.isConnecting);
680
+ console.log("Agent state:", state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
681
+
682
+ // Core API (event-based)
683
+ convaiClient.on("stateChange", (state) => {
684
+ console.log("State changed:", state);
685
+ });
686
+
687
+ convaiClient.on("connect", () => {
688
+ console.log("Connected");
689
+ });
1224
690
 
1225
- // Check connection state
1226
- console.log(convaiClient.state.isConnected);
1227
- console.log(convaiClient.state.isConnecting);
1228
- console.log(convaiClient.state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
1229
- console.log(convaiClient.isBotReady); // Bot ready to receive messages
691
+ convaiClient.on("disconnect", () => {
692
+ console.log("Disconnected");
693
+ });
1230
694
  ```
1231
695
 
1232
696
  ### Messaging
1233
697
 
698
+ **Send Text Message:**
699
+
1234
700
  ```typescript
1235
- // Send text message
1236
701
  convaiClient.sendUserTextMessage("Hello, how are you?");
702
+ ```
1237
703
 
1238
- // Send trigger message (invoke character action)
704
+ **Send Trigger Message:**
705
+
706
+ ```typescript
707
+ // Trigger specific character action
1239
708
  convaiClient.sendTriggerMessage("greet", "User entered the room");
1240
709
 
1241
- // Interrupt character's current response
1242
- convaiClient.sendInterruptMessage();
710
+ // Trigger without message
711
+ convaiClient.sendTriggerMessage("wave");
712
+ ```
1243
713
 
1244
- // Update context
1245
- convaiClient.updateTemplateKeys({ user_name: "John" });
1246
- convaiClient.updateDynamicInfo({ text: "User is browsing products" });
714
+ **Update Context:**
1247
715
 
1248
- // Access messages
1249
- console.log(convaiClient.chatMessages);
716
+ ```typescript
717
+ // Update template keys (e.g., user name, location)
718
+ convaiClient.updateTemplateKeys({
719
+ user_name: "John",
720
+ location: "New York",
721
+ });
1250
722
 
1251
- // Access real-time user transcription
1252
- console.log(convaiClient.userTranscription);
723
+ // Update dynamic information
724
+ convaiClient.updateDynamicInfo({
725
+ text: "User is currently browsing the products page",
726
+ });
1253
727
  ```
1254
728
 
1255
- ### Audio Controls
729
+ **Message History:**
1256
730
 
1257
731
  ```typescript
1258
- // Mute/unmute microphone
1259
- await convaiClient.audioControls.muteAudio();
1260
- await convaiClient.audioControls.unmuteAudio();
1261
- await convaiClient.audioControls.toggleAudio();
732
+ // React
733
+ const { chatMessages } = convaiClient;
1262
734
 
1263
- // Check mute state (two equivalent ways)
1264
- console.log(convaiClient.isAudioMuted); // convenience property
1265
- console.log(convaiClient.audioControls.isAudioMuted); // via controls
735
+ // Core API (event-based)
736
+ convaiClient.on("message", (message: ChatMessage) => {
737
+ console.log("New message:", message.content);
738
+ console.log("Message type:", message.type);
739
+ });
1266
740
 
1267
- // Get available microphones
1268
- const devices = await convaiClient.audioControls.getAudioDevices();
741
+ convaiClient.on("messagesChange", (messages: ChatMessage[]) => {
742
+ console.log("All messages:", messages);
743
+ });
744
+ ```
1269
745
 
1270
- // Set microphone
1271
- await convaiClient.audioControls.setAudioDevice(deviceId);
746
+ **Message Types:**
1272
747
 
1273
- // Monitor audio level
1274
- convaiClient.audioControls.startAudioLevelMonitoring();
1275
- convaiClient.audioControls.on("audioLevelChange", (level) => {
1276
- console.log("Audio level:", level); // 0 to 1
1277
- });
1278
- convaiClient.audioControls.stopAudioLevelMonitoring();
748
+ ```typescript
749
+ type ChatMessageType =
750
+ | "user" // User's sent message
751
+ | "convai" // Character's response
752
+ | "user-transcription" // Real-time speech-to-text from user
753
+ | "bot-llm-text" // Character's LLM-generated text
754
+ | "emotion" // Character's emotional state
755
+ | "behavior-tree" // Behavior tree response
756
+ | "action" // Action execution
757
+ | "bot-emotion" // Bot emotional response
758
+ | "user-llm-text" // User text processed by LLM
759
+ | "interrupt-bot"; // Interrupt message
1279
760
  ```
1280
761
 
1281
- ### Video Controls
762
+ ### State Monitoring
1282
763
 
1283
- **⚠️ Requires `enableVideo: true` in config.**
764
+ **Agent State:**
1284
765
 
1285
766
  ```typescript
1286
- // Enable/disable camera
1287
- await convaiClient.videoControls.enableVideo();
1288
- await convaiClient.videoControls.disableVideo();
1289
- await convaiClient.videoControls.toggleVideo();
767
+ // React
768
+ const { state } = convaiClient;
1290
769
 
1291
- // Check video state (two equivalent ways)
1292
- console.log(convaiClient.isVideoEnabled); // convenience property
1293
- console.log(convaiClient.videoControls.isVideoEnabled); // via controls
770
+ // Check specific states
771
+ if (state.isListening) {
772
+ console.log("Bot is listening");
773
+ }
1294
774
 
1295
- // Set video quality
1296
- await convaiClient.videoControls.setVideoQuality("high"); // 'low' | 'medium' | 'high'
775
+ if (state.isThinking) {
776
+ console.log("Bot is thinking");
777
+ }
1297
778
 
1298
- // Get available cameras
1299
- const devices = await convaiClient.videoControls.getVideoDevices();
779
+ if (state.isSpeaking) {
780
+ console.log("Bot is speaking");
781
+ }
1300
782
 
1301
- // Switch camera
1302
- await convaiClient.videoControls.setVideoDevice(deviceId);
783
+ // Combined state
784
+ console.log(state.agentState); // 'disconnected' | 'connected' | 'listening' | 'thinking' | 'speaking'
1303
785
  ```
1304
786
 
1305
- ### Screen Share Controls
1306
-
1307
- **⚠️ Requires `enableVideo: true` in config.**
787
+ **User Transcription:**
1308
788
 
1309
789
  ```typescript
1310
- // Start/stop screen share
1311
- await convaiClient.screenShareControls.enableScreenShare();
1312
- await convaiClient.screenShareControls.enableScreenShareWithAudio();
1313
- await convaiClient.screenShareControls.disableScreenShare();
1314
- await convaiClient.screenShareControls.toggleScreenShare();
790
+ // React
791
+ const { userTranscription } = convaiClient;
1315
792
 
1316
- // Check screen share state (two equivalent ways)
1317
- console.log(convaiClient.isScreenShareActive); // convenience property
1318
- console.log(convaiClient.screenShareControls.isScreenShareActive); // via controls
793
+ // Core API (event-based)
794
+ convaiClient.on("userTranscriptionChange", (transcription: string) => {
795
+ console.log("User is saying:", transcription);
796
+ });
1319
797
  ```
1320
798
 
1321
- ---
799
+ **Bot Ready State:**
800
+
801
+ ```typescript
802
+ // React
803
+ const { isBotReady } = convaiClient;
804
+
805
+ // Core API (event-based)
806
+ convaiClient.on("botReady", () => {
807
+ console.log("Bot is ready to receive messages");
808
+ });
809
+ ```
1322
810
 
1323
- ## Getting Credentials
811
+ ## Getting Convai Credentials
1324
812
 
1325
813
  1. Visit [convai.com](https://convai.com) and create an account
1326
814
  2. Navigate to your dashboard
@@ -1328,71 +816,41 @@ console.log(convaiClient.screenShareControls.isScreenShareActive); // via contro
1328
816
  4. Copy your **API Key** from the dashboard
1329
817
  5. Copy your **Character ID** from the character details
1330
818
 
1331
- ---
1332
-
1333
- ## TypeScript Support
1334
-
1335
- All exports are fully typed:
1336
-
1337
- **React:**
819
+ ## Import Paths
1338
820
 
1339
821
  ```typescript
1340
- import type {
1341
- // Configuration
1342
- ConvaiConfig,
822
+ // Default: React version (backward compatible)
823
+ import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk";
1343
824
 
1344
- // State
1345
- ConvaiClientState,
1346
-
1347
- // Messages
1348
- ChatMessage,
1349
- ChatMessageType,
825
+ // Explicit React import
826
+ import { useConvaiClient, ConvaiWidget } from "@convai/web-sdk/react";
1350
827
 
1351
- // Client
1352
- IConvaiClient,
1353
- ConvaiClient,
828
+ // Vanilla JS/TS
829
+ import { ConvaiClient, createConvaiWidget } from "@convai/web-sdk/vanilla";
1354
830
 
1355
- // Controls
1356
- AudioControls,
1357
- VideoControls,
1358
- ScreenShareControls,
1359
- } from "@convai/web-sdk/react";
831
+ // Core only (no UI, framework agnostic)
832
+ import { ConvaiClient } from "@convai/web-sdk/core";
1360
833
  ```
1361
834
 
1362
- **Vanilla:**
835
+ ## TypeScript Support
836
+
837
+ All exports are fully typed:
1363
838
 
1364
839
  ```typescript
1365
840
  import type {
1366
- // Configuration
841
+ ConvaiClient,
1367
842
  ConvaiConfig,
1368
-
1369
- // State
1370
843
  ConvaiClientState,
1371
-
1372
- // Messages
1373
844
  ChatMessage,
1374
- ChatMessageType,
1375
-
1376
- // Client
1377
- IConvaiClient,
1378
- ConvaiClient,
1379
-
1380
- // Controls
1381
845
  AudioControls,
1382
846
  VideoControls,
1383
847
  ScreenShareControls,
1384
-
1385
- // Widget
1386
- VanillaWidget,
1387
- VanillaWidgetOptions,
1388
- } from "@convai/web-sdk/vanilla";
848
+ IConvaiClient,
849
+ } from "@convai/web-sdk";
1389
850
  ```
1390
851
 
1391
- ---
1392
-
1393
852
  ## Support
1394
853
 
1395
- - **Documentation**: [API Reference](./API_REFERENCE.md)
1396
- - **Forum**: [Convai Forum](https://forum.convai.com)
1397
- - **Website**: [convai.com](https://convai.com)
1398
- - **Issues**: [GitHub Issues](https://github.com/convai/web-sdk/issues)
854
+ - [Convai Forum](https://forum.convai.com)
855
+ - [API Reference](./API_REFERENCE.md)
856
+ - [Convai Website](https://convai.com)