@convai/web-sdk 0.3.1-beta.3 → 0.3.2-beta.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- package/README.md +535 -1077
- package/dist/core/AudioManager.d.ts.map +1 -0
- package/dist/core/AudioManager.js +262 -0
- package/dist/core/AudioManager.js.map +1 -0
- package/dist/core/BlendshapeQueue.d.ts +128 -0
- package/dist/core/BlendshapeQueue.d.ts.map +1 -0
- package/dist/core/BlendshapeQueue.js +229 -0
- package/dist/core/BlendshapeQueue.js.map +1 -0
- package/dist/{types/core → core}/ConvaiClient.d.ts +19 -15
- package/dist/core/ConvaiClient.d.ts.map +1 -0
- package/dist/core/ConvaiClient.js +623 -0
- package/dist/core/ConvaiClient.js.map +1 -0
- package/dist/core/EventEmitter.d.ts.map +1 -0
- package/dist/core/EventEmitter.js +68 -0
- package/dist/core/EventEmitter.js.map +1 -0
- package/dist/{types/core → core}/MessageHandler.d.ts +7 -0
- package/dist/core/MessageHandler.d.ts.map +1 -0
- package/dist/core/MessageHandler.js +333 -0
- package/dist/core/MessageHandler.js.map +1 -0
- package/dist/core/ScreenShareManager.d.ts.map +1 -0
- package/dist/core/ScreenShareManager.js +207 -0
- package/dist/core/ScreenShareManager.js.map +1 -0
- package/dist/core/VideoManager.d.ts.map +1 -0
- package/dist/core/VideoManager.js +205 -0
- package/dist/core/VideoManager.js.map +1 -0
- package/dist/{types/core → core}/index.d.ts +2 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +14 -1970
- package/dist/core/index.js.map +1 -0
- package/dist/{types/core → core}/types.d.ts +12 -21
- package/dist/core/types.d.ts.map +1 -0
- package/dist/core/types.js +2 -0
- package/dist/core/types.js.map +1 -0
- package/dist/dev.d.ts +12 -0
- package/dist/dev.d.ts.map +1 -0
- package/dist/dev.js +12 -0
- package/dist/dev.js.map +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.js +201 -0
- package/dist/lipsync-helpers/arkitBlendshapeHelpers.js.map +1 -0
- package/dist/lipsync-helpers/arkitOrder61.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitOrder61.js +287 -0
- package/dist/lipsync-helpers/arkitOrder61.js.map +1 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.d.ts.map +1 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.js +362 -0
- package/dist/lipsync-helpers/arkitPhonemeReference.js.map +1 -0
- package/dist/{types/lipsync-helpers → lipsync-helpers}/index.d.ts +1 -0
- package/dist/lipsync-helpers/index.d.ts.map +1 -0
- package/dist/lipsync-helpers/index.js +20 -1165
- package/dist/lipsync-helpers/index.js.map +1 -0
- package/dist/lipsync-helpers/metahumanOrder251.d.ts +115 -0
- package/dist/lipsync-helpers/metahumanOrder251.d.ts.map +1 -0
- package/dist/lipsync-helpers/metahumanOrder251.js +432 -0
- package/dist/lipsync-helpers/metahumanOrder251.js.map +1 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.d.ts.map +1 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js +315 -0
- package/dist/lipsync-helpers/neurosyncBlendshapeMapper.js.map +1 -0
- package/dist/react/components/ConvaiWidget.d.ts +68 -0
- package/dist/react/components/ConvaiWidget.d.ts.map +1 -0
- package/dist/react/components/ConvaiWidget.js +505 -0
- package/dist/react/components/ConvaiWidget.js.map +1 -0
- package/dist/react/components/index.d.ts +2 -0
- package/dist/react/components/index.d.ts.map +1 -0
- package/dist/react/components/index.js +3 -0
- package/dist/react/components/index.js.map +1 -0
- package/dist/react/components/rtc-widget/components/AudioSettingsPanel.d.ts +10 -0
- package/dist/react/components/rtc-widget/components/AudioSettingsPanel.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/AudioSettingsPanel.js +316 -0
- package/dist/react/components/rtc-widget/components/AudioSettingsPanel.js.map +1 -0
- package/dist/react/components/rtc-widget/components/AudioVisualizer.d.ts +36 -0
- package/dist/react/components/rtc-widget/components/AudioVisualizer.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/AudioVisualizer.js +259 -0
- package/dist/react/components/rtc-widget/components/AudioVisualizer.js.map +1 -0
- package/dist/react/components/rtc-widget/components/ConviMessage.d.ts +10 -0
- package/dist/react/components/rtc-widget/components/ConviMessage.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/ConviMessage.js +14 -0
- package/dist/react/components/rtc-widget/components/ConviMessage.js.map +1 -0
- package/dist/react/components/rtc-widget/components/FloatingVideo.d.ts +9 -0
- package/dist/react/components/rtc-widget/components/FloatingVideo.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/FloatingVideo.js +122 -0
- package/dist/react/components/rtc-widget/components/FloatingVideo.js.map +1 -0
- package/dist/react/components/rtc-widget/components/MarkdownRenderer.d.ts +7 -0
- package/dist/react/components/rtc-widget/components/MarkdownRenderer.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/MarkdownRenderer.js +68 -0
- package/dist/react/components/rtc-widget/components/MarkdownRenderer.js.map +1 -0
- package/dist/react/components/rtc-widget/components/MessageBubble.d.ts +10 -0
- package/dist/react/components/rtc-widget/components/MessageBubble.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/MessageBubble.js +23 -0
- package/dist/react/components/rtc-widget/components/MessageBubble.js.map +1 -0
- package/dist/react/components/rtc-widget/components/MessageList.d.ts +11 -0
- package/dist/react/components/rtc-widget/components/MessageList.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/MessageList.js +89 -0
- package/dist/react/components/rtc-widget/components/MessageList.js.map +1 -0
- package/dist/react/components/rtc-widget/components/UserMessage.d.ts +9 -0
- package/dist/react/components/rtc-widget/components/UserMessage.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/UserMessage.js +15 -0
- package/dist/react/components/rtc-widget/components/UserMessage.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.d.ts +6 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.js +15 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviButton.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.d.ts +25 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.js +172 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviFooter.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.d.ts +17 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.js +66 -0
- package/dist/react/components/rtc-widget/components/conviComponents/ConviHeader.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.d.ts +12 -0
- package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.js +68 -0
- package/dist/react/components/rtc-widget/components/conviComponents/SettingsTray.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.d.ts +12 -0
- package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.js +255 -0
- package/dist/react/components/rtc-widget/components/conviComponents/VoiceModeOverlay.js.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/index.d.ts +6 -0
- package/dist/react/components/rtc-widget/components/conviComponents/index.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/conviComponents/index.js +6 -0
- package/dist/react/components/rtc-widget/components/conviComponents/index.js.map +1 -0
- package/dist/react/components/rtc-widget/components/index.d.ts +9 -0
- package/dist/react/components/rtc-widget/components/index.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/components/index.js +15 -0
- package/dist/react/components/rtc-widget/components/index.js.map +1 -0
- package/dist/react/components/rtc-widget/index.d.ts +6 -0
- package/dist/react/components/rtc-widget/index.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/index.js +9 -0
- package/dist/react/components/rtc-widget/index.js.map +1 -0
- package/dist/react/components/rtc-widget/styles/framerConfig.d.ts +116 -0
- package/dist/react/components/rtc-widget/styles/framerConfig.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/styles/framerConfig.js +73 -0
- package/dist/react/components/rtc-widget/styles/framerConfig.js.map +1 -0
- package/dist/react/components/rtc-widget/styles/icons.d.ts +28 -0
- package/dist/react/components/rtc-widget/styles/icons.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/styles/icons.js +257 -0
- package/dist/react/components/rtc-widget/styles/icons.js.map +1 -0
- package/dist/react/components/rtc-widget/styles/index.d.ts +6 -0
- package/dist/react/components/rtc-widget/styles/index.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/styles/index.js +9 -0
- package/dist/react/components/rtc-widget/styles/index.js.map +1 -0
- package/dist/react/components/rtc-widget/styles/styledComponents.d.ts +90 -0
- package/dist/react/components/rtc-widget/styles/styledComponents.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/styles/styledComponents.js +663 -0
- package/dist/react/components/rtc-widget/styles/styledComponents.js.map +1 -0
- package/dist/react/components/rtc-widget/styles/theme.d.ts +188 -0
- package/dist/react/components/rtc-widget/styles/theme.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/styles/theme.js +290 -0
- package/dist/react/components/rtc-widget/styles/theme.js.map +1 -0
- package/dist/react/components/rtc-widget/types/index.d.ts +60 -0
- package/dist/react/components/rtc-widget/types/index.d.ts.map +1 -0
- package/dist/react/components/rtc-widget/types/index.js +2 -0
- package/dist/react/components/rtc-widget/types/index.js.map +1 -0
- package/dist/react/hooks/index.d.ts +4 -0
- package/dist/react/hooks/index.d.ts.map +1 -0
- package/dist/react/hooks/index.js +6 -0
- package/dist/react/hooks/index.js.map +1 -0
- package/dist/react/hooks/useCharacterInfo.d.ts +17 -0
- package/dist/react/hooks/useCharacterInfo.d.ts.map +1 -0
- package/dist/react/hooks/useCharacterInfo.js +60 -0
- package/dist/react/hooks/useCharacterInfo.js.map +1 -0
- package/dist/react/hooks/useConvaiClient.d.ts +35 -0
- package/dist/react/hooks/useConvaiClient.d.ts.map +1 -0
- package/dist/react/hooks/useConvaiClient.js +183 -0
- package/dist/react/hooks/useConvaiClient.js.map +1 -0
- package/dist/react/hooks/useLocalCameraTrack.d.ts +22 -0
- package/dist/react/hooks/useLocalCameraTrack.d.ts.map +1 -0
- package/dist/react/hooks/useLocalCameraTrack.js +34 -0
- package/dist/react/hooks/useLocalCameraTrack.js.map +1 -0
- package/dist/react/index.d.ts +7 -0
- package/dist/react/index.d.ts.map +1 -0
- package/dist/react/index.js +13 -0
- package/dist/react/index.js.map +1 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +2 -0
- package/dist/types/index.js.map +1 -0
- package/dist/utils/LatencyMonitor.d.ts.map +1 -0
- package/dist/utils/LatencyMonitor.js +136 -0
- package/dist/utils/LatencyMonitor.js.map +1 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +96 -0
- package/dist/utils/logger.js.map +1 -0
- package/dist/utils/speakerManagement.d.ts.map +1 -0
- package/dist/utils/speakerManagement.js +64 -0
- package/dist/utils/speakerManagement.js.map +1 -0
- package/dist/{types/vanilla → vanilla}/AudioRenderer.d.ts +5 -0
- package/dist/vanilla/AudioRenderer.d.ts.map +1 -0
- package/dist/vanilla/AudioRenderer.js +135 -0
- package/dist/vanilla/AudioRenderer.js.map +1 -0
- package/dist/vanilla/ConvaiWidget.d.ts.map +1 -0
- package/dist/vanilla/ConvaiWidget.js +1786 -0
- package/dist/vanilla/ConvaiWidget.js.map +1 -0
- package/dist/vanilla/icons.d.ts.map +1 -0
- package/dist/vanilla/icons.js +222 -0
- package/dist/vanilla/icons.js.map +1 -0
- package/dist/{types/vanilla → vanilla}/index.d.ts +1 -3
- package/dist/vanilla/index.d.ts.map +1 -0
- package/dist/vanilla/index.js +20 -5509
- package/dist/vanilla/index.js.map +1 -0
- package/dist/vanilla/styles.d.ts.map +1 -0
- package/dist/vanilla/styles.js +287 -0
- package/dist/vanilla/styles.js.map +1 -0
- package/dist/vanilla/types.d.ts +43 -0
- package/dist/vanilla/types.d.ts.map +1 -0
- package/dist/vanilla/types.js +2 -0
- package/dist/vanilla/types.js.map +1 -0
- package/package.json +33 -38
- package/CHANGELOG.md +0 -165
- package/dist/core/index.cjs +0 -1977
- package/dist/lipsync-helpers/index.cjs +0 -1195
- package/dist/types/core/AudioManager.d.ts.map +0 -1
- package/dist/types/core/ConvaiClient.d.ts.map +0 -1
- package/dist/types/core/EventEmitter.d.ts.map +0 -1
- package/dist/types/core/MessageHandler.d.ts.map +0 -1
- package/dist/types/core/ScreenShareManager.d.ts.map +0 -1
- package/dist/types/core/VideoManager.d.ts.map +0 -1
- package/dist/types/core/index.d.ts.map +0 -1
- package/dist/types/core/types.d.ts.map +0 -1
- package/dist/types/lipsync-helpers/arkitBlendshapeHelpers.d.ts.map +0 -1
- package/dist/types/lipsync-helpers/arkitOrder61.d.ts.map +0 -1
- package/dist/types/lipsync-helpers/arkitPhonemeReference.d.ts.map +0 -1
- package/dist/types/lipsync-helpers/index.d.ts.map +0 -1
- package/dist/types/lipsync-helpers/neurosyncBlendshapeMapper.d.ts.map +0 -1
- package/dist/types/types/index.d.ts.map +0 -1
- package/dist/types/utils/LatencyMonitor.d.ts.map +0 -1
- package/dist/types/utils/logger.d.ts.map +0 -1
- package/dist/types/utils/speakerManagement.d.ts.map +0 -1
- package/dist/types/vanilla/AudioRenderer.d.ts.map +0 -1
- package/dist/types/vanilla/ConvaiWidget.d.ts.map +0 -1
- package/dist/types/vanilla/icons.d.ts.map +0 -1
- package/dist/types/vanilla/index.d.ts.map +0 -1
- package/dist/types/vanilla/styles.d.ts.map +0 -1
- package/dist/types/vanilla/types.d.ts +0 -106
- package/dist/types/vanilla/types.d.ts.map +0 -1
- package/dist/umd/convai.umd.js +0 -1
- package/dist/vanilla/index.cjs +0 -5559
- /package/dist/{types/core → core}/AudioManager.d.ts +0 -0
- /package/dist/{types/core → core}/EventEmitter.d.ts +0 -0
- /package/dist/{types/core → core}/ScreenShareManager.d.ts +0 -0
- /package/dist/{types/core → core}/VideoManager.d.ts +0 -0
- /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitBlendshapeHelpers.d.ts +0 -0
- /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitOrder61.d.ts +0 -0
- /package/dist/{types/lipsync-helpers → lipsync-helpers}/arkitPhonemeReference.d.ts +0 -0
- /package/dist/{types/lipsync-helpers → lipsync-helpers}/neurosyncBlendshapeMapper.d.ts +0 -0
- /package/dist/types/{types/index.d.ts → index.d.ts} +0 -0
- /package/dist/{types/utils → utils}/LatencyMonitor.d.ts +0 -0
- /package/dist/{types/utils → utils}/logger.d.ts +0 -0
- /package/dist/{types/utils → utils}/speakerManagement.d.ts +0 -0
- /package/dist/{types/vanilla → vanilla}/ConvaiWidget.d.ts +0 -0
- /package/dist/{types/vanilla → vanilla}/icons.d.ts +0 -0
- /package/dist/{types/vanilla → vanilla}/styles.d.ts +0 -0
package/dist/core/index.cjs
DELETED
|
@@ -1,1977 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
var livekitClient = require('livekit-client');
|
|
4
|
-
|
|
5
|
-
/**
|
|
6
|
-
* Logger utility that only logs in development mode.
|
|
7
|
-
* Completely silent in production builds and npm packages.
|
|
8
|
-
*
|
|
9
|
-
* This logger is designed to be tree-shakeable and completely removed
|
|
10
|
-
* from production builds when not used.
|
|
11
|
-
*
|
|
12
|
-
* To test logger behavior:
|
|
13
|
-
* - Development: Set NODE_ENV=development or run on localhost
|
|
14
|
-
* - Production: Set NODE_ENV=production or deploy to production
|
|
15
|
-
*
|
|
16
|
-
* In npm packages, this logger will be completely silent by default.
|
|
17
|
-
*/
|
|
18
|
-
// Environment detection with multiple fallbacks
|
|
19
|
-
const detectEnvironment = () => {
|
|
20
|
-
// Check if we're in a browser environment
|
|
21
|
-
const isBrowser = typeof window !== 'undefined';
|
|
22
|
-
// Safe process.env access (Vite/Webpack replaces this at build time)
|
|
23
|
-
const getEnv = (key) => {
|
|
24
|
-
try {
|
|
25
|
-
// @ts-ignore - process.env is replaced at build time by bundlers
|
|
26
|
-
return typeof process !== 'undefined' && process.env ? process.env[key] : undefined;
|
|
27
|
-
}
|
|
28
|
-
catch {
|
|
29
|
-
return undefined;
|
|
30
|
-
}
|
|
31
|
-
};
|
|
32
|
-
// Priority 1: Check NEXT_PUBLIC_ENVIRONMENT first (most explicit)
|
|
33
|
-
const publicEnv = getEnv('NEXT_PUBLIC_ENVIRONMENT');
|
|
34
|
-
if (publicEnv === 'PRODUCTION') {
|
|
35
|
-
return 'production';
|
|
36
|
-
}
|
|
37
|
-
if (publicEnv === 'PREVIEW' || publicEnv === 'STAGING') {
|
|
38
|
-
return 'development';
|
|
39
|
-
}
|
|
40
|
-
// Priority 2: Check NODE_ENV and REACT_APP_ENV (Vite replaces import.meta.env.MODE)
|
|
41
|
-
const nodeEnv = getEnv('NODE_ENV');
|
|
42
|
-
const reactEnv = getEnv('REACT_APP_ENV');
|
|
43
|
-
if (nodeEnv === 'production' || reactEnv === 'production') {
|
|
44
|
-
return 'production';
|
|
45
|
-
}
|
|
46
|
-
if (nodeEnv === 'development' || reactEnv === 'development') {
|
|
47
|
-
return 'development';
|
|
48
|
-
}
|
|
49
|
-
// Priority 3: Check for Vite dev server
|
|
50
|
-
const viteDevUrl = getEnv('VITE_DEV_SERVER_URL');
|
|
51
|
-
if (viteDevUrl !== undefined) {
|
|
52
|
-
return 'development';
|
|
53
|
-
}
|
|
54
|
-
// Priority 4: Check browser environment (only if no explicit env vars are set)
|
|
55
|
-
if (isBrowser) {
|
|
56
|
-
// Localhost is typically development
|
|
57
|
-
if (window.location?.hostname === 'localhost' || window.location?.hostname === '127.0.0.1') {
|
|
58
|
-
return 'development';
|
|
59
|
-
}
|
|
60
|
-
// Check for development ports
|
|
61
|
-
if (window.location?.port && ['3000', '3001', '5173', '8080'].includes(window.location.port)) {
|
|
62
|
-
return 'development';
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
// Default to production for safety
|
|
66
|
-
return 'production';
|
|
67
|
-
};
|
|
68
|
-
const environment = detectEnvironment();
|
|
69
|
-
const isDevelopment = environment === 'development';
|
|
70
|
-
// No-op function that gets completely removed by tree shaking
|
|
71
|
-
const noop = () => {
|
|
72
|
-
// This function is intentionally empty and will be removed in production
|
|
73
|
-
};
|
|
74
|
-
// Development logger functions
|
|
75
|
-
const devLogger = {
|
|
76
|
-
log: (...args) => console.log('[Convai]', ...args),
|
|
77
|
-
warn: (...args) => console.warn('[Convai]', ...args),
|
|
78
|
-
error: (...args) => console.error('[Convai]', ...args),
|
|
79
|
-
info: (...args) => console.info('[Convai]', ...args),
|
|
80
|
-
debug: (...args) => console.debug('[Convai]', ...args),
|
|
81
|
-
trace: (...args) => console.trace('[Convai]', ...args),
|
|
82
|
-
};
|
|
83
|
-
// Production logger functions (all no-ops)
|
|
84
|
-
const prodLogger = {
|
|
85
|
-
log: noop,
|
|
86
|
-
warn: noop,
|
|
87
|
-
error: noop,
|
|
88
|
-
info: noop,
|
|
89
|
-
debug: noop,
|
|
90
|
-
trace: noop,
|
|
91
|
-
};
|
|
92
|
-
// Export the appropriate logger based on environment
|
|
93
|
-
const logger = isDevelopment ? devLogger : prodLogger;
|
|
94
|
-
|
|
95
|
-
class EventEmitter {
|
|
96
|
-
constructor() {
|
|
97
|
-
this.events = new Map();
|
|
98
|
-
}
|
|
99
|
-
/**
|
|
100
|
-
* Subscribe to an event
|
|
101
|
-
* @param event Event name
|
|
102
|
-
* @param callback Callback function
|
|
103
|
-
* @returns Unsubscribe function
|
|
104
|
-
*/
|
|
105
|
-
on(event, callback) {
|
|
106
|
-
if (!this.events.has(event)) {
|
|
107
|
-
this.events.set(event, new Set());
|
|
108
|
-
}
|
|
109
|
-
this.events.get(event).add(callback);
|
|
110
|
-
// Return unsubscribe function
|
|
111
|
-
return () => {
|
|
112
|
-
this.off(event, callback);
|
|
113
|
-
};
|
|
114
|
-
}
|
|
115
|
-
/**
|
|
116
|
-
* Unsubscribe from an event
|
|
117
|
-
* @param event Event name
|
|
118
|
-
* @param callback Callback function to remove
|
|
119
|
-
*/
|
|
120
|
-
off(event, callback) {
|
|
121
|
-
const callbacks = this.events.get(event);
|
|
122
|
-
if (callbacks) {
|
|
123
|
-
callbacks.delete(callback);
|
|
124
|
-
if (callbacks.size === 0) {
|
|
125
|
-
this.events.delete(event);
|
|
126
|
-
}
|
|
127
|
-
}
|
|
128
|
-
}
|
|
129
|
-
/**
|
|
130
|
-
* Emit an event to all subscribers
|
|
131
|
-
* @param event Event name
|
|
132
|
-
* @param args Arguments to pass to callbacks
|
|
133
|
-
*/
|
|
134
|
-
emit(event, ...args) {
|
|
135
|
-
const callbacks = this.events.get(event);
|
|
136
|
-
if (callbacks) {
|
|
137
|
-
callbacks.forEach((callback) => {
|
|
138
|
-
try {
|
|
139
|
-
callback(...args);
|
|
140
|
-
}
|
|
141
|
-
catch (error) {
|
|
142
|
-
console.error(`Error in event handler for "${event}":`, error);
|
|
143
|
-
}
|
|
144
|
-
});
|
|
145
|
-
}
|
|
146
|
-
}
|
|
147
|
-
/**
|
|
148
|
-
* Remove all event listeners
|
|
149
|
-
*/
|
|
150
|
-
removeAllListeners() {
|
|
151
|
-
this.events.clear();
|
|
152
|
-
}
|
|
153
|
-
/**
|
|
154
|
-
* Get the number of listeners for an event
|
|
155
|
-
* @param event Event name
|
|
156
|
-
* @returns Number of listeners
|
|
157
|
-
*/
|
|
158
|
-
listenerCount(event) {
|
|
159
|
-
return this.events.get(event)?.size ?? 0;
|
|
160
|
-
}
|
|
161
|
-
}
|
|
162
|
-
|
|
163
|
-
/**
|
|
164
|
-
* Manages audio controls for LiveKit room
|
|
165
|
-
* Provides methods to enable/disable microphone, manage audio devices,
|
|
166
|
-
* and control audio state.
|
|
167
|
-
*/
|
|
168
|
-
class AudioManager extends EventEmitter {
|
|
169
|
-
constructor(room) {
|
|
170
|
-
super();
|
|
171
|
-
this.room = null;
|
|
172
|
-
this._isAudioEnabled = false;
|
|
173
|
-
this._isAudioMuted = false;
|
|
174
|
-
this._audioLevel = 0;
|
|
175
|
-
if (room) {
|
|
176
|
-
this.setRoom(room);
|
|
177
|
-
}
|
|
178
|
-
}
|
|
179
|
-
get isAudioEnabled() {
|
|
180
|
-
return this._isAudioEnabled;
|
|
181
|
-
}
|
|
182
|
-
get isAudioMuted() {
|
|
183
|
-
return this._isAudioMuted;
|
|
184
|
-
}
|
|
185
|
-
get audioLevel() {
|
|
186
|
-
return this._audioLevel;
|
|
187
|
-
}
|
|
188
|
-
/**
|
|
189
|
-
* Set the LiveKit room instance
|
|
190
|
-
*/
|
|
191
|
-
setRoom(room) {
|
|
192
|
-
// Clean up previous room listeners
|
|
193
|
-
if (this.room) {
|
|
194
|
-
this.cleanupListeners();
|
|
195
|
-
}
|
|
196
|
-
this.room = room;
|
|
197
|
-
if (room && room.state !== 'disconnected') {
|
|
198
|
-
this.setupListeners();
|
|
199
|
-
this.updateState();
|
|
200
|
-
}
|
|
201
|
-
}
|
|
202
|
-
/**
|
|
203
|
-
* Setup event listeners for room
|
|
204
|
-
*/
|
|
205
|
-
setupListeners() {
|
|
206
|
-
if (!this.room)
|
|
207
|
-
return;
|
|
208
|
-
const localParticipant = this.room.localParticipant;
|
|
209
|
-
const handleTrackMuted = (track) => {
|
|
210
|
-
if (track.source === 'microphone') {
|
|
211
|
-
this._isAudioMuted = true;
|
|
212
|
-
this._isAudioEnabled = false;
|
|
213
|
-
this.emit('audioStateChange', { isAudioMuted: true, isAudioEnabled: false });
|
|
214
|
-
}
|
|
215
|
-
};
|
|
216
|
-
const handleTrackUnmuted = (track) => {
|
|
217
|
-
if (track.source === 'microphone') {
|
|
218
|
-
this._isAudioMuted = false;
|
|
219
|
-
this._isAudioEnabled = true;
|
|
220
|
-
this.emit('audioStateChange', { isAudioMuted: false, isAudioEnabled: true });
|
|
221
|
-
}
|
|
222
|
-
};
|
|
223
|
-
const handleTrackPublished = (track) => {
|
|
224
|
-
if (track.source === 'microphone') {
|
|
225
|
-
this._isAudioEnabled = true;
|
|
226
|
-
this._isAudioMuted = false;
|
|
227
|
-
this.emit('audioStateChange', { isAudioMuted: false, isAudioEnabled: true });
|
|
228
|
-
}
|
|
229
|
-
};
|
|
230
|
-
const handleTrackUnpublished = (track) => {
|
|
231
|
-
if (track.source === 'microphone') {
|
|
232
|
-
this._isAudioEnabled = false;
|
|
233
|
-
this._isAudioMuted = true;
|
|
234
|
-
this.emit('audioStateChange', { isAudioMuted: true, isAudioEnabled: false });
|
|
235
|
-
}
|
|
236
|
-
};
|
|
237
|
-
localParticipant.on('trackMuted', handleTrackMuted);
|
|
238
|
-
localParticipant.on('trackUnmuted', handleTrackUnmuted);
|
|
239
|
-
localParticipant.on('trackPublished', handleTrackPublished);
|
|
240
|
-
localParticipant.on('trackUnpublished', handleTrackUnpublished);
|
|
241
|
-
}
|
|
242
|
-
/**
|
|
243
|
-
* Clean up event listeners
|
|
244
|
-
*/
|
|
245
|
-
cleanupListeners() {
|
|
246
|
-
if (!this.room)
|
|
247
|
-
return;
|
|
248
|
-
// LiveKit handles cleanup automatically when participant is removed
|
|
249
|
-
}
|
|
250
|
-
/**
|
|
251
|
-
* Force-sync the stored audio state with the underlying LiveKit room.
|
|
252
|
-
* Useful right after a connection completes so the UI can reflect
|
|
253
|
-
* the current microphone permission without waiting for a toggle event.
|
|
254
|
-
*/
|
|
255
|
-
syncStateFromRoom(options = {}) {
|
|
256
|
-
const { emit = false } = options;
|
|
257
|
-
if (!this.room || this.room.state === 'disconnected') {
|
|
258
|
-
const stateChanged = this._isAudioEnabled !== false ||
|
|
259
|
-
this._isAudioMuted !== true ||
|
|
260
|
-
this._audioLevel !== 0;
|
|
261
|
-
this._isAudioEnabled = false;
|
|
262
|
-
this._isAudioMuted = true;
|
|
263
|
-
this._audioLevel = 0;
|
|
264
|
-
if (emit && stateChanged) {
|
|
265
|
-
this.emit('audioStateChange', {
|
|
266
|
-
isAudioMuted: this._isAudioMuted,
|
|
267
|
-
isAudioEnabled: this._isAudioEnabled,
|
|
268
|
-
audioLevel: this._audioLevel,
|
|
269
|
-
});
|
|
270
|
-
}
|
|
271
|
-
return;
|
|
272
|
-
}
|
|
273
|
-
const localParticipant = this.room.localParticipant;
|
|
274
|
-
const isMicEnabled = localParticipant?.isMicrophoneEnabled ?? false;
|
|
275
|
-
const nextMutedState = !isMicEnabled;
|
|
276
|
-
const stateChanged = this._isAudioEnabled !== isMicEnabled ||
|
|
277
|
-
this._isAudioMuted !== nextMutedState;
|
|
278
|
-
this._isAudioEnabled = isMicEnabled;
|
|
279
|
-
this._isAudioMuted = nextMutedState;
|
|
280
|
-
if (emit && stateChanged) {
|
|
281
|
-
this.emit('audioStateChange', {
|
|
282
|
-
isAudioMuted: this._isAudioMuted,
|
|
283
|
-
isAudioEnabled: this._isAudioEnabled,
|
|
284
|
-
audioLevel: this._audioLevel,
|
|
285
|
-
});
|
|
286
|
-
}
|
|
287
|
-
}
|
|
288
|
-
/**
|
|
289
|
-
* Update audio state from room
|
|
290
|
-
*/
|
|
291
|
-
updateState() {
|
|
292
|
-
this.syncStateFromRoom();
|
|
293
|
-
}
|
|
294
|
-
/**
|
|
295
|
-
* Enable audio
|
|
296
|
-
*/
|
|
297
|
-
async enableAudio() {
|
|
298
|
-
if (!this.room) {
|
|
299
|
-
throw new Error('Room not initialized');
|
|
300
|
-
}
|
|
301
|
-
try {
|
|
302
|
-
await this.room.localParticipant.setMicrophoneEnabled(true);
|
|
303
|
-
this._isAudioEnabled = true;
|
|
304
|
-
this._isAudioMuted = false;
|
|
305
|
-
this.emit('audioStateChange', {
|
|
306
|
-
isAudioMuted: false,
|
|
307
|
-
isAudioEnabled: true,
|
|
308
|
-
audioLevel: this._audioLevel,
|
|
309
|
-
});
|
|
310
|
-
}
|
|
311
|
-
catch (error) {
|
|
312
|
-
logger.error("Failed to enable audio:", error);
|
|
313
|
-
throw error;
|
|
314
|
-
}
|
|
315
|
-
}
|
|
316
|
-
/**
|
|
317
|
-
* Disable audio and completely release microphone
|
|
318
|
-
*/
|
|
319
|
-
async disableAudio() {
|
|
320
|
-
if (!this.room) {
|
|
321
|
-
throw new Error('Room not initialized');
|
|
322
|
-
}
|
|
323
|
-
try {
|
|
324
|
-
// Get the microphone track before disabling
|
|
325
|
-
const micTrack = Array.from(this.room.localParticipant.audioTrackPublications.values()).find((publication) => publication.source === 'microphone' && publication.track)?.track;
|
|
326
|
-
// Disable microphone (unpublishes the track)
|
|
327
|
-
await this.room.localParticipant.setMicrophoneEnabled(false);
|
|
328
|
-
// Explicitly stop the underlying MediaStreamTrack to release hardware
|
|
329
|
-
// This removes the browser recording indicator
|
|
330
|
-
if (micTrack) {
|
|
331
|
-
micTrack.stop();
|
|
332
|
-
}
|
|
333
|
-
this._isAudioEnabled = false;
|
|
334
|
-
this._isAudioMuted = true;
|
|
335
|
-
this.emit('audioStateChange', {
|
|
336
|
-
isAudioMuted: true,
|
|
337
|
-
isAudioEnabled: false,
|
|
338
|
-
audioLevel: 0,
|
|
339
|
-
});
|
|
340
|
-
}
|
|
341
|
-
catch (error) {
|
|
342
|
-
logger.error("Failed to disable audio:", error);
|
|
343
|
-
throw error;
|
|
344
|
-
}
|
|
345
|
-
}
|
|
346
|
-
/**
|
|
347
|
-
* Mute audio
|
|
348
|
-
*/
|
|
349
|
-
async muteAudio() {
|
|
350
|
-
await this.disableAudio();
|
|
351
|
-
}
|
|
352
|
-
/**
|
|
353
|
-
* Unmute audio
|
|
354
|
-
*/
|
|
355
|
-
async unmuteAudio() {
|
|
356
|
-
await this.enableAudio();
|
|
357
|
-
}
|
|
358
|
-
/**
|
|
359
|
-
* Toggle audio
|
|
360
|
-
*/
|
|
361
|
-
async toggleAudio() {
|
|
362
|
-
if (this._isAudioMuted) {
|
|
363
|
-
await this.unmuteAudio();
|
|
364
|
-
}
|
|
365
|
-
else {
|
|
366
|
-
await this.muteAudio();
|
|
367
|
-
}
|
|
368
|
-
}
|
|
369
|
-
/**
|
|
370
|
-
* Set audio device
|
|
371
|
-
*/
|
|
372
|
-
async setAudioDevice(deviceId) {
|
|
373
|
-
if (!this.room) {
|
|
374
|
-
throw new Error('Room not initialized');
|
|
375
|
-
}
|
|
376
|
-
try {
|
|
377
|
-
await this.room.localParticipant.setMicrophoneEnabled(true, { deviceId });
|
|
378
|
-
}
|
|
379
|
-
catch (error) {
|
|
380
|
-
logger.error("Failed to set audio device:", error);
|
|
381
|
-
throw error;
|
|
382
|
-
}
|
|
383
|
-
}
|
|
384
|
-
/**
|
|
385
|
-
* Get available audio devices
|
|
386
|
-
*/
|
|
387
|
-
async getAudioDevices() {
|
|
388
|
-
try {
|
|
389
|
-
const devices = await navigator.mediaDevices.enumerateDevices();
|
|
390
|
-
const audioDevices = devices.filter(device => device.kind === 'audioinput');
|
|
391
|
-
return audioDevices;
|
|
392
|
-
}
|
|
393
|
-
catch (error) {
|
|
394
|
-
logger.error("Failed to get audio devices:", error);
|
|
395
|
-
throw error;
|
|
396
|
-
}
|
|
397
|
-
}
|
|
398
|
-
/**
|
|
399
|
-
* Start audio level monitoring
|
|
400
|
-
*/
|
|
401
|
-
startAudioLevelMonitoring() {
|
|
402
|
-
if (!this.room) {
|
|
403
|
-
return;
|
|
404
|
-
}
|
|
405
|
-
// TODO: Implement audio level monitoring when LiveKit provides the API
|
|
406
|
-
}
|
|
407
|
-
/**
|
|
408
|
-
* Stop audio level monitoring
|
|
409
|
-
*/
|
|
410
|
-
stopAudioLevelMonitoring() {
|
|
411
|
-
this._audioLevel = 0;
|
|
412
|
-
}
|
|
413
|
-
/**
|
|
414
|
-
* Reset state on disconnect
|
|
415
|
-
*/
|
|
416
|
-
reset() {
|
|
417
|
-
this._isAudioEnabled = false;
|
|
418
|
-
this._isAudioMuted = false;
|
|
419
|
-
this._audioLevel = 0;
|
|
420
|
-
this.emit('audioStateChange', {
|
|
421
|
-
isAudioMuted: false,
|
|
422
|
-
isAudioEnabled: false,
|
|
423
|
-
audioLevel: 0
|
|
424
|
-
});
|
|
425
|
-
}
|
|
426
|
-
}
|
|
427
|
-
|
|
428
|
-
/**
|
|
429
|
-
* Manages video controls for LiveKit room
|
|
430
|
-
* Provides methods to enable/disable camera, manage video devices,
|
|
431
|
-
* and control video state.
|
|
432
|
-
*/
|
|
433
|
-
class VideoManager extends EventEmitter {
|
|
434
|
-
constructor(room) {
|
|
435
|
-
super();
|
|
436
|
-
this.room = null;
|
|
437
|
-
this._isVideoEnabled = false;
|
|
438
|
-
this._isVideoHidden = false;
|
|
439
|
-
if (room) {
|
|
440
|
-
this.setRoom(room);
|
|
441
|
-
}
|
|
442
|
-
}
|
|
443
|
-
get isVideoEnabled() {
|
|
444
|
-
return this._isVideoEnabled;
|
|
445
|
-
}
|
|
446
|
-
get isVideoHidden() {
|
|
447
|
-
return this._isVideoHidden;
|
|
448
|
-
}
|
|
449
|
-
/**
|
|
450
|
-
* Set the LiveKit room instance
|
|
451
|
-
*/
|
|
452
|
-
setRoom(room) {
|
|
453
|
-
// Clean up previous room listeners
|
|
454
|
-
if (this.room) {
|
|
455
|
-
this.cleanupListeners();
|
|
456
|
-
}
|
|
457
|
-
this.room = room;
|
|
458
|
-
if (room && room.state !== 'disconnected') {
|
|
459
|
-
this.setupListeners();
|
|
460
|
-
this.updateState();
|
|
461
|
-
}
|
|
462
|
-
}
|
|
463
|
-
/**
|
|
464
|
-
* Setup event listeners for room
|
|
465
|
-
*/
|
|
466
|
-
setupListeners() {
|
|
467
|
-
if (!this.room)
|
|
468
|
-
return;
|
|
469
|
-
const localParticipant = this.room.localParticipant;
|
|
470
|
-
const handleTrackMuted = (track) => {
|
|
471
|
-
if (track.source === 'camera') {
|
|
472
|
-
this._isVideoHidden = true;
|
|
473
|
-
this.emit('videoStateChange', { isVideoHidden: true });
|
|
474
|
-
}
|
|
475
|
-
};
|
|
476
|
-
const handleTrackUnmuted = (track) => {
|
|
477
|
-
if (track.source === 'camera') {
|
|
478
|
-
this._isVideoHidden = false;
|
|
479
|
-
this.emit('videoStateChange', { isVideoHidden: false });
|
|
480
|
-
}
|
|
481
|
-
};
|
|
482
|
-
const handleTrackPublished = (publication) => {
|
|
483
|
-
if (publication.source === 'camera') {
|
|
484
|
-
this._isVideoEnabled = true;
|
|
485
|
-
this.emit('videoStateChange', { isVideoEnabled: true });
|
|
486
|
-
}
|
|
487
|
-
};
|
|
488
|
-
const handleTrackUnpublished = (publication) => {
|
|
489
|
-
if (publication.source === 'camera') {
|
|
490
|
-
this._isVideoEnabled = false;
|
|
491
|
-
this.emit('videoStateChange', { isVideoEnabled: false });
|
|
492
|
-
}
|
|
493
|
-
};
|
|
494
|
-
localParticipant.on('trackMuted', handleTrackMuted);
|
|
495
|
-
localParticipant.on('trackUnmuted', handleTrackUnmuted);
|
|
496
|
-
localParticipant.on('trackPublished', handleTrackPublished);
|
|
497
|
-
localParticipant.on('trackUnpublished', handleTrackUnpublished);
|
|
498
|
-
}
|
|
499
|
-
/**
|
|
500
|
-
* Clean up event listeners
|
|
501
|
-
*/
|
|
502
|
-
cleanupListeners() {
|
|
503
|
-
if (!this.room)
|
|
504
|
-
return;
|
|
505
|
-
// LiveKit handles cleanup automatically when participant is removed
|
|
506
|
-
}
|
|
507
|
-
/**
|
|
508
|
-
* Update video state from room
|
|
509
|
-
*/
|
|
510
|
-
updateState() {
|
|
511
|
-
if (!this.room || this.room.state === 'disconnected') {
|
|
512
|
-
this._isVideoEnabled = false;
|
|
513
|
-
this._isVideoHidden = false;
|
|
514
|
-
return;
|
|
515
|
-
}
|
|
516
|
-
const localParticipant = this.room.localParticipant;
|
|
517
|
-
this._isVideoEnabled = localParticipant.isCameraEnabled;
|
|
518
|
-
this._isVideoHidden = false; // LiveKit doesn't have a direct hidden state for video
|
|
519
|
-
}
|
|
520
|
-
/**
|
|
521
|
-
* Enable video
|
|
522
|
-
*/
|
|
523
|
-
async enableVideo() {
|
|
524
|
-
if (!this.room) {
|
|
525
|
-
throw new Error('Room not initialized');
|
|
526
|
-
}
|
|
527
|
-
try {
|
|
528
|
-
await this.room.localParticipant.setCameraEnabled(true);
|
|
529
|
-
this._isVideoEnabled = true;
|
|
530
|
-
this.emit('videoStateChange', { isVideoEnabled: true });
|
|
531
|
-
}
|
|
532
|
-
catch (error) {
|
|
533
|
-
logger.error("Failed to enable video:", error);
|
|
534
|
-
throw error;
|
|
535
|
-
}
|
|
536
|
-
}
|
|
537
|
-
/**
|
|
538
|
-
* Disable video
|
|
539
|
-
*/
|
|
540
|
-
async disableVideo() {
|
|
541
|
-
if (!this.room) {
|
|
542
|
-
throw new Error('Room not initialized');
|
|
543
|
-
}
|
|
544
|
-
try {
|
|
545
|
-
await this.room.localParticipant.setCameraEnabled(false);
|
|
546
|
-
this._isVideoEnabled = false;
|
|
547
|
-
this.emit('videoStateChange', { isVideoEnabled: false });
|
|
548
|
-
}
|
|
549
|
-
catch (error) {
|
|
550
|
-
logger.error("Failed to disable video:", error);
|
|
551
|
-
throw error;
|
|
552
|
-
}
|
|
553
|
-
}
|
|
554
|
-
/**
|
|
555
|
-
* Hide video
|
|
556
|
-
*/
|
|
557
|
-
async hideVideo() {
|
|
558
|
-
await this.disableVideo();
|
|
559
|
-
}
|
|
560
|
-
/**
|
|
561
|
-
* Show video
|
|
562
|
-
*/
|
|
563
|
-
async showVideo() {
|
|
564
|
-
await this.enableVideo();
|
|
565
|
-
}
|
|
566
|
-
/**
|
|
567
|
-
* Toggle video
|
|
568
|
-
*/
|
|
569
|
-
async toggleVideo() {
|
|
570
|
-
if (this._isVideoEnabled) {
|
|
571
|
-
await this.disableVideo();
|
|
572
|
-
}
|
|
573
|
-
else {
|
|
574
|
-
await this.enableVideo();
|
|
575
|
-
}
|
|
576
|
-
}
|
|
577
|
-
/**
|
|
578
|
-
* Set video device
|
|
579
|
-
*/
|
|
580
|
-
async setVideoDevice(deviceId) {
|
|
581
|
-
if (!this.room) {
|
|
582
|
-
throw new Error('Room not initialized');
|
|
583
|
-
}
|
|
584
|
-
try {
|
|
585
|
-
await this.room.localParticipant.setCameraEnabled(true, { deviceId });
|
|
586
|
-
}
|
|
587
|
-
catch (error) {
|
|
588
|
-
logger.error("Failed to set video device:", error);
|
|
589
|
-
throw error;
|
|
590
|
-
}
|
|
591
|
-
}
|
|
592
|
-
/**
|
|
593
|
-
* Get available video devices
|
|
594
|
-
*/
|
|
595
|
-
async getVideoDevices() {
|
|
596
|
-
try {
|
|
597
|
-
const devices = await navigator.mediaDevices.enumerateDevices();
|
|
598
|
-
const videoDevices = devices.filter(device => device.kind === 'videoinput');
|
|
599
|
-
return videoDevices;
|
|
600
|
-
}
|
|
601
|
-
catch (error) {
|
|
602
|
-
logger.error("Failed to get video devices:", error);
|
|
603
|
-
throw error;
|
|
604
|
-
}
|
|
605
|
-
}
|
|
606
|
-
/**
|
|
607
|
-
* Set video quality
|
|
608
|
-
*/
|
|
609
|
-
async setVideoQuality(quality) {
|
|
610
|
-
if (!this.room) {
|
|
611
|
-
throw new Error('Room not initialized');
|
|
612
|
-
}
|
|
613
|
-
try {
|
|
614
|
-
// TODO: Implement video quality settings when LiveKit provides the API
|
|
615
|
-
logger.warn('Video quality setting not yet implemented');
|
|
616
|
-
}
|
|
617
|
-
catch (error) {
|
|
618
|
-
logger.error("Failed to set video quality:", error);
|
|
619
|
-
throw error;
|
|
620
|
-
}
|
|
621
|
-
}
|
|
622
|
-
/**
|
|
623
|
-
* Reset state on disconnect
|
|
624
|
-
*/
|
|
625
|
-
reset() {
|
|
626
|
-
this._isVideoEnabled = false;
|
|
627
|
-
this._isVideoHidden = false;
|
|
628
|
-
this.emit('videoStateChange', {
|
|
629
|
-
isVideoEnabled: false,
|
|
630
|
-
isVideoHidden: false
|
|
631
|
-
});
|
|
632
|
-
}
|
|
633
|
-
}
|
|
634
|
-
|
|
635
|
-
/**
|
|
636
|
-
* Manages screen sharing controls for LiveKit room
|
|
637
|
-
* Provides methods to enable/disable screen sharing, manage screen share state,
|
|
638
|
-
* and handle screen sharing with audio support.
|
|
639
|
-
*/
|
|
640
|
-
class ScreenShareManager extends EventEmitter {
|
|
641
|
-
constructor(room) {
|
|
642
|
-
super();
|
|
643
|
-
this.room = null;
|
|
644
|
-
this._isScreenShareEnabled = false;
|
|
645
|
-
this._isScreenShareActive = false;
|
|
646
|
-
if (room) {
|
|
647
|
-
this.setRoom(room);
|
|
648
|
-
}
|
|
649
|
-
}
|
|
650
|
-
get isScreenShareEnabled() {
|
|
651
|
-
return this._isScreenShareEnabled;
|
|
652
|
-
}
|
|
653
|
-
get isScreenShareActive() {
|
|
654
|
-
return this._isScreenShareActive;
|
|
655
|
-
}
|
|
656
|
-
/**
|
|
657
|
-
* Set the LiveKit room instance
|
|
658
|
-
*/
|
|
659
|
-
setRoom(room) {
|
|
660
|
-
// Clean up previous room listeners
|
|
661
|
-
if (this.room) {
|
|
662
|
-
this.cleanupListeners();
|
|
663
|
-
}
|
|
664
|
-
this.room = room;
|
|
665
|
-
if (room && room.state !== 'disconnected') {
|
|
666
|
-
this.setupListeners();
|
|
667
|
-
this.updateState();
|
|
668
|
-
}
|
|
669
|
-
}
|
|
670
|
-
/**
|
|
671
|
-
* Setup event listeners for room
|
|
672
|
-
*/
|
|
673
|
-
setupListeners() {
|
|
674
|
-
if (!this.room)
|
|
675
|
-
return;
|
|
676
|
-
const localParticipant = this.room.localParticipant;
|
|
677
|
-
const handleTrackPublished = (track) => {
|
|
678
|
-
if (track.source === 'screen_share') {
|
|
679
|
-
this._isScreenShareActive = true;
|
|
680
|
-
this.emit('screenShareStateChange', { isScreenShareActive: true });
|
|
681
|
-
}
|
|
682
|
-
};
|
|
683
|
-
const handleTrackUnpublished = (track) => {
|
|
684
|
-
if (track.source === 'screen_share') {
|
|
685
|
-
this._isScreenShareActive = false;
|
|
686
|
-
this.emit('screenShareStateChange', { isScreenShareActive: false });
|
|
687
|
-
}
|
|
688
|
-
};
|
|
689
|
-
const handleTrackMuted = (track) => {
|
|
690
|
-
if (track.source === 'screen_share') {
|
|
691
|
-
this._isScreenShareActive = false;
|
|
692
|
-
this.emit('screenShareStateChange', { isScreenShareActive: false });
|
|
693
|
-
}
|
|
694
|
-
};
|
|
695
|
-
const handleTrackUnmuted = (track) => {
|
|
696
|
-
if (track.source === 'screen_share') {
|
|
697
|
-
this._isScreenShareActive = true;
|
|
698
|
-
this.emit('screenShareStateChange', { isScreenShareActive: true });
|
|
699
|
-
}
|
|
700
|
-
};
|
|
701
|
-
localParticipant.on('trackPublished', handleTrackPublished);
|
|
702
|
-
localParticipant.on('trackUnpublished', handleTrackUnpublished);
|
|
703
|
-
localParticipant.on('trackMuted', handleTrackMuted);
|
|
704
|
-
localParticipant.on('trackUnmuted', handleTrackUnmuted);
|
|
705
|
-
}
|
|
706
|
-
/**
|
|
707
|
-
* Clean up event listeners
|
|
708
|
-
*/
|
|
709
|
-
cleanupListeners() {
|
|
710
|
-
if (!this.room)
|
|
711
|
-
return;
|
|
712
|
-
// LiveKit handles cleanup automatically when participant is removed
|
|
713
|
-
}
|
|
714
|
-
/**
|
|
715
|
-
* Update screen share state from room
|
|
716
|
-
*/
|
|
717
|
-
updateState() {
|
|
718
|
-
if (!this.room || this.room.state === 'disconnected') {
|
|
719
|
-
this._isScreenShareEnabled = false;
|
|
720
|
-
this._isScreenShareActive = false;
|
|
721
|
-
return;
|
|
722
|
-
}
|
|
723
|
-
const localParticipant = this.room.localParticipant;
|
|
724
|
-
this._isScreenShareEnabled = localParticipant.isScreenShareEnabled;
|
|
725
|
-
this._isScreenShareActive = localParticipant.isScreenShareEnabled;
|
|
726
|
-
}
|
|
727
|
-
/**
|
|
728
|
-
* Enable screen share
|
|
729
|
-
*/
|
|
730
|
-
async enableScreenShare() {
|
|
731
|
-
if (!this.room) {
|
|
732
|
-
throw new Error('Room not initialized');
|
|
733
|
-
}
|
|
734
|
-
try {
|
|
735
|
-
// Use setScreenShareEnabled with options to include current tab
|
|
736
|
-
await this.room.localParticipant.setScreenShareEnabled(true, {
|
|
737
|
-
// @ts-ignore - selfBrowserSurface is a valid Chrome option but may not be in types
|
|
738
|
-
selfBrowserSurface: "include",
|
|
739
|
-
surfaceSwitching: "include",
|
|
740
|
-
systemAudio: "include",
|
|
741
|
-
});
|
|
742
|
-
this._isScreenShareEnabled = true;
|
|
743
|
-
this._isScreenShareActive = true;
|
|
744
|
-
this.emit('screenShareStateChange', { isScreenShareEnabled: true, isScreenShareActive: true });
|
|
745
|
-
}
|
|
746
|
-
catch (error) {
|
|
747
|
-
logger.error("Failed to enable screen share:", error);
|
|
748
|
-
throw error;
|
|
749
|
-
}
|
|
750
|
-
}
|
|
751
|
-
/**
|
|
752
|
-
* Disable screen share
|
|
753
|
-
*/
|
|
754
|
-
async disableScreenShare() {
|
|
755
|
-
if (!this.room) {
|
|
756
|
-
throw new Error('Room not initialized');
|
|
757
|
-
}
|
|
758
|
-
try {
|
|
759
|
-
await this.room.localParticipant.setScreenShareEnabled(false);
|
|
760
|
-
this._isScreenShareEnabled = false;
|
|
761
|
-
this._isScreenShareActive = false;
|
|
762
|
-
this.emit('screenShareStateChange', { isScreenShareEnabled: false, isScreenShareActive: false });
|
|
763
|
-
}
|
|
764
|
-
catch (error) {
|
|
765
|
-
logger.error("Failed to disable screen share:", error);
|
|
766
|
-
throw error;
|
|
767
|
-
}
|
|
768
|
-
}
|
|
769
|
-
/**
|
|
770
|
-
* Toggle screen share
|
|
771
|
-
*/
|
|
772
|
-
async toggleScreenShare() {
|
|
773
|
-
if (this._isScreenShareEnabled) {
|
|
774
|
-
await this.disableScreenShare();
|
|
775
|
-
}
|
|
776
|
-
else {
|
|
777
|
-
await this.enableScreenShare();
|
|
778
|
-
}
|
|
779
|
-
}
|
|
780
|
-
/**
|
|
781
|
-
* Enable screen share with audio
|
|
782
|
-
*/
|
|
783
|
-
async enableScreenShareWithAudio() {
|
|
784
|
-
if (!this.room) {
|
|
785
|
-
throw new Error('Room not initialized');
|
|
786
|
-
}
|
|
787
|
-
try {
|
|
788
|
-
// Create screen tracks with audio enabled and include current tab
|
|
789
|
-
const tracks = await this.room.localParticipant.createScreenTracks({
|
|
790
|
-
audio: true,
|
|
791
|
-
// @ts-ignore - selfBrowserSurface is a valid Chrome option but may not be in types
|
|
792
|
-
selfBrowserSurface: "include",
|
|
793
|
-
surfaceSwitching: "include",
|
|
794
|
-
systemAudio: "include",
|
|
795
|
-
});
|
|
796
|
-
// Publish each track
|
|
797
|
-
tracks.forEach((track) => {
|
|
798
|
-
this.room.localParticipant.publishTrack(track);
|
|
799
|
-
});
|
|
800
|
-
this._isScreenShareEnabled = true;
|
|
801
|
-
}
|
|
802
|
-
catch (error) {
|
|
803
|
-
logger.error("Failed to enable screen share with audio:", error);
|
|
804
|
-
throw error;
|
|
805
|
-
}
|
|
806
|
-
}
|
|
807
|
-
/**
|
|
808
|
-
* Get screen share tracks
|
|
809
|
-
*/
|
|
810
|
-
async getScreenShareTracks() {
|
|
811
|
-
if (!this.room) {
|
|
812
|
-
return [];
|
|
813
|
-
}
|
|
814
|
-
try {
|
|
815
|
-
const localParticipant = this.room.localParticipant;
|
|
816
|
-
const screenShareTracks = [];
|
|
817
|
-
// Get all published tracks
|
|
818
|
-
localParticipant.trackPublications.forEach((publication) => {
|
|
819
|
-
if (publication.source === 'screen_share' && publication.track) {
|
|
820
|
-
screenShareTracks.push(publication.track);
|
|
821
|
-
}
|
|
822
|
-
});
|
|
823
|
-
return screenShareTracks;
|
|
824
|
-
}
|
|
825
|
-
catch (error) {
|
|
826
|
-
logger.error("Failed to get screen share tracks:", error);
|
|
827
|
-
throw error;
|
|
828
|
-
}
|
|
829
|
-
}
|
|
830
|
-
/**
|
|
831
|
-
* Reset state on disconnect
|
|
832
|
-
*/
|
|
833
|
-
reset() {
|
|
834
|
-
this._isScreenShareEnabled = false;
|
|
835
|
-
this._isScreenShareActive = false;
|
|
836
|
-
this.emit('screenShareStateChange', {
|
|
837
|
-
isScreenShareEnabled: false,
|
|
838
|
-
isScreenShareActive: false
|
|
839
|
-
});
|
|
840
|
-
}
|
|
841
|
-
}
|
|
842
|
-
|
|
843
|
-
/**
|
|
844
|
-
* Handles incoming messages from Convai
|
|
845
|
-
* Processes various message types from the AI assistant and updates chat history
|
|
846
|
-
*/
|
|
847
|
-
class MessageHandler extends EventEmitter {
|
|
848
|
-
constructor(room, latencyMonitor) {
|
|
849
|
-
super();
|
|
850
|
-
this.room = null;
|
|
851
|
-
this.chatMessages = [];
|
|
852
|
-
this.userTranscription = "";
|
|
853
|
-
this.isBotResponding = false;
|
|
854
|
-
this.isSpeaking = false;
|
|
855
|
-
this.latencyMonitor = null;
|
|
856
|
-
if (latencyMonitor) {
|
|
857
|
-
this.latencyMonitor = latencyMonitor;
|
|
858
|
-
}
|
|
859
|
-
if (room) {
|
|
860
|
-
this.setRoom(room);
|
|
861
|
-
}
|
|
862
|
-
}
|
|
863
|
-
/**
|
|
864
|
-
* Set the latency monitor
|
|
865
|
-
*/
|
|
866
|
-
setLatencyMonitor(monitor) {
|
|
867
|
-
this.latencyMonitor = monitor;
|
|
868
|
-
}
|
|
869
|
-
/**
|
|
870
|
-
* Get current chat messages
|
|
871
|
-
*/
|
|
872
|
-
getChatMessages() {
|
|
873
|
-
return this.chatMessages;
|
|
874
|
-
}
|
|
875
|
-
/**
|
|
876
|
-
* Get current user transcription
|
|
877
|
-
*/
|
|
878
|
-
getUserTranscription() {
|
|
879
|
-
return this.userTranscription;
|
|
880
|
-
}
|
|
881
|
-
/**
|
|
882
|
-
* Get bot responding state
|
|
883
|
-
*/
|
|
884
|
-
getIsBotResponding() {
|
|
885
|
-
return this.isBotResponding;
|
|
886
|
-
}
|
|
887
|
-
/**
|
|
888
|
-
* Get speaking state
|
|
889
|
-
*/
|
|
890
|
-
getIsSpeaking() {
|
|
891
|
-
return this.isSpeaking;
|
|
892
|
-
}
|
|
893
|
-
/**
|
|
894
|
-
* Set the LiveKit room instance
|
|
895
|
-
*/
|
|
896
|
-
setRoom(room) {
|
|
897
|
-
// Clean up previous room listeners
|
|
898
|
-
if (this.room) {
|
|
899
|
-
this.cleanupListeners();
|
|
900
|
-
}
|
|
901
|
-
this.room = room;
|
|
902
|
-
this.setupListeners();
|
|
903
|
-
}
|
|
904
|
-
/**
|
|
905
|
-
* Setup message listeners
|
|
906
|
-
*/
|
|
907
|
-
setupListeners() {
|
|
908
|
-
if (!this.room)
|
|
909
|
-
return;
|
|
910
|
-
this.room.on(livekitClient.RoomEvent.DataReceived, this.handleDataReceived.bind(this));
|
|
911
|
-
}
|
|
912
|
-
/**
|
|
913
|
-
* Clean up message listeners
|
|
914
|
-
*/
|
|
915
|
-
cleanupListeners() {
|
|
916
|
-
if (!this.room)
|
|
917
|
-
return;
|
|
918
|
-
this.room.off(livekitClient.RoomEvent.DataReceived, this.handleDataReceived.bind(this));
|
|
919
|
-
}
|
|
920
|
-
/**
|
|
921
|
-
* Handle incoming data message
|
|
922
|
-
*/
|
|
923
|
-
handleDataReceived(payload, participant) {
|
|
924
|
-
try {
|
|
925
|
-
// Decode bytes to string
|
|
926
|
-
const decoder = new TextDecoder();
|
|
927
|
-
const messageString = decoder.decode(payload);
|
|
928
|
-
// Parse JSON
|
|
929
|
-
const messageData = JSON.parse(messageString);
|
|
930
|
-
// Extract and categorize messages for chat display
|
|
931
|
-
const timestamp = new Date().toISOString();
|
|
932
|
-
const messageId = `${messageData.type}-${Date.now()}-${Math.random()}`;
|
|
933
|
-
// Handle different message types
|
|
934
|
-
switch (messageData.type) {
|
|
935
|
-
// User text messages - Skip these, only allow user-transcription
|
|
936
|
-
case "user_text_message":
|
|
937
|
-
case "user-llm-text":
|
|
938
|
-
case "text-input":
|
|
939
|
-
case "user-input":
|
|
940
|
-
case "text-message":
|
|
941
|
-
case "chat-message":
|
|
942
|
-
case "input-text":
|
|
943
|
-
case "message":
|
|
944
|
-
// Skip user text messages - only allow user-transcription
|
|
945
|
-
break;
|
|
946
|
-
// Bot Ready - Bot is ready, connection should be true
|
|
947
|
-
case "bot-ready":
|
|
948
|
-
this.emit("botReady");
|
|
949
|
-
break;
|
|
950
|
-
// Bot LLM Started - Begin streaming response
|
|
951
|
-
case "bot-llm-started":
|
|
952
|
-
this.isBotResponding = true;
|
|
953
|
-
this.emit("botRespondingChange", true);
|
|
954
|
-
// Mark any accumulated user message as final before starting bot response
|
|
955
|
-
const lastMsg = this.chatMessages[this.chatMessages.length - 1];
|
|
956
|
-
if (lastMsg &&
|
|
957
|
-
lastMsg.type === "user-transcription" &&
|
|
958
|
-
lastMsg.isFinal) {
|
|
959
|
-
this.chatMessages[this.chatMessages.length - 1] = {
|
|
960
|
-
...lastMsg,
|
|
961
|
-
isFinal: false,
|
|
962
|
-
};
|
|
963
|
-
}
|
|
964
|
-
// Add initial streaming message to chat
|
|
965
|
-
const streamingMessage = {
|
|
966
|
-
id: messageId,
|
|
967
|
-
type: "bot-llm-text",
|
|
968
|
-
content: "",
|
|
969
|
-
timestamp: timestamp,
|
|
970
|
-
isFinal: true,
|
|
971
|
-
};
|
|
972
|
-
this.chatMessages.push(streamingMessage);
|
|
973
|
-
this.emit("messagesChange", this.chatMessages);
|
|
974
|
-
break;
|
|
975
|
-
// Bot LLM Text Messages - Streaming chunks
|
|
976
|
-
case "bot-llm-text":
|
|
977
|
-
if (messageData.data?.text) {
|
|
978
|
-
const newChunk = messageData.data.text;
|
|
979
|
-
// Update the streaming message in chat
|
|
980
|
-
const lastMessage = this.chatMessages[this.chatMessages.length - 1];
|
|
981
|
-
if (lastMessage && lastMessage.isFinal) {
|
|
982
|
-
this.chatMessages[this.chatMessages.length - 1] = {
|
|
983
|
-
...lastMessage,
|
|
984
|
-
content: lastMessage.content + newChunk,
|
|
985
|
-
};
|
|
986
|
-
}
|
|
987
|
-
else {
|
|
988
|
-
// If no streaming message exists yet, create one
|
|
989
|
-
const streamingMsg = {
|
|
990
|
-
id: `${messageData.type}-${Date.now()}-${Math.random()}`,
|
|
991
|
-
type: "bot-llm-text",
|
|
992
|
-
content: newChunk,
|
|
993
|
-
timestamp: new Date().toISOString(),
|
|
994
|
-
isFinal: true,
|
|
995
|
-
};
|
|
996
|
-
this.chatMessages.push(streamingMsg);
|
|
997
|
-
}
|
|
998
|
-
this.emit("messagesChange", this.chatMessages);
|
|
999
|
-
}
|
|
1000
|
-
break;
|
|
1001
|
-
// Bot LLM Stopped - Complete streaming response
|
|
1002
|
-
case "bot-llm-stopped":
|
|
1003
|
-
const lastBotMsg = this.chatMessages[this.chatMessages.length - 1];
|
|
1004
|
-
if (lastBotMsg && lastBotMsg.isFinal) {
|
|
1005
|
-
this.chatMessages[this.chatMessages.length - 1] = {
|
|
1006
|
-
...lastBotMsg,
|
|
1007
|
-
isFinal: false,
|
|
1008
|
-
};
|
|
1009
|
-
}
|
|
1010
|
-
// Clear streaming state
|
|
1011
|
-
this.isBotResponding = false;
|
|
1012
|
-
this.emit("botRespondingChange", false);
|
|
1013
|
-
this.emit("messagesChange", this.chatMessages);
|
|
1014
|
-
break;
|
|
1015
|
-
// User Transcription Messages
|
|
1016
|
-
case "user-transcription":
|
|
1017
|
-
if (messageData.data?.text) {
|
|
1018
|
-
const messageContent = messageData.data.text;
|
|
1019
|
-
if (messageData.data?.final) {
|
|
1020
|
-
// Final transcription - accumulate in single bubble until bot responds
|
|
1021
|
-
const lastUserMsg = this.chatMessages[this.chatMessages.length - 1];
|
|
1022
|
-
// Start latency measurement for voice when user finishes speaking
|
|
1023
|
-
if (this.latencyMonitor) {
|
|
1024
|
-
this.latencyMonitor.startMeasurement("voice", messageContent);
|
|
1025
|
-
}
|
|
1026
|
-
// Check if last message is an accumulating user message (isFinal=true)
|
|
1027
|
-
if (lastUserMsg &&
|
|
1028
|
-
lastUserMsg.type === "user-transcription" &&
|
|
1029
|
-
lastUserMsg.isFinal === true) {
|
|
1030
|
-
// Accumulate: append new content to existing message with space
|
|
1031
|
-
                                const accumulatedContent = lastUserMsg.content.trim() + " " + messageContent.trim();
                                this.chatMessages[this.chatMessages.length - 1] = {
                                    ...lastUserMsg,
                                    content: accumulatedContent,
                                    timestamp: timestamp,
                                };
                            }
                            else {
                                // Start new accumulating message (isFinal=true means "still accumulating")
                                const userMessage = {
                                    id: messageId,
                                    type: "user-transcription",
                                    content: messageContent,
                                    timestamp: timestamp,
                                    isFinal: true,
                                };
                                this.chatMessages.push(userMessage);
                            }
                            this.emit("messagesChange", this.chatMessages);
                            // Clear live transcription when final
                            this.userTranscription = "";
                            this.emit("userTranscriptionChange", "");
                        }
                        else {
                            // Non-final transcription - show in input field
                            this.userTranscription = messageContent;
                            this.emit("userTranscriptionChange", messageContent);
                        }
                    }
                    break;
                // Bot Emotion Messages
                case "bot-emotion":
                    if (messageData.data?.emotion) {
                        const emotionMessage = {
                            id: messageId,
                            type: "bot-emotion",
                            content: `${messageData.data.emotion} (scale: ${messageData.data.scale || 1})`,
                            timestamp,
                        };
                        this.chatMessages.push(emotionMessage);
                        console.log("BotEmotions", emotionMessage);
                        this.emit("messagesChange", this.chatMessages);
                    }
                    break;
                // Bot Started Speaking
                case "bot-started-speaking":
                    this.isSpeaking = true;
                    this.emit("speakingChange", true);
                    break;
                // Bot Stopped Speaking
                case "bot-stopped-speaking":
                    this.isSpeaking = false;
                    this.emit("speakingChange", false);
                    break;
                // Action Response Messages
                case "action-response":
                    if (messageData.data?.actions) {
                        const actionMessage = {
                            id: messageId,
                            type: "action",
                            content: `Actions: ${messageData.data.actions.join(", ")}`,
                            timestamp,
                        };
                        this.chatMessages.push(actionMessage);
                        this.emit("messagesChange", this.chatMessages);
                    }
                    break;
                // Behavior Tree Response Messages
                case "behavior-tree-response":
                    if (messageData.data?.narrative_section_id) {
                        const behaviorMessage = {
                            id: messageId,
                            type: "behavior-tree",
                            content: `Narrative Section: ${messageData.data.narrative_section_id}`,
                            timestamp,
                        };
                        this.chatMessages.push(behaviorMessage);
                        this.emit("messagesChange", this.chatMessages);
                    }
                    break;
                // Server Message - Contains various data types like blendshapes
                case "server-message":
                    if (messageData.data) {
                        const serverData = messageData.data;
                        // Check if this is blendshape data
                        if (serverData.type === "chunked-neurosync-blendshapes" &&
                            serverData.blendshapes) {
                            // Emit event for external listeners
                            this.emit("blendshapes", serverData);
                        }
                        else {
                            // Log other server message types
                            logger.info("[MessageHandler] Server message:", {
                                type: serverData.type,
                                dataKeys: Object.keys(serverData),
                            });
                        }
                    }
                    break;
                // Silently ignore other message types
                case "moderation-response":
                case "trigger-message":
                case "update-template-keys":
                case "update-dynamic-info":
                case "interrupt-bot":
                    break;
            }
        }
        catch (error) {
            logger.error("Failed to parse data message:", error);
        }
    }
    /**
     * Reset message state
     */
    reset() {
        this.chatMessages = [];
        this.userTranscription = "";
        this.isBotResponding = false;
        this.isSpeaking = false;
        this.emit("messagesChange", this.chatMessages);
        this.emit("userTranscriptionChange", "");
        this.emit("botRespondingChange", false);
        this.emit("speakingChange", false);
    }
}
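The switch above is the tail of MessageHandler.handleDataMessage: it folds final user transcriptions into the chat history, records bot emotions, actions and behavior-tree sections as typed entries, and mirrors speaking state through events. A minimal consumption sketch (illustrative, not part of the compiled file; ConvaiClient, shown further below, re-emits these handler events):

```typescript
import { ConvaiClient } from '@convai/web-sdk/core';

// Placeholder credentials for illustration only.
const client = new ConvaiClient({ apiKey: 'your-api-key', characterId: 'your-character-id' });

// Full chat history, re-emitted whenever the handler pushes or accumulates an
// entry (types include "user-transcription", "bot-emotion", "action", "behavior-tree").
client.on('messagesChange', (messages: any[]) => {
  for (const m of messages) {
    console.log(`[${m.type}]`, m.content);
  }
});

// Live (non-final) transcription, suitable for an input-field preview.
client.on('userTranscriptionChange', (text: string) => {
  console.log('listening:', text);
});

// Driven by the "bot-started-speaking" / "bot-stopped-speaking" cases above.
client.on('speakingChange', (isSpeaking: boolean) => {
  console.log(isSpeaking ? 'bot speaking' : 'bot silent');
});
```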
/**
 * Resolves the speaker ID based on the end user ID.
 *
 * - If endUserId is not provided (anonymous mode), returns null (no speaker ID sent to API)
 * - If endUserId is provided, calls Convai API to get or create a speaker ID for the user
 *
 * @param endUserId - The end user identifier (optional). If not provided, anonymous mode is used.
 * @param apiKey - Convai API key for authentication
 * @returns Promise resolving to the speaker ID or null if no endUserId provided
 */
async function resolveSpeakerId(endUserId, apiKey) {
    // Anonymous mode: No speaker ID (no persistent memory or analytics)
    if (!endUserId) {
        logger.info('Anonymous mode: No endUserId provided, speaker ID will not be sent');
        return null;
    }
    // User tracking mode: Get or create speaker ID via Convai API (enables memory and analytics)
    try {
        const speakerId = await getOrCreateSpeakerId(endUserId, apiKey);
        logger.info(`Resolved speaker ID: ${speakerId.substring(0, 8)}...`);
        return speakerId;
    }
    catch (error) {
        logger.error('Failed to resolve speaker ID:', error);
        throw error;
    }
}
/**
 * Gets or creates a speaker ID from Convai's speaker management API.
 * If the same endUserId is used multiple times, the same speaker ID is returned (idempotent).
 *
 * @param endUserId - The device or user identifier
 * @param apiKey - Convai API key for authentication
 * @returns Promise resolving to the speaker ID
 * @throws Error if API request fails
 */
async function getOrCreateSpeakerId(endUserId, apiKey) {
    const url = 'https://api.convai.com/user/speaker/new';
    logger.info(`Requesting speaker ID for endUserId: ${endUserId.substring(0, 8)}...`);
    try {
        const response = await fetch(url, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'CONVAI-API-KEY': apiKey,
            },
            body: JSON.stringify({
                deviceId: endUserId,
            }),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Speaker API request failed: ${response.status} ${response.statusText} - ${errorText}`);
        }
        const data = await response.json();
        if (!data.speakerId) {
            throw new Error('Speaker API did not return a speaker ID');
        }
        return data.speakerId;
    }
    catch (error) {
        if (error instanceof Error) {
            throw new Error(`Failed to get speaker ID: ${error.message}`);
        }
        throw new Error('Failed to get speaker ID: Unknown error');
    }
}
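The two helpers above are the entire speaker-management flow: anonymous mode short-circuits to null, otherwise the SDK POSTs the endUserId to Convai's speaker endpoint and reuses the returned ID. The sketch below simply mirrors that request so the wire shape is visible in one place; the endpoint, header and field names are taken from the code above and should be treated as an internal contract that may change between versions.

```typescript
// Mirrors getOrCreateSpeakerId above: same endpoint, header and body fields.
async function fetchSpeakerId(endUserId: string, apiKey: string): Promise<string> {
  const response = await fetch('https://api.convai.com/user/speaker/new', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'CONVAI-API-KEY': apiKey,
    },
    body: JSON.stringify({ deviceId: endUserId }),
  });
  if (!response.ok) {
    throw new Error(`Speaker API request failed: ${response.status}`);
  }
  const data = await response.json();
  return data.speakerId; // the same endUserId always maps to the same speaker ID
}
```

In practice you rarely call this yourself; passing `endUserId` in the ConvaiClient config triggers the same flow during connect().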
/**
 * LatencyMonitor - Tracks and manages latency measurements
 * This is for dev/debugging purposes only
 */
class LatencyMonitor extends EventEmitter {
    constructor(enabled = false) {
        super();
        this.measurements = [];
        this.pendingMeasurement = null;
        this.maxMeasurements = 100; // Keep last 100 measurements
        this.isEnabled = false;
        this.isEnabled = enabled;
    }
    /**
     * Enable latency monitoring
     */
    enable() {
        this.isEnabled = true;
        this.emit("enabledChange", true);
    }
    /**
     * Disable latency monitoring
     */
    disable() {
        this.isEnabled = false;
        this.emit("enabledChange", false);
    }
    /**
     * Check if monitoring is enabled
     */
    get enabled() {
        return this.isEnabled;
    }
    /**
     * Start a new latency measurement
     */
    startMeasurement(type, userMessage) {
        if (!this.isEnabled)
            return;
        const id = `${type}-${Date.now()}-${Math.random()}`;
        this.pendingMeasurement = {
            id,
            startTime: Date.now(),
            type,
            userMessage: userMessage?.substring(0, 50), // Keep first 50 chars
        };
    }
    /**
     * Complete the current measurement
     */
    endMeasurement() {
        if (!this.isEnabled || !this.pendingMeasurement)
            return;
        const endTime = Date.now();
        const latency = endTime - this.pendingMeasurement.startTime;
        const measurement = {
            id: this.pendingMeasurement.id,
            startTime: this.pendingMeasurement.startTime,
            endTime,
            latency,
            type: this.pendingMeasurement.type,
            userMessage: this.pendingMeasurement.userMessage,
        };
        // Add to measurements array
        this.measurements.push(measurement);
        // Keep only the last N measurements
        if (this.measurements.length > this.maxMeasurements) {
            this.measurements.shift();
        }
        // Clear pending measurement
        this.pendingMeasurement = null;
        // Emit new measurement
        this.emit("measurement", measurement);
        this.emit("measurementsChange", this.measurements);
    }
    /**
     * Cancel the current pending measurement
     */
    cancelMeasurement() {
        this.pendingMeasurement = null;
    }
    /**
     * Get all measurements
     */
    getMeasurements() {
        return [...this.measurements];
    }
    /**
     * Get the latest measurement
     */
    getLatestMeasurement() {
        return this.measurements.length > 0
            ? this.measurements[this.measurements.length - 1]
            : null;
    }
    /**
     * Get latency statistics
     */
    getStats() {
        if (this.measurements.length === 0)
            return null;
        const latencies = this.measurements.map((m) => m.latency).sort((a, b) => a - b);
        const sum = latencies.reduce((acc, val) => acc + val, 0);
        const p95Index = Math.floor(latencies.length * 0.95);
        return {
            average: sum / latencies.length,
            min: latencies[0],
            max: latencies[latencies.length - 1],
            median: latencies[Math.floor(latencies.length / 2)],
            p95: latencies[p95Index],
            count: latencies.length,
        };
    }
    /**
     * Clear all measurements
     */
    clear() {
        this.measurements = [];
        this.pendingMeasurement = null;
        this.emit("measurementsChange", this.measurements);
    }
    /**
     * Check if there's a pending measurement
     */
    get hasPendingMeasurement() {
        return this.pendingMeasurement !== null;
    }
    /**
     * Get pending measurement info
     */
    getPendingMeasurement() {
        return this.pendingMeasurement ? { ...this.pendingMeasurement } : null;
    }
}
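LatencyMonitor is a dev-only helper: it keeps a rolling window of the last 100 measurements of the gap between a user message and the bot starting to speak. A usage sketch (illustrative; getter, method and event names as defined above):

```typescript
import { ConvaiClient } from '@convai/web-sdk/core';

const client = new ConvaiClient({ apiKey: 'your-api-key', characterId: 'your-character-id' });

// Disabled by default; turn it on for debugging sessions only.
client.latencyMonitor.enable();

// Each completed measurement is also forwarded by the client as "latencyMeasurement".
client.on('latencyMeasurement', (m: any) => {
  console.log(`${m.type} round-trip: ${m.latency}ms`);
});

// Aggregate view over the retained window (up to 100 samples).
const stats = client.latencyMonitor.getStats();
if (stats) {
  console.log(`avg ${stats.average}ms, p95 ${stats.p95}ms over ${stats.count} samples`);
}
```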
/**
 * Main Convai client class for managing AI voice assistant connections
 * Provides complete interface for connecting to Convai's voice assistants,
 * managing real-time audio/video conversations, and handling messages.
 *
 * @example
 * ```typescript
 * import { ConvaiClient } from '@convai/web-sdk/core';
 *
 * // Recommended: Pass config to constructor
 * const client = new ConvaiClient({
 *   apiKey: 'your-api-key',
 *   characterId: 'your-character-id',
 *   endUserId: 'user-uuid', // Optional: enables memory & analytics
 *   enableVideo: true,
 * });
 *
 * // Listen for state changes
 * client.on('stateChange', (state) => {
 *   console.log('State:', state);
 * });
 *
 * // Connect (uses stored config from constructor)
 * await client.connect();
 *
 * // Send a message
 * client.sendUserTextMessage('Hello!');
 *
 * // Advanced: Override config or connect without constructor config
 * await client.connect({ apiKey: 'different-key', characterId: 'different-id' });
 *
 * // Disconnect and reconnect
 * await client.disconnect();
 * await client.reconnect();
 * ```
 */
class ConvaiClient extends EventEmitter {
    constructor(config) {
        super();
        this._connectionType = null;
        this._apiKey = null;
        this._characterId = null;
        this._speakerId = null;
        this._characterSessionId = "-1";
        this._isBotReady = false;
        this._participantSid = "";
        this._storedConfig = null;
        // Store config if provided
        if (config) {
            this._storedConfig = config;
        }
        // Initialize room
        this._room = new livekitClient.Room();
        // Initialize state
        this._state = {
            isConnected: false,
            isConnecting: false,
            isListening: false,
            isThinking: false,
            isSpeaking: false,
            agentState: "disconnected",
        };
        // Default audio settings for optimal interruption handling
        this._audioSettings = {
            echoCancellation: true,
            noiseSuppression: true,
            autoGainControl: true,
            sampleRate: 48000,
            channelCount: 1,
        };
        // Initialize managers
        this._audioManager = new AudioManager(this._room);
        this._videoManager = new VideoManager(this._room);
        this._screenShareManager = new ScreenShareManager(this._room);
        this._latencyMonitor = new LatencyMonitor(false); // Disabled by default
        this._messageHandler = new MessageHandler(this._room, this._latencyMonitor);
        // Setup event listeners
        this.setupEventListeners();
    }
    // Getters
    get state() {
        return { ...this._state };
    }
    get connectionType() {
        return this._connectionType;
    }
    get apiKey() {
        return this._apiKey;
    }
    get characterId() {
        return this._characterId;
    }
    get speakerId() {
        return this._speakerId;
    }
    get room() {
        return this._room;
    }
    get chatMessages() {
        return this._messageHandler.getChatMessages();
    }
    get userTranscription() {
        return this._messageHandler.getUserTranscription();
    }
    get characterSessionId() {
        return this._characterSessionId;
    }
    get isBotReady() {
        return this._isBotReady;
    }
    get audioControls() {
        return this._audioManager;
    }
    get videoControls() {
        return this._videoManager;
    }
    get screenShareControls() {
        return this._screenShareManager;
    }
    get latencyMonitor() {
        return this._latencyMonitor;
    }
    // Convenience getters to match React hook's top-level reactive state
    // These provide direct access without going through the control managers
    /**
     * Whether the user's microphone is currently muted
     * Convenience getter - equivalent to audioControls.isAudioMuted
     */
    get isAudioMuted() {
        return this._audioManager.isAudioMuted;
    }
    /**
     * Whether the user's video is currently enabled
     * Convenience getter - equivalent to videoControls.isVideoEnabled
     */
    get isVideoEnabled() {
        return this._videoManager.isVideoEnabled;
    }
    /**
     * Whether screen sharing is currently active
     * Convenience getter - equivalent to screenShareControls.isScreenShareActive
     */
    get isScreenShareActive() {
        return this._screenShareManager.isScreenShareActive;
    }
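The getters above expose both the control managers and the same top-level reactive flags the React hook surfaces. A small read-only usage sketch, assuming a `client` constructed as in the class JSDoc example (not part of the compiled file):

```typescript
// Snapshot of the aggregate state object (isConnected, isSpeaking, agentState, ...).
const { agentState, isConnected } = client.state;
console.log(agentState, isConnected);

// Convenience getters that bypass the control managers.
console.log('mic muted:', client.isAudioMuted);
console.log('camera on:', client.isVideoEnabled);
console.log('sharing screen:', client.isScreenShareActive);

// The underlying managers remain available for finer control.
const audio = client.audioControls;        // AudioManager
const video = client.videoControls;        // VideoManager
const screen = client.screenShareControls; // ScreenShareManager
```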
    /**
     * Setup event listeners for room and managers
     */
    setupEventListeners() {
        // Room event listeners
        this._room.on(livekitClient.RoomEvent.Disconnected, this.handleDisconnected.bind(this));
        this._room.on(livekitClient.RoomEvent.ConnectionStateChanged, this.handleConnectionStateChanged.bind(this));
        // Message handler events
        this._messageHandler.on("botReady", () => {
            this._isBotReady = true;
            this.updateState({ isConnected: true });
            this.emit("botReady");
        });
        this._messageHandler.on("messagesChange", (messages) => {
            this.emit("message", messages[messages.length - 1]);
            this.emit("messagesChange", messages);
        });
        this._messageHandler.on("userTranscriptionChange", (transcription) => {
            this.emit("userTranscriptionChange", transcription);
        });
        this._messageHandler.on("speakingChange", (isSpeaking) => {
            this.updateState({ isSpeaking });
            this.emit("speakingChange", isSpeaking);
            // End latency measurement when bot starts speaking
            if (isSpeaking) {
                this._latencyMonitor.endMeasurement();
            }
        });
        // Forward blendshapes events
        this._messageHandler.on("blendshapes", (data) => {
            this.emit("blendshapes", data);
        });
        this._messageHandler.on("botRespondingChange", (isResponding) => {
            this.updateState({ isThinking: isResponding });
        });
        // Forward latency monitor events
        this._latencyMonitor.on("measurement", (measurement) => {
            this.emit("latencyMeasurement", measurement);
        });
    }
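setupEventListeners is the glue: room and manager events are re-emitted on the client itself, so application code subscribes in one place. A subscription sketch (illustrative; assumes the `client` from the earlier examples):

```typescript
// Fired once the agent signals bot-ready; a safe point to send the first message.
client.on('botReady', () => {
  client.sendUserTextMessage('Hello!');
});

// Convenience event carrying only the most recent chat entry.
client.on('message', (latest: any) => console.log('new message:', latest));

// Chunked Neurosync blendshape frames forwarded from "server-message" payloads,
// e.g. to drive an avatar's facial animation.
client.on('blendshapes', (frame: any) => {
  // frame.blendshapes carries the chunked weights; apply them to your rig here.
});

// Aggregate state derived in updateState (see below).
client.on('stateChange', (state: any) => console.log('agent state:', state.agentState));
```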
    /**
     * Handle room disconnection
     */
    handleDisconnected() {
        this.updateState({
            isConnected: false,
            isConnecting: false,
            isSpeaking: false,
            isThinking: false,
            isListening: false,
        });
        this._isBotReady = false;
        this._messageHandler.reset();
        this.emit("disconnect");
    }
    /**
     * Handle connection state changes
     */
    handleConnectionStateChanged(state) {
        if (state === "disconnected") {
            this.updateState({
                isConnected: false,
                isConnecting: false,
            });
            this._isBotReady = false;
            this._messageHandler.reset();
        }
        else if (state === "connected") {
            this.updateState({
                isConnected: true,
                isConnecting: false,
            });
            this._isBotReady = false; // Wait for bot-ready message
        }
        else if (state === "connecting") {
            this.updateState({
                isConnecting: true,
                isConnected: false,
            });
            this._isBotReady = false;
        }
    }
    /**
     * Update state and emit changes
     */
    updateState(updates) {
        const oldState = { ...this._state };
        this._state = { ...this._state, ...updates };
        // Update agentState based on individual flags
        if (!this._state.isConnected) {
            this._state.agentState = "disconnected";
        }
        else if (this._state.isSpeaking) {
            this._state.agentState = "speaking";
        }
        else if (this._state.isThinking) {
            this._state.agentState = "thinking";
        }
        else if (this._state.isListening) {
            this._state.agentState = "listening";
        }
        else {
            this._state.agentState = "connected";
        }
        // Emit state change if changed
        if (JSON.stringify(oldState) !== JSON.stringify(this._state)) {
            this.emit("stateChange", this._state);
        }
    }
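updateState collapses the boolean flags into a single agentState with a fixed precedence: disconnected wins, then speaking, then thinking, then listening, with connected as the idle fallback. The helper below merely restates that ordering for reference; it is not part of the SDK API:

```typescript
type AgentState = 'disconnected' | 'speaking' | 'thinking' | 'listening' | 'connected';

// Same precedence as updateState above.
function deriveAgentState(s: {
  isConnected: boolean;
  isSpeaking: boolean;
  isThinking: boolean;
  isListening: boolean;
}): AgentState {
  if (!s.isConnected) return 'disconnected';
  if (s.isSpeaking) return 'speaking';
  if (s.isThinking) return 'thinking';
  if (s.isListening) return 'listening';
  return 'connected';
}
```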
    /**
     * Connect to a Convai character
     */
    async connect(config) {
        // Use provided config or stored config
        const finalConfig = config || this._storedConfig;
        if (!finalConfig) {
            throw new Error("No configuration provided. Pass config to connect() or store it in the client");
        }
        // Store config for reconnection
        this._storedConfig = finalConfig;
        // Add default URL if not provided
        const configWithDefaults = {
            // url: "https://realtime-api.convai.com",
            url: "https://realtime-api-stg.convai.com",
            ...finalConfig,
        };
        if (!configWithDefaults.apiKey || !configWithDefaults.characterId) {
            throw new Error("apiKey and characterId are required");
        }
        this.updateState({ isConnecting: true });
        try {
            // Store connection config
            this._apiKey = configWithDefaults.apiKey;
            this._characterId = configWithDefaults.characterId;
            // Resolve speaker ID based on endUserId
            if (configWithDefaults.endUserId) {
                logger.info(`Resolving speaker ID for endUserId: ${configWithDefaults.endUserId}`);
            }
            else {
                logger.info("Connecting in anonymous mode (no endUserId provided)");
            }
            const resolvedSpeakerId = await resolveSpeakerId(configWithDefaults.endUserId, configWithDefaults.apiKey);
            this._speakerId = resolvedSpeakerId;
            if (resolvedSpeakerId) {
                logger.info(`Speaker ID resolved: ${resolvedSpeakerId.substring(0, 8)}...`);
            }
            else {
                logger.info("No speaker ID - connecting without persistent memory");
            }
            // Determine connection type based on enableVideo
            const connType = configWithDefaults.enableVideo ? "video" : "audio";
            this._connectionType = connType;
            // Prepare request body with required parameters
            const requestBody = {
                character_id: configWithDefaults.characterId,
                ...(resolvedSpeakerId && { speaker_id: resolvedSpeakerId }),
                transport: "livekit",
                connection_type: connType,
                use_dynamic_pipeline: false,
                blendshape_provider: configWithDefaults.blendshapeProvider || "neurosync",
                blendshape_config: {
                    enable_chunking: true,
                    format: configWithDefaults.blendshapeConfig?.format || "mha",
                    output_fps: 60,
                },
                llm_provider: "dynamic",
                default_tts_enabled: configWithDefaults.ttsEnabled !== undefined
                    ? configWithDefaults.ttsEnabled
                    : true,
                ...(this._characterSessionId &&
                    this._characterSessionId !== "-1" && {
                    character_session_id: this._characterSessionId,
                }),
                ...(configWithDefaults.actionConfig && {
                    action_config: configWithDefaults.actionConfig,
                }),
            };
            // Call Core Service API
            const response = await fetch(`${configWithDefaults.url}/connect`, {
                method: "POST",
                headers: {
                    "x-api-key": configWithDefaults.apiKey,
                    "Content-Type": "application/json",
                },
                body: JSON.stringify(requestBody),
            });
            if (!response.ok) {
                const errorText = await response.text();
                logger.error("API Error Response:", {
                    status: response.status,
                    statusText: response.statusText,
                    body: errorText,
                });
                let errorMessage = `HTTP error! status: ${response.status}`;
                try {
                    const errorData = JSON.parse(errorText);
                    if (errorData.message) {
                        errorMessage = errorData.message;
                    }
                    else if (errorData.error) {
                        errorMessage = errorData.error;
                    }
                    else if (errorData.detail) {
                        errorMessage = errorData.detail;
                    }
                }
                catch (e) {
                    if (errorText) {
                        errorMessage = `${errorMessage}: ${errorText}`;
                    }
                }
                throw new Error(errorMessage);
            }
            const connectionData = await response.json();
            // Capture character_session_id from connection response
            if (connectionData.character_session_id) {
                this._characterSessionId = connectionData.character_session_id;
            }
            // Connect to LiveKit room
            await this._room.connect(connectionData.room_url, connectionData.token, {
                rtcConfig: {
                    iceTransportPolicy: "relay",
                },
            });
            // Enable microphone only if startWithAudioOn is true (default: false)
            // If false, microphone stays off until user enables it via audioControls
            if (configWithDefaults.startWithAudioOn) {
                await this._room.localParticipant.setMicrophoneEnabled(true, {
                    echoCancellation: this._audioSettings.echoCancellation,
                    noiseSuppression: this._audioSettings.noiseSuppression,
                    autoGainControl: this._audioSettings.autoGainControl,
                    sampleRate: this._audioSettings.sampleRate,
                    channelCount: this._audioSettings.channelCount,
                });
            }
            // Enable camera only if enableVideo is true AND startWithVideoOn is true
            if (configWithDefaults.enableVideo &&
                configWithDefaults.startWithVideoOn) {
                await this._room.localParticipant.setCameraEnabled(true);
            }
            // Ensure audio manager mirrors the actual microphone permission state
            this._audioManager.syncStateFromRoom({ emit: true });
            // Capture participant SID
            this._participantSid = this._room.localParticipant.sid;
            this.updateState({
                isConnected: true,
                isConnecting: false,
            });
            this.emit("connect");
        }
        catch (error) {
            logger.error("Connection failed:", error);
            this.updateState({
                isConnected: false,
                isConnecting: false,
            });
            this.emit("error", error);
            throw error;
        }
    }
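connect() accepts a config (or reuses the one stored at construction); the fields below are the ones the method actually reads, with placeholder values. Note that this build defaults url to the staging endpoint, and a caller-supplied url overrides it because the config is spread after the default:

```typescript
import { ConvaiClient } from '@convai/web-sdk/core';

const client = new ConvaiClient();

await client.connect({
  apiKey: 'your-api-key',
  characterId: 'your-character-id',

  // Optional: resolves a speaker ID for memory/analytics; omit for anonymous mode.
  endUserId: 'user-uuid',

  // Media behaviour. enableVideo switches connection_type to "video";
  // startWithAudioOn / startWithVideoOn control whether tracks publish immediately.
  enableVideo: true,
  startWithAudioOn: false,
  startWithVideoOn: false,

  // TTS and lipsync options forwarded in the /connect request body.
  ttsEnabled: true,
  blendshapeProvider: 'neurosync',
  blendshapeConfig: { format: 'mha' },

  // This build defaults url to https://realtime-api-stg.convai.com (staging);
  // pass url explicitly to target a different environment.
});
```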
    /**
     * Disconnect from the current character session
     */
    async disconnect() {
        if (this._room && this._room.state !== "disconnected") {
            try {
                await this._room.disconnect();
                this.resetConnectionState();
            }
            catch (error) {
                // Handle disconnect errors gracefully
                if (!(error instanceof Error && error.message.includes("disconnect"))) {
                    logger.error("Disconnect error:", error);
                }
                this.resetConnectionState();
            }
        }
    }
    /**
     * Reset connection state
     */
    resetConnectionState() {
        this.updateState({
            isConnected: false,
            isConnecting: false,
            isSpeaking: false,
            isThinking: false,
            isListening: false,
        });
        this._isBotReady = false;
        this._connectionType = null;
        this._apiKey = null;
        this._characterId = null;
        this._speakerId = null;
        this._messageHandler.reset();
        this._audioManager.reset();
        this._videoManager.reset();
        this._screenShareManager.reset();
    }
    /**
     * Reconnect - disconnect and connect again using stored config
     */
    async reconnect() {
        if (!this._storedConfig) {
            throw new Error("No stored config available for reconnection");
        }
        await this.disconnect();
        await this.connect(this._storedConfig);
    }
    /**
     * Reset the session ID to start a new conversation
     */
    resetSession() {
        this._characterSessionId = "-1";
        this._messageHandler.reset();
    }
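disconnect() and reconnect() operate on the stored config, while resetSession() clears the cached character_session_id so the next connect() starts a fresh conversation. A lifecycle sketch (assumes the `client` from the earlier examples):

```typescript
// Tear down the LiveKit room and clear per-connection state.
await client.disconnect();

// Reconnect with the config captured at construction / last connect().
await client.reconnect();

// Keep the connection workflow but forget the conversation: the session ID is
// reset to "-1", so the next connect() will not send character_session_id.
client.resetSession();
await client.reconnect();
```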
    /**
     * Send a text message to the character
     */
    sendUserTextMessage(text) {
        if (!this._room ||
            this._room.state === "disconnected" ||
            !this._room.localParticipant) {
            logger.warn("Cannot send message: not connected");
            return;
        }
        if (!text || !text.trim()) {
            return;
        }
        try {
            // Start latency measurement for text message
            this._latencyMonitor.startMeasurement("text", text.trim());
            const message = {
                type: "user_text_message",
                data: {
                    text: text.trim(),
                    participant_sid: this._participantSid || this._room.localParticipant.sid,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
        }
        catch (error) {
            logger.error("Failed to send user text message:", error);
            // Cancel latency measurement on error
            this._latencyMonitor.cancelMeasurement();
            throw error;
        }
    }
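sendUserTextMessage publishes a reliable `user_text_message` packet on the LiveKit data channel and starts a latency measurement when the monitor is enabled. A short sketch, with the wire shape (taken from the code above) noted for reference; assumes the `client` from earlier examples:

```typescript
client.sendUserTextMessage('What can you do?');

// Wire format published on the data channel (reliable), per the code above:
// {
//   type: "user_text_message",
//   data: { text: "What can you do?", participant_sid: "<local participant sid>" }
// }
```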
    /**
     * Send a trigger message to invoke specific character actions
     */
    sendTriggerMessage(triggerName, triggerMessage) {
        if (this._room && this._room.localParticipant) {
            const message = {
                type: "trigger-message",
                data: {
                    ...(triggerName && { trigger_name: triggerName }),
                    ...(triggerMessage && { trigger_message: triggerMessage }),
                    participant_sid: this._participantSid || this._room.localParticipant.sid,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
        }
    }
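Triggers invoke entry points configured on the character; both arguments are optional and only the provided ones are sent. The trigger names below are made up for illustration (assumes the `client` from earlier examples):

```typescript
// Fire a named trigger configured on the character.
client.sendTriggerMessage('greet_player');

// Or pass an accompanying message (either argument may be omitted).
client.sendTriggerMessage('quest_update', 'The player found the hidden key.');
```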
    /**
     * Send an interrupt message to stop the bot's current response
     */
    sendInterruptMessage() {
        if (!this._room ||
            this._room.state === "disconnected" ||
            !this._room.localParticipant) {
            logger.warn("Cannot send interrupt message: not connected");
            return;
        }
        try {
            const message = {
                type: "interrupt-bot",
                data: {
                    participant_sid: this._participantSid || this._room.localParticipant.sid,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
            logger.info("Interrupt message sent");
        }
        catch (error) {
            logger.error("Failed to send interrupt message:", error);
            throw error;
        }
    }
    /**
     * Update template keys in the character's context
     */
    updateTemplateKeys(templateKeys) {
        if (this._room &&
            this._room.localParticipant &&
            Object.keys(templateKeys).length > 0) {
            const message = {
                type: "update-template-keys",
                data: {
                    template_keys: templateKeys,
                    participant_sid: this._participantSid || this._room.localParticipant.sid,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
        }
    }
    /**
     * Update dynamic information about the current context
     */
    updateDynamicInfo(dynamicInfo) {
        if (this._room && this._room.localParticipant && dynamicInfo.text?.trim()) {
            const message = {
                type: "update-dynamic-info",
                data: {
                    dynamic_info: {
                        text: dynamicInfo.text.trim(),
                    },
                    participant_sid: this._participantSid || this._room.localParticipant.sid,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
        }
    }
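Both context updates go over the data channel: updateTemplateKeys fills prompt-template placeholders, while updateDynamicInfo pushes free-form situational text (only the trimmed text field is sent). Key names and text below are illustrative (assumes the `client` from earlier examples):

```typescript
// Replace {player_name}-style template placeholders (keys are illustrative).
client.updateTemplateKeys({
  player_name: 'Ada',
  location: 'engine room',
});

// Push free-form situational context; only the text field is sent.
client.updateDynamicInfo({
  text: 'The alarm is ringing and the lights just went out.',
});
```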
    /**
     * Toggle text-to-speech on or off
     */
    toggleTts(enabled) {
        if (!this._room ||
            this._room.state === "disconnected" ||
            !this._room.localParticipant) {
            return;
        }
        try {
            const message = {
                type: "tts-toggle",
                data: {
                    enabled: enabled,
                },
            };
            const encodedData = new TextEncoder().encode(JSON.stringify(message));
            this._room.localParticipant.publishData(encodedData, {
                reliable: true,
            });
        }
        catch (error) {
            logger.error("Failed to toggle TTS:", error);
            throw error;
        }
    }
}
exports.AudioManager = AudioManager;
exports.ConvaiClient = ConvaiClient;
exports.EventEmitter = EventEmitter;
exports.MessageHandler = MessageHandler;
exports.ScreenShareManager = ScreenShareManager;
exports.VideoManager = VideoManager;
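Two runtime controls round out the client API: toggleTts switches the character's speech synthesis on or off mid-session, and sendInterruptMessage stops the current response (useful for barge-in style UIs). A final sketch, assuming the `client` from the earlier examples:

```typescript
// Text-only mode: the character keeps responding, but without audio.
client.toggleTts(false);

// Re-enable speech later.
client.toggleTts(true);

// Stop the bot mid-utterance, e.g. when the user starts typing a new prompt.
client.sendInterruptMessage();
```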