@mobileai/react-native 0.9.17 → 0.9.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +28 -20
- package/MobileAIFloatingOverlay.podspec +25 -0
- package/android/build.gradle +61 -0
- package/android/src/main/AndroidManifest.xml +3 -0
- package/android/src/main/java/com/mobileai/overlay/FloatingOverlayView.kt +151 -0
- package/android/src/main/java/com/mobileai/overlay/MobileAIOverlayPackage.kt +23 -0
- package/android/src/newarch/com/mobileai/overlay/FloatingOverlayViewManager.kt +45 -0
- package/android/src/oldarch/com/mobileai/overlay/FloatingOverlayViewManager.kt +29 -0
- package/ios/MobileAIFloatingOverlayComponentView.mm +73 -0
- package/lib/module/components/AIAgent.js +902 -136
- package/lib/module/components/AIConsentDialog.js +439 -0
- package/lib/module/components/AgentChatBar.js +828 -134
- package/lib/module/components/AgentOverlay.js +2 -1
- package/lib/module/components/DiscoveryTooltip.js +21 -9
- package/lib/module/components/FloatingOverlayWrapper.js +108 -0
- package/lib/module/components/Icons.js +123 -0
- package/lib/module/config/endpoints.js +12 -2
- package/lib/module/core/AgentRuntime.js +373 -27
- package/lib/module/core/FiberAdapter.js +56 -0
- package/lib/module/core/FiberTreeWalker.js +186 -80
- package/lib/module/core/IdleDetector.js +19 -0
- package/lib/module/core/NativeAlertInterceptor.js +191 -0
- package/lib/module/core/systemPrompt.js +203 -45
- package/lib/module/index.js +3 -0
- package/lib/module/providers/GeminiProvider.js +72 -56
- package/lib/module/providers/ProviderFactory.js +6 -2
- package/lib/module/services/AudioInputService.js +3 -12
- package/lib/module/services/AudioOutputService.js +1 -13
- package/lib/module/services/ConversationService.js +166 -0
- package/lib/module/services/MobileAIKnowledgeRetriever.js +41 -0
- package/lib/module/services/VoiceService.js +29 -8
- package/lib/module/services/telemetry/MobileAI.js +44 -0
- package/lib/module/services/telemetry/TelemetryService.js +13 -1
- package/lib/module/services/telemetry/TouchAutoCapture.js +44 -18
- package/lib/module/specs/FloatingOverlayNativeComponent.ts +19 -0
- package/lib/module/support/CSATSurvey.js +95 -12
- package/lib/module/support/EscalationSocket.js +70 -1
- package/lib/module/support/ReportedIssueEventSource.js +148 -0
- package/lib/module/support/escalateTool.js +4 -2
- package/lib/module/support/index.js +1 -0
- package/lib/module/support/reportIssueTool.js +127 -0
- package/lib/module/support/supportPrompt.js +77 -9
- package/lib/module/tools/guideTool.js +2 -1
- package/lib/module/tools/longPressTool.js +4 -3
- package/lib/module/tools/pickerTool.js +6 -4
- package/lib/module/tools/tapTool.js +12 -3
- package/lib/module/tools/typeTool.js +19 -10
- package/lib/module/utils/logger.js +175 -6
- package/lib/typescript/react-native.config.d.ts +11 -0
- package/lib/typescript/src/components/AIAgent.d.ts +28 -2
- package/lib/typescript/src/components/AIConsentDialog.d.ts +153 -0
- package/lib/typescript/src/components/AgentChatBar.d.ts +15 -2
- package/lib/typescript/src/components/DiscoveryTooltip.d.ts +3 -1
- package/lib/typescript/src/components/FloatingOverlayWrapper.d.ts +51 -0
- package/lib/typescript/src/components/Icons.d.ts +8 -0
- package/lib/typescript/src/config/endpoints.d.ts +5 -3
- package/lib/typescript/src/core/AgentRuntime.d.ts +4 -0
- package/lib/typescript/src/core/FiberAdapter.d.ts +25 -0
- package/lib/typescript/src/core/FiberTreeWalker.d.ts +2 -0
- package/lib/typescript/src/core/IdleDetector.d.ts +11 -0
- package/lib/typescript/src/core/NativeAlertInterceptor.d.ts +55 -0
- package/lib/typescript/src/core/types.d.ts +106 -1
- package/lib/typescript/src/index.d.ts +9 -4
- package/lib/typescript/src/providers/GeminiProvider.d.ts +6 -5
- package/lib/typescript/src/services/ConversationService.d.ts +55 -0
- package/lib/typescript/src/services/MobileAIKnowledgeRetriever.d.ts +9 -0
- package/lib/typescript/src/services/telemetry/MobileAI.d.ts +7 -0
- package/lib/typescript/src/services/telemetry/TelemetryService.d.ts +1 -1
- package/lib/typescript/src/services/telemetry/TouchAutoCapture.d.ts +9 -6
- package/lib/typescript/src/services/telemetry/types.d.ts +3 -1
- package/lib/typescript/src/specs/FloatingOverlayNativeComponent.d.ts +17 -0
- package/lib/typescript/src/support/EscalationSocket.d.ts +17 -0
- package/lib/typescript/src/support/ReportedIssueEventSource.d.ts +24 -0
- package/lib/typescript/src/support/escalateTool.d.ts +5 -0
- package/lib/typescript/src/support/index.d.ts +2 -1
- package/lib/typescript/src/support/reportIssueTool.d.ts +20 -0
- package/lib/typescript/src/support/types.d.ts +56 -1
- package/lib/typescript/src/utils/logger.d.ts +15 -0
- package/package.json +20 -9
- package/react-native.config.js +12 -0
- package/lib/module/__cli_tmp__.js.map +0 -1
- package/lib/module/components/AIAgent.js.map +0 -1
- package/lib/module/components/AIZone.js.map +0 -1
- package/lib/module/components/AgentChatBar.js.map +0 -1
- package/lib/module/components/AgentErrorBoundary.js.map +0 -1
- package/lib/module/components/AgentOverlay.js.map +0 -1
- package/lib/module/components/DiscoveryTooltip.js.map +0 -1
- package/lib/module/components/HighlightOverlay.js.map +0 -1
- package/lib/module/components/Icons.js.map +0 -1
- package/lib/module/components/ProactiveHint.js.map +0 -1
- package/lib/module/components/cards/InfoCard.js.map +0 -1
- package/lib/module/components/cards/ReviewSummary.js.map +0 -1
- package/lib/module/config/endpoints.js.map +0 -1
- package/lib/module/core/ActionRegistry.js.map +0 -1
- package/lib/module/core/AgentRuntime.js.map +0 -1
- package/lib/module/core/FiberTreeWalker.js.map +0 -1
- package/lib/module/core/IdleDetector.js.map +0 -1
- package/lib/module/core/MCPBridge.js.map +0 -1
- package/lib/module/core/ScreenDehydrator.js.map +0 -1
- package/lib/module/core/ZoneRegistry.js.map +0 -1
- package/lib/module/core/systemPrompt.js.map +0 -1
- package/lib/module/core/types.js.map +0 -1
- package/lib/module/hooks/useAction.js.map +0 -1
- package/lib/module/index.js.map +0 -1
- package/lib/module/plugin/withAppIntents.js.map +0 -1
- package/lib/module/providers/GeminiProvider.js.map +0 -1
- package/lib/module/providers/OpenAIProvider.js.map +0 -1
- package/lib/module/providers/ProviderFactory.js.map +0 -1
- package/lib/module/services/AudioInputService.js.map +0 -1
- package/lib/module/services/AudioOutputService.js.map +0 -1
- package/lib/module/services/KnowledgeBaseService.js.map +0 -1
- package/lib/module/services/VoiceService.js.map +0 -1
- package/lib/module/services/flags/FlagService.js.map +0 -1
- package/lib/module/services/telemetry/MobileAI.js.map +0 -1
- package/lib/module/services/telemetry/PiiScrubber.js.map +0 -1
- package/lib/module/services/telemetry/TelemetryService.js.map +0 -1
- package/lib/module/services/telemetry/TouchAutoCapture.js.map +0 -1
- package/lib/module/services/telemetry/device.js.map +0 -1
- package/lib/module/services/telemetry/deviceMetadata.js.map +0 -1
- package/lib/module/services/telemetry/index.js.map +0 -1
- package/lib/module/services/telemetry/types.js.map +0 -1
- package/lib/module/support/CSATSurvey.js.map +0 -1
- package/lib/module/support/EscalationEventSource.js.map +0 -1
- package/lib/module/support/EscalationSocket.js.map +0 -1
- package/lib/module/support/SupportChatModal.js.map +0 -1
- package/lib/module/support/SupportGreeting.js.map +0 -1
- package/lib/module/support/TicketStore.js.map +0 -1
- package/lib/module/support/escalateTool.js.map +0 -1
- package/lib/module/support/index.js.map +0 -1
- package/lib/module/support/supportPrompt.js.map +0 -1
- package/lib/module/support/types.js.map +0 -1
- package/lib/module/tools/datePickerTool.js.map +0 -1
- package/lib/module/tools/guideTool.js.map +0 -1
- package/lib/module/tools/index.js.map +0 -1
- package/lib/module/tools/keyboardTool.js.map +0 -1
- package/lib/module/tools/longPressTool.js.map +0 -1
- package/lib/module/tools/pickerTool.js.map +0 -1
- package/lib/module/tools/restoreTool.js.map +0 -1
- package/lib/module/tools/scrollTool.js.map +0 -1
- package/lib/module/tools/simplifyTool.js.map +0 -1
- package/lib/module/tools/sliderTool.js.map +0 -1
- package/lib/module/tools/tapTool.js.map +0 -1
- package/lib/module/tools/typeTool.js.map +0 -1
- package/lib/module/tools/types.js.map +0 -1
- package/lib/module/types/jsx.d.js.map +0 -1
- package/lib/module/utils/audioUtils.js.map +0 -1
- package/lib/module/utils/logger.js.map +0 -1
- package/lib/typescript/babel.config.d.ts.map +0 -1
- package/lib/typescript/bin/generate-map.d.cts.map +0 -1
- package/lib/typescript/eslint.config.d.mts.map +0 -1
- package/lib/typescript/generate-map.d.ts.map +0 -1
- package/lib/typescript/src/__cli_tmp__.d.ts.map +0 -1
- package/lib/typescript/src/components/AIAgent.d.ts.map +0 -1
- package/lib/typescript/src/components/AIZone.d.ts.map +0 -1
- package/lib/typescript/src/components/AgentChatBar.d.ts.map +0 -1
- package/lib/typescript/src/components/AgentErrorBoundary.d.ts.map +0 -1
- package/lib/typescript/src/components/AgentOverlay.d.ts.map +0 -1
- package/lib/typescript/src/components/DiscoveryTooltip.d.ts.map +0 -1
- package/lib/typescript/src/components/HighlightOverlay.d.ts.map +0 -1
- package/lib/typescript/src/components/Icons.d.ts.map +0 -1
- package/lib/typescript/src/components/ProactiveHint.d.ts.map +0 -1
- package/lib/typescript/src/components/cards/InfoCard.d.ts.map +0 -1
- package/lib/typescript/src/components/cards/ReviewSummary.d.ts.map +0 -1
- package/lib/typescript/src/config/endpoints.d.ts.map +0 -1
- package/lib/typescript/src/core/ActionRegistry.d.ts.map +0 -1
- package/lib/typescript/src/core/AgentRuntime.d.ts.map +0 -1
- package/lib/typescript/src/core/FiberTreeWalker.d.ts.map +0 -1
- package/lib/typescript/src/core/IdleDetector.d.ts.map +0 -1
- package/lib/typescript/src/core/MCPBridge.d.ts.map +0 -1
- package/lib/typescript/src/core/ScreenDehydrator.d.ts.map +0 -1
- package/lib/typescript/src/core/ZoneRegistry.d.ts.map +0 -1
- package/lib/typescript/src/core/systemPrompt.d.ts.map +0 -1
- package/lib/typescript/src/core/types.d.ts.map +0 -1
- package/lib/typescript/src/hooks/useAction.d.ts.map +0 -1
- package/lib/typescript/src/index.d.ts.map +0 -1
- package/lib/typescript/src/plugin/withAppIntents.d.ts.map +0 -1
- package/lib/typescript/src/providers/GeminiProvider.d.ts.map +0 -1
- package/lib/typescript/src/providers/OpenAIProvider.d.ts.map +0 -1
- package/lib/typescript/src/providers/ProviderFactory.d.ts.map +0 -1
- package/lib/typescript/src/services/AudioInputService.d.ts.map +0 -1
- package/lib/typescript/src/services/AudioOutputService.d.ts.map +0 -1
- package/lib/typescript/src/services/KnowledgeBaseService.d.ts.map +0 -1
- package/lib/typescript/src/services/VoiceService.d.ts.map +0 -1
- package/lib/typescript/src/services/flags/FlagService.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/MobileAI.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/PiiScrubber.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/TelemetryService.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/TouchAutoCapture.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/device.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/deviceMetadata.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/index.d.ts.map +0 -1
- package/lib/typescript/src/services/telemetry/types.d.ts.map +0 -1
- package/lib/typescript/src/support/CSATSurvey.d.ts.map +0 -1
- package/lib/typescript/src/support/EscalationEventSource.d.ts.map +0 -1
- package/lib/typescript/src/support/EscalationSocket.d.ts.map +0 -1
- package/lib/typescript/src/support/SupportChatModal.d.ts.map +0 -1
- package/lib/typescript/src/support/SupportGreeting.d.ts.map +0 -1
- package/lib/typescript/src/support/TicketStore.d.ts.map +0 -1
- package/lib/typescript/src/support/escalateTool.d.ts.map +0 -1
- package/lib/typescript/src/support/index.d.ts.map +0 -1
- package/lib/typescript/src/support/supportPrompt.d.ts.map +0 -1
- package/lib/typescript/src/support/types.d.ts.map +0 -1
- package/lib/typescript/src/tools/datePickerTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/guideTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/index.d.ts.map +0 -1
- package/lib/typescript/src/tools/keyboardTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/longPressTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/pickerTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/restoreTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/scrollTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/simplifyTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/sliderTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/tapTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/typeTool.d.ts.map +0 -1
- package/lib/typescript/src/tools/types.d.ts.map +0 -1
- package/lib/typescript/src/utils/audioUtils.d.ts.map +0 -1
- package/lib/typescript/src/utils/logger.d.ts.map +0 -1
- package/src/__cli_tmp__.tsx +0 -9
- package/src/cli/analyzers/chain-analyzer.ts +0 -183
- package/src/cli/extractors/ai-extractor.ts +0 -6
- package/src/cli/extractors/ast-extractor.ts +0 -551
- package/src/cli/generate-intents.ts +0 -140
- package/src/cli/generate-map.ts +0 -121
- package/src/cli/generate-swift.ts +0 -116
- package/src/cli/scanners/expo-scanner.ts +0 -203
- package/src/cli/scanners/rn-scanner.ts +0 -445
- package/src/components/AIAgent.tsx +0 -1716
- package/src/components/AIZone.tsx +0 -147
- package/src/components/AgentChatBar.tsx +0 -1143
- package/src/components/AgentErrorBoundary.tsx +0 -78
- package/src/components/AgentOverlay.tsx +0 -73
- package/src/components/DiscoveryTooltip.tsx +0 -148
- package/src/components/HighlightOverlay.tsx +0 -136
- package/src/components/Icons.tsx +0 -253
- package/src/components/ProactiveHint.tsx +0 -145
- package/src/components/cards/InfoCard.tsx +0 -58
- package/src/components/cards/ReviewSummary.tsx +0 -76
- package/src/config/endpoints.ts +0 -22
- package/src/core/ActionRegistry.ts +0 -105
- package/src/core/AgentRuntime.ts +0 -1471
- package/src/core/FiberTreeWalker.ts +0 -930
- package/src/core/IdleDetector.ts +0 -72
- package/src/core/MCPBridge.ts +0 -163
- package/src/core/ScreenDehydrator.ts +0 -53
- package/src/core/ZoneRegistry.ts +0 -44
- package/src/core/systemPrompt.ts +0 -431
- package/src/core/types.ts +0 -521
- package/src/hooks/useAction.ts +0 -182
- package/src/index.ts +0 -83
- package/src/plugin/withAppIntents.ts +0 -98
- package/src/providers/GeminiProvider.ts +0 -357
- package/src/providers/OpenAIProvider.ts +0 -379
- package/src/providers/ProviderFactory.ts +0 -36
- package/src/services/AudioInputService.ts +0 -226
- package/src/services/AudioOutputService.ts +0 -236
- package/src/services/KnowledgeBaseService.ts +0 -156
- package/src/services/VoiceService.ts +0 -451
- package/src/services/flags/FlagService.ts +0 -137
- package/src/services/telemetry/MobileAI.ts +0 -66
- package/src/services/telemetry/PiiScrubber.ts +0 -17
- package/src/services/telemetry/TelemetryService.ts +0 -323
- package/src/services/telemetry/TouchAutoCapture.ts +0 -165
- package/src/services/telemetry/device.ts +0 -93
- package/src/services/telemetry/deviceMetadata.ts +0 -13
- package/src/services/telemetry/index.ts +0 -13
- package/src/services/telemetry/types.ts +0 -75
- package/src/support/CSATSurvey.tsx +0 -304
- package/src/support/EscalationEventSource.ts +0 -190
- package/src/support/EscalationSocket.ts +0 -152
- package/src/support/SupportChatModal.tsx +0 -563
- package/src/support/SupportGreeting.tsx +0 -161
- package/src/support/TicketStore.ts +0 -100
- package/src/support/escalateTool.ts +0 -174
- package/src/support/index.ts +0 -29
- package/src/support/supportPrompt.ts +0 -55
- package/src/support/types.ts +0 -155
- package/src/tools/datePickerTool.ts +0 -60
- package/src/tools/guideTool.ts +0 -76
- package/src/tools/index.ts +0 -20
- package/src/tools/keyboardTool.ts +0 -30
- package/src/tools/longPressTool.ts +0 -61
- package/src/tools/pickerTool.ts +0 -115
- package/src/tools/restoreTool.ts +0 -33
- package/src/tools/scrollTool.ts +0 -156
- package/src/tools/simplifyTool.ts +0 -33
- package/src/tools/sliderTool.ts +0 -65
- package/src/tools/tapTool.ts +0 -93
- package/src/tools/typeTool.ts +0 -113
- package/src/tools/types.ts +0 -58
- package/src/types/jsx.d.ts +0 -20
- package/src/utils/audioUtils.ts +0 -54
- package/src/utils/logger.ts +0 -38
|
@@ -1,1716 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* AIAgent — Root provider component for the AI agent.
|
|
3
|
-
*
|
|
4
|
-
* Wraps the app and provides:
|
|
5
|
-
* - Fiber tree root ref for element auto-detection
|
|
6
|
-
* - Navigation ref for auto-navigation
|
|
7
|
-
* - Floating chat bar for user input
|
|
8
|
-
* - Agent runtime context for useAction hooks
|
|
9
|
-
*/
|
|
10
|
-
|
|
11
|
-
import React, {
|
|
12
|
-
useCallback,
|
|
13
|
-
useEffect,
|
|
14
|
-
useMemo,
|
|
15
|
-
useRef,
|
|
16
|
-
useState,
|
|
17
|
-
} from 'react';
|
|
18
|
-
import { View, StyleSheet } from 'react-native';
|
|
19
|
-
import { AgentRuntime } from '../core/AgentRuntime';
|
|
20
|
-
import { createProvider } from '../providers/ProviderFactory';
|
|
21
|
-
import { AgentContext } from '../hooks/useAction';
|
|
22
|
-
import { AgentChatBar } from './AgentChatBar';
|
|
23
|
-
import { AgentOverlay } from './AgentOverlay';
|
|
24
|
-
import { logger } from '../utils/logger';
|
|
25
|
-
import { buildVoiceSystemPrompt } from '../core/systemPrompt';
|
|
26
|
-
import { MCPBridge } from '../core/MCPBridge';
|
|
27
|
-
import { VoiceService } from '../services/VoiceService';
|
|
28
|
-
import { AudioInputService } from '../services/AudioInputService';
|
|
29
|
-
import { AudioOutputService } from '../services/AudioOutputService';
|
|
30
|
-
import { TelemetryService, bindTelemetryService } from '../services/telemetry';
|
|
31
|
-
import { extractTouchLabel, checkRageClick } from '../services/telemetry/TouchAutoCapture';
|
|
32
|
-
import { initDeviceId, getDeviceId } from '../services/telemetry/device';
|
|
33
|
-
import type { AgentConfig, AgentMode, ExecutionResult, ToolDefinition, AgentStep, TokenUsage, KnowledgeBaseConfig, ChatBarTheme, AIMessage, AIProviderName, ScreenMap, ProactiveHelpConfig, InteractionMode } from '../core/types';
|
|
34
|
-
import { AgentErrorBoundary } from './AgentErrorBoundary';
|
|
35
|
-
import { HighlightOverlay } from './HighlightOverlay';
|
|
36
|
-
import { IdleDetector } from '../core/IdleDetector';
|
|
37
|
-
import { ProactiveHint } from './ProactiveHint';
|
|
38
|
-
import { createEscalateTool } from '../support/escalateTool';
|
|
39
|
-
import { EscalationSocket } from '../support/EscalationSocket';
|
|
40
|
-
import { EscalationEventSource } from '../support/EscalationEventSource';
|
|
41
|
-
import { SupportChatModal } from '../support/SupportChatModal';
|
|
42
|
-
import { ENDPOINTS } from '../config/endpoints';
|
|
43
|
-
|
|
44
|
-
// ─── Context ───────────────────────────────────────────────────
|
|
45
|
-
|
|
46
|
-
// ─── AsyncStorage Helper (same pattern as TicketStore) ─────────
|
|
47
|
-
|
|
48
|
-
/** Try to load AsyncStorage for tooltip persistence. Optional peer dep. */
|
|
49
|
-
function getTooltipStorage(): any | null {
|
|
50
|
-
try {
|
|
51
|
-
const origError = console.error;
|
|
52
|
-
console.error = (...args: unknown[]) => {
|
|
53
|
-
const msg = args[0];
|
|
54
|
-
if (typeof msg === 'string' && msg.includes('AsyncStorage')) return;
|
|
55
|
-
origError.apply(console, args);
|
|
56
|
-
};
|
|
57
|
-
try {
|
|
58
|
-
const mod = require('@react-native-async-storage/async-storage');
|
|
59
|
-
const candidate = mod?.default ?? mod?.AsyncStorage ?? null;
|
|
60
|
-
if (candidate && typeof candidate.getItem === 'function') return candidate;
|
|
61
|
-
return null;
|
|
62
|
-
} finally {
|
|
63
|
-
console.error = origError;
|
|
64
|
-
}
|
|
65
|
-
} catch {
|
|
66
|
-
return null;
|
|
67
|
-
}
|
|
68
|
-
}
|
|
69
|
-
|
|
70
|
-
// ─── Props ─────────────────────────────────────────────────────
|
|
71
|
-
|
|
72
|
-
interface AIAgentProps {
|
|
73
|
-
/**
|
|
74
|
-
* API key (for local prototyping only).
|
|
75
|
-
* Do not ship API keys in your production app bundle.
|
|
76
|
-
*/
|
|
77
|
-
apiKey?: string;
|
|
78
|
-
/**
|
|
79
|
-
* Which LLM provider to use for text mode.
|
|
80
|
-
* Default: 'gemini'
|
|
81
|
-
*/
|
|
82
|
-
provider?: AIProviderName;
|
|
83
|
-
/**
|
|
84
|
-
* The URL of your secure backend proxy (for production).
|
|
85
|
-
* Routes all Gemini API traffic through your server.
|
|
86
|
-
*/
|
|
87
|
-
proxyUrl?: string;
|
|
88
|
-
/**
|
|
89
|
-
* Headers to send to your backend proxy (e.g., auth tokens).
|
|
90
|
-
*/
|
|
91
|
-
proxyHeaders?: Record<string, string>;
|
|
92
|
-
/**
|
|
93
|
-
* Optional specific URL for Voice Mode (WebSockets).
|
|
94
|
-
* If voiceProxyUrl isn't provided, it safely falls back to using proxyUrl for everything.
|
|
95
|
-
*/
|
|
96
|
-
voiceProxyUrl?: string;
|
|
97
|
-
/**
|
|
98
|
-
* Optional specific headers for voiceProxyUrl.
|
|
99
|
-
*/
|
|
100
|
-
voiceProxyHeaders?: Record<string, string>;
|
|
101
|
-
/** LLM model name (provider-specific) */
|
|
102
|
-
model?: string;
|
|
103
|
-
/** Navigation container ref (from useNavigationContainerRef) */
|
|
104
|
-
navRef?: any;
|
|
105
|
-
|
|
106
|
-
/** Max agent steps per request */
|
|
107
|
-
maxSteps?: number;
|
|
108
|
-
/** Show/hide the chat bar */
|
|
109
|
-
showChatBar?: boolean;
|
|
110
|
-
/** Children — the actual app */
|
|
111
|
-
children: React.ReactNode;
|
|
112
|
-
/** Callback when agent completes */
|
|
113
|
-
onResult?: (result: ExecutionResult) => void;
|
|
114
|
-
|
|
115
|
-
// ── Security ──────────────────────
|
|
116
|
-
|
|
117
|
-
/** Refs of elements the AI must NOT interact with */
|
|
118
|
-
interactiveBlacklist?: React.RefObject<any>[];
|
|
119
|
-
/** If set, AI can ONLY interact with these elements */
|
|
120
|
-
interactiveWhitelist?: React.RefObject<any>[];
|
|
121
|
-
/** Called before each step */
|
|
122
|
-
onBeforeStep?: (stepCount: number) => Promise<void> | void;
|
|
123
|
-
/** Called after each step */
|
|
124
|
-
onAfterStep?: (history: AgentStep[]) => Promise<void> | void;
|
|
125
|
-
/** Called before task starts */
|
|
126
|
-
onBeforeTask?: () => Promise<void> | void;
|
|
127
|
-
/** Called after task completes */
|
|
128
|
-
onAfterTask?: (result: ExecutionResult) => Promise<void> | void;
|
|
129
|
-
/** Transform screen content before LLM sees it (for data masking) */
|
|
130
|
-
transformScreenContent?: (content: string) => Promise<string> | string;
|
|
131
|
-
/** Override or remove built-in tools (null = remove) */
|
|
132
|
-
customTools?: Record<string, ToolDefinition | null>;
|
|
133
|
-
/** Instructions to guide agent behavior */
|
|
134
|
-
instructions?: {
|
|
135
|
-
system?: string;
|
|
136
|
-
getScreenInstructions?: (screenName: string) => string | undefined | null;
|
|
137
|
-
};
|
|
138
|
-
/** Delay between steps in ms */
|
|
139
|
-
stepDelay?: number;
|
|
140
|
-
/** WebSocket URL to companion MCP server bridge (e.g., ws://localhost:3101) */
|
|
141
|
-
mcpServerUrl?: string;
|
|
142
|
-
/** Expo Router instance (from useRouter()) */
|
|
143
|
-
router?: {
|
|
144
|
-
push: (href: string) => void;
|
|
145
|
-
replace: (href: string) => void;
|
|
146
|
-
back: () => void;
|
|
147
|
-
};
|
|
148
|
-
/** Expo Router pathname (from usePathname()) */
|
|
149
|
-
pathname?: string;
|
|
150
|
-
/** Enable voice mode (requires expo-av) */
|
|
151
|
-
enableVoice?: boolean;
|
|
152
|
-
/** Called after each step with token usage data */
|
|
153
|
-
onTokenUsage?: (usage: TokenUsage) => void;
|
|
154
|
-
/** Enable SDK debug logging (disabled by default) */
|
|
155
|
-
debug?: boolean;
|
|
156
|
-
/**
|
|
157
|
-
* Domain knowledge the AI can query via the query_knowledge tool.
|
|
158
|
-
* Pass a static KnowledgeEntry[] or a { retrieve(query, screen) } function.
|
|
159
|
-
*/
|
|
160
|
-
knowledgeBase?: KnowledgeBaseConfig;
|
|
161
|
-
/** Max token budget for knowledge retrieval (default: 2000) */
|
|
162
|
-
knowledgeMaxTokens?: number;
|
|
163
|
-
/**
|
|
164
|
-
* Enable or disable UI control (tap, type, navigate).
|
|
165
|
-
* When false, the AI operates as a knowledge-only assistant.
|
|
166
|
-
* Default: true
|
|
167
|
-
*/
|
|
168
|
-
enableUIControl?: boolean;
|
|
169
|
-
/**
|
|
170
|
-
* Quick accent color for the chat bar.
|
|
171
|
-
* Tints the FAB, send button, and active states.
|
|
172
|
-
* Overridden by theme.primaryColor if both are provided.
|
|
173
|
-
*/
|
|
174
|
-
accentColor?: string;
|
|
175
|
-
/**
|
|
176
|
-
* Full theme customization for the chat bar popup.
|
|
177
|
-
* Overrides accentColor for any specified key.
|
|
178
|
-
*/
|
|
179
|
-
theme?: ChatBarTheme;
|
|
180
|
-
/**
|
|
181
|
-
* Pre-generated screen map from `npx react-native-ai-agent generate-map`.
|
|
182
|
-
* Gives the AI knowledge of all screens, their content, and navigation chains.
|
|
183
|
-
*/
|
|
184
|
-
screenMap?: ScreenMap;
|
|
185
|
-
/**
|
|
186
|
-
* Maximum total tokens (prompt + completion) allowed per task.
|
|
187
|
-
* The agent loop auto-stops when this budget is exceeded.
|
|
188
|
-
*/
|
|
189
|
-
maxTokenBudget?: number;
|
|
190
|
-
/**
|
|
191
|
-
* Maximum estimated cost (USD) allowed per task.
|
|
192
|
-
* The agent loop auto-stops when this budget is exceeded.
|
|
193
|
-
*/
|
|
194
|
-
maxCostUSD?: number;
|
|
195
|
-
|
|
196
|
-
/**
|
|
197
|
-
* Whether to include the screen map in the AI prompt.
|
|
198
|
-
* Set to `false` to disable navigation intelligence without removing the `screenMap` prop.
|
|
199
|
-
* @default true
|
|
200
|
-
*/
|
|
201
|
-
useScreenMap?: boolean;
|
|
202
|
-
|
|
203
|
-
// ── Analytics (opt-in) ──
|
|
204
|
-
|
|
205
|
-
/**
|
|
206
|
-
* Publishable analytics key (mobileai_pub_xxx).
|
|
207
|
-
*/
|
|
208
|
-
analyticsKey?: string;
|
|
209
|
-
/**
|
|
210
|
-
* Proxy URL for enterprise customers — routes events through your backend.
|
|
211
|
-
*/
|
|
212
|
-
analyticsProxyUrl?: string;
|
|
213
|
-
/**
|
|
214
|
-
* Custom headers for analyticsProxyUrl (e.g., auth tokens).
|
|
215
|
-
*/
|
|
216
|
-
analyticsProxyHeaders?: Record<string, string>;
|
|
217
|
-
|
|
218
|
-
/**
|
|
219
|
-
* Proactive agent configuration (detects user hesitation)
|
|
220
|
-
*/
|
|
221
|
-
proactiveHelp?: ProactiveHelpConfig;
|
|
222
|
-
|
|
223
|
-
// ── Support Configuration ────────────
|
|
224
|
-
|
|
225
|
-
/**
|
|
226
|
-
* Identity of the logged-in user.
|
|
227
|
-
* If provided, this enforces "one ticket per user" and shows the user profile
|
|
228
|
-
* in the Dashboard (name, email, plan, etc.).
|
|
229
|
-
*/
|
|
230
|
-
userContext?: {
|
|
231
|
-
userId?: string;
|
|
232
|
-
name?: string;
|
|
233
|
-
email?: string;
|
|
234
|
-
phone?: string;
|
|
235
|
-
plan?: string;
|
|
236
|
-
custom?: Record<string, string | number | boolean>;
|
|
237
|
-
};
|
|
238
|
-
|
|
239
|
-
/**
|
|
240
|
-
* Device push token for offline support replies.
|
|
241
|
-
* Use '@react-native-firebase/messaging' or 'expo-notifications' to get this.
|
|
242
|
-
*/
|
|
243
|
-
pushToken?: string;
|
|
244
|
-
|
|
245
|
-
/**
|
|
246
|
-
* The type of push token provided.
|
|
247
|
-
* "fcm" is recommended for universal bare/Expo support.
|
|
248
|
-
*/
|
|
249
|
-
pushTokenType?: 'fcm' | 'expo' | 'apns';
|
|
250
|
-
|
|
251
|
-
/**
|
|
252
|
-
* Controls how the agent handles irreversible UI actions.
|
|
253
|
-
* 'copilot' (default): AI pauses before final commit actions (place order, delete, submit).
|
|
254
|
-
* 'autopilot': Full autonomy — all actions execute without confirmation.
|
|
255
|
-
*
|
|
256
|
-
* In copilot mode, the AI works silently (navigates, fills forms, scrolls) and
|
|
257
|
-
* pauses ONCE before the final irreversible action. Elements with aiConfirm={true}
|
|
258
|
-
* also trigger a code-level confirmation gate as a safety net.
|
|
259
|
-
*/
|
|
260
|
-
interactionMode?: InteractionMode;
|
|
261
|
-
|
|
262
|
-
/**
|
|
263
|
-
* Show a one-time discovery tooltip above the chat FAB.
|
|
264
|
-
* Tells new users the AI can navigate and interact with the app.
|
|
265
|
-
* Default: true (shows once, then remembered via AsyncStorage)
|
|
266
|
-
*/
|
|
267
|
-
showDiscoveryTooltip?: boolean;
|
|
268
|
-
}
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
// ─── Component ─────────────────────────────────────────────────
|
|
272
|
-
|
|
273
|
-
export function AIAgent({
|
|
274
|
-
apiKey,
|
|
275
|
-
proxyUrl,
|
|
276
|
-
proxyHeaders,
|
|
277
|
-
voiceProxyUrl,
|
|
278
|
-
voiceProxyHeaders,
|
|
279
|
-
provider: providerName = 'gemini',
|
|
280
|
-
model,
|
|
281
|
-
navRef,
|
|
282
|
-
|
|
283
|
-
maxSteps = 25,
|
|
284
|
-
showChatBar = true,
|
|
285
|
-
children,
|
|
286
|
-
onResult,
|
|
287
|
-
// Security props
|
|
288
|
-
interactiveBlacklist,
|
|
289
|
-
interactiveWhitelist,
|
|
290
|
-
onBeforeStep,
|
|
291
|
-
onAfterStep,
|
|
292
|
-
onBeforeTask,
|
|
293
|
-
onAfterTask,
|
|
294
|
-
transformScreenContent,
|
|
295
|
-
customTools,
|
|
296
|
-
instructions,
|
|
297
|
-
stepDelay,
|
|
298
|
-
mcpServerUrl,
|
|
299
|
-
router,
|
|
300
|
-
pathname,
|
|
301
|
-
enableVoice = false,
|
|
302
|
-
onTokenUsage,
|
|
303
|
-
debug = false,
|
|
304
|
-
knowledgeBase,
|
|
305
|
-
knowledgeMaxTokens,
|
|
306
|
-
enableUIControl,
|
|
307
|
-
accentColor,
|
|
308
|
-
theme,
|
|
309
|
-
screenMap,
|
|
310
|
-
useScreenMap = true,
|
|
311
|
-
maxTokenBudget,
|
|
312
|
-
maxCostUSD,
|
|
313
|
-
analyticsKey,
|
|
314
|
-
analyticsProxyUrl,
|
|
315
|
-
analyticsProxyHeaders,
|
|
316
|
-
proactiveHelp,
|
|
317
|
-
userContext,
|
|
318
|
-
pushToken,
|
|
319
|
-
pushTokenType,
|
|
320
|
-
interactionMode,
|
|
321
|
-
showDiscoveryTooltip: showDiscoveryTooltipProp = true,
|
|
322
|
-
}: AIAgentProps) {
|
|
323
|
-
// Configure logger based on debug prop
|
|
324
|
-
React.useEffect(() => {
|
|
325
|
-
logger.setEnabled(debug);
|
|
326
|
-
if (debug) {
|
|
327
|
-
logger.info('AIAgent', '🔧 Debug logging enabled');
|
|
328
|
-
}
|
|
329
|
-
}, [debug]);
|
|
330
|
-
|
|
331
|
-
const rootViewRef = useRef<any>(null);
|
|
332
|
-
const [isThinking, setIsThinking] = useState(false);
|
|
333
|
-
const [statusText, setStatusText] = useState('');
|
|
334
|
-
const [lastResult, setLastResult] = useState<ExecutionResult | null>(null);
|
|
335
|
-
const [lastUserMessage, setLastUserMessage] = useState<string | null>(null);
|
|
336
|
-
const [messages, setMessages] = useState<AIMessage[]>([]);
|
|
337
|
-
const [chatScrollTrigger, setChatScrollTrigger] = useState(0);
|
|
338
|
-
|
|
339
|
-
// Increment scroll trigger when messages change to auto-scroll chat modal
|
|
340
|
-
useEffect(() => {
|
|
341
|
-
if (messages.length > 0) {
|
|
342
|
-
setChatScrollTrigger(prev => prev + 1);
|
|
343
|
-
}
|
|
344
|
-
}, [messages.length]);
|
|
345
|
-
|
|
346
|
-
// ── Support Modal State ──
|
|
347
|
-
const [tickets, setTickets] = useState<import('../support/types').SupportTicket[]>([]);
|
|
348
|
-
const [selectedTicketId, setSelectedTicketId] = useState<string | null>(null);
|
|
349
|
-
const [supportSocket, setSupportSocket] = useState<EscalationSocket | null>(null);
|
|
350
|
-
const [isLiveAgentTyping, setIsLiveAgentTyping] = useState(false);
|
|
351
|
-
const [autoExpandTrigger, setAutoExpandTrigger] = useState(0);
|
|
352
|
-
const [unreadCounts, setUnreadCounts] = useState<Record<string, number>>({});
|
|
353
|
-
// Ref mirrors selectedTicketId — lets socket callbacks access current value
|
|
354
|
-
// without stale closures (sockets are long-lived, closures capture old state).
|
|
355
|
-
const selectedTicketIdRef = useRef<string | null>(null);
|
|
356
|
-
useEffect(() => { selectedTicketIdRef.current = selectedTicketId; }, [selectedTicketId]);
|
|
357
|
-
// Cache of live sockets by ticketId — keeps sockets alive even when user
|
|
358
|
-
// navigates back to the ticket list, so new messages still trigger badge updates.
|
|
359
|
-
const pendingSocketsRef = useRef<Map<string, EscalationSocket>>(new Map());
|
|
360
|
-
// SSE connections per ticket — reliable fallback for ticket_closed events
|
|
361
|
-
// when the WebSocket is disconnected. EventSource auto-reconnects.
|
|
362
|
-
const sseRef = useRef<Map<string, EscalationEventSource>>(new Map());
|
|
363
|
-
|
|
364
|
-
const totalUnread = Object.values(unreadCounts).reduce((sum, count) => sum + count, 0);
|
|
365
|
-
|
|
366
|
-
// ── Discovery Tooltip (one-time) ──────────────────────────
|
|
367
|
-
const [tooltipVisible, setTooltipVisible] = useState(false);
|
|
368
|
-
|
|
369
|
-
useEffect(() => {
|
|
370
|
-
if (!showDiscoveryTooltipProp) return;
|
|
371
|
-
void (async () => {
|
|
372
|
-
try {
|
|
373
|
-
const AS = getTooltipStorage();
|
|
374
|
-
if (!AS) { setTooltipVisible(true); return; }
|
|
375
|
-
const seen = await AS.getItem('@mobileai_tooltip_seen');
|
|
376
|
-
if (!seen) setTooltipVisible(true);
|
|
377
|
-
} catch {
|
|
378
|
-
setTooltipVisible(true);
|
|
379
|
-
}
|
|
380
|
-
})();
|
|
381
|
-
}, [showDiscoveryTooltipProp]);
|
|
382
|
-
|
|
383
|
-
const handleTooltipDismiss = useCallback(() => {
|
|
384
|
-
setTooltipVisible(false);
|
|
385
|
-
void (async () => {
|
|
386
|
-
try {
|
|
387
|
-
const AS = getTooltipStorage();
|
|
388
|
-
await AS?.setItem('@mobileai_tooltip_seen', 'true');
|
|
389
|
-
} catch { /* graceful */ }
|
|
390
|
-
})();
|
|
391
|
-
}, []);
|
|
392
|
-
|
|
393
|
-
// CRITICAL: clearSupport uses REFS and functional setters — never closure values.
|
|
394
|
-
// This function is captured by long-lived callbacks (escalation sockets, restored
|
|
395
|
-
// sockets) that may hold stale references. Using refs guarantees the current
|
|
396
|
-
// selectedTicketId and supportSocket are always read, not snapshot values.
|
|
397
|
-
const clearSupport = useCallback((ticketId?: string) => {
|
|
398
|
-
if (ticketId) {
|
|
399
|
-
// Remove specific ticket + its cached socket and SSE
|
|
400
|
-
const cached = pendingSocketsRef.current.get(ticketId);
|
|
401
|
-
if (cached) { cached.disconnect(); pendingSocketsRef.current.delete(ticketId); }
|
|
402
|
-
const sse = sseRef.current.get(ticketId);
|
|
403
|
-
if (sse) { sse.disconnect(); sseRef.current.delete(ticketId); }
|
|
404
|
-
setTickets(prev => prev.filter(t => t.id !== ticketId));
|
|
405
|
-
setUnreadCounts(prev => { const n = { ...prev }; delete n[ticketId]; return n; });
|
|
406
|
-
|
|
407
|
-
// If user was viewing this ticket, close the support modal + switch to ticket list
|
|
408
|
-
if (selectedTicketIdRef.current === ticketId) {
|
|
409
|
-
setSupportSocket(prev => { prev?.disconnect(); return null; });
|
|
410
|
-
setSelectedTicketId(null);
|
|
411
|
-
setIsLiveAgentTyping(false);
|
|
412
|
-
setMessages([]);
|
|
413
|
-
}
|
|
414
|
-
|
|
415
|
-
// If no tickets remain, switch back to text mode
|
|
416
|
-
setTickets(prev => {
|
|
417
|
-
if (prev.length === 0) {
|
|
418
|
-
setMode('text');
|
|
419
|
-
}
|
|
420
|
-
return prev;
|
|
421
|
-
});
|
|
422
|
-
} else {
|
|
423
|
-
// Clear all — disconnect every cached socket and SSE
|
|
424
|
-
pendingSocketsRef.current.forEach(s => s.disconnect());
|
|
425
|
-
pendingSocketsRef.current.clear();
|
|
426
|
-
sseRef.current.forEach(s => s.disconnect());
|
|
427
|
-
sseRef.current.clear();
|
|
428
|
-
setSupportSocket(prev => { prev?.disconnect(); return null; });
|
|
429
|
-
setSelectedTicketId(null);
|
|
430
|
-
setTickets([]);
|
|
431
|
-
setUnreadCounts({});
|
|
432
|
-
setIsLiveAgentTyping(false);
|
|
433
|
-
setMode('text');
|
|
434
|
-
}
|
|
435
|
-
}, []);
|
|
436
|
-
|
|
437
|
-
const openSSE = useCallback((ticketId: string) => {
|
|
438
|
-
if (sseRef.current.has(ticketId)) return;
|
|
439
|
-
if (!analyticsKey) return;
|
|
440
|
-
|
|
441
|
-
const sseUrl = `${ENDPOINTS.escalation}/api/v1/escalations/events?analyticsKey=${encodeURIComponent(analyticsKey)}&ticketId=${encodeURIComponent(ticketId)}`;
|
|
442
|
-
const sse = new EscalationEventSource({
|
|
443
|
-
url: sseUrl,
|
|
444
|
-
onTicketClosed: (tid) => {
|
|
445
|
-
logger.info('AIAgent', 'SSE: ticket_closed received for', tid);
|
|
446
|
-
setUnreadCounts(prev => {
|
|
447
|
-
const next = { ...prev };
|
|
448
|
-
delete next[tid];
|
|
449
|
-
return next;
|
|
450
|
-
});
|
|
451
|
-
clearSupport(tid);
|
|
452
|
-
},
|
|
453
|
-
onConnected: (tid) => {
|
|
454
|
-
logger.info('AIAgent', 'SSE: connected for ticket', tid);
|
|
455
|
-
},
|
|
456
|
-
});
|
|
457
|
-
sse.connect();
|
|
458
|
-
sseRef.current.set(ticketId, sse);
|
|
459
|
-
logger.info('AIAgent', 'SSE opened for ticket:', ticketId);
|
|
460
|
-
}, [analyticsKey, clearSupport]);
|
|
461
|
-
|
|
462
|
-
const clearMessages = useCallback(() => {
|
|
463
|
-
setMessages([]);
|
|
464
|
-
setLastResult(null);
|
|
465
|
-
}, []);
|
|
466
|
-
|
|
467
|
-
const getResolvedScreenName = useCallback(() => {
|
|
468
|
-
const routeName = (navRef as any)?.getCurrentRoute?.()?.name;
|
|
469
|
-
if (typeof routeName === 'string' && routeName.trim().length > 0) {
|
|
470
|
-
return routeName;
|
|
471
|
-
}
|
|
472
|
-
|
|
473
|
-
const telemetryScreen = telemetryRef.current?.screen;
|
|
474
|
-
if (typeof telemetryScreen === 'string' && telemetryScreen !== 'Unknown') {
|
|
475
|
-
return telemetryScreen;
|
|
476
|
-
}
|
|
477
|
-
|
|
478
|
-
return 'unknown';
|
|
479
|
-
}, [navRef]);
|
|
480
|
-
|
|
481
|
-
// ─── Auto-create MobileAI escalation tool ─────────────────────
|
|
482
|
-
// When analyticsKey is present and consumer hasn't provided their own
|
|
483
|
-
// escalate_to_human tool, auto-wire the MobileAI platform provider.
|
|
484
|
-
// Human replies from the dashboard inbox are injected into chat messages.
|
|
485
|
-
const autoEscalateTool = useMemo(() => {
|
|
486
|
-
if (!analyticsKey) return null;
|
|
487
|
-
if (customTools?.['escalate_to_human']) return null; // consumer overrides
|
|
488
|
-
return createEscalateTool({
|
|
489
|
-
config: { provider: 'mobileai' },
|
|
490
|
-
analyticsKey,
|
|
491
|
-
getContext: () => ({
|
|
492
|
-
currentScreen: getResolvedScreenName(),
|
|
493
|
-
originalQuery: '',
|
|
494
|
-
stepsBeforeEscalation: 0,
|
|
495
|
-
}),
|
|
496
|
-
getHistory: () =>
|
|
497
|
-
messages.map((m) => ({ role: m.role, content: m.content })),
|
|
498
|
-
getScreenFlow: () => telemetryRef.current?.getScreenFlow() ?? [],
|
|
499
|
-
userContext,
|
|
500
|
-
pushToken,
|
|
501
|
-
pushTokenType,
|
|
502
|
-
onEscalationStarted: (tid, socket) => {
|
|
503
|
-
logger.info('AIAgent', '★★★ onEscalationStarted FIRED — ticketId:', tid);
|
|
504
|
-
// Cache the live socket so handleTicketSelect can reuse it without reconnecting
|
|
505
|
-
pendingSocketsRef.current.set(tid, socket);
|
|
506
|
-
// Open SSE for reliable ticket_closed delivery
|
|
507
|
-
openSSE(tid);
|
|
508
|
-
|
|
509
|
-
const currentScreen = getResolvedScreenName();
|
|
510
|
-
setTickets(prev => {
|
|
511
|
-
if (prev.find(t => t.id === tid)) {
|
|
512
|
-
logger.info('AIAgent', '★★★ Ticket already in list, skipping add');
|
|
513
|
-
return prev;
|
|
514
|
-
}
|
|
515
|
-
const newList = [{ id: tid, reason: 'Connecting to agent...', screen: currentScreen, status: 'open', history: [], createdAt: new Date().toISOString(), wsUrl: '' }, ...prev];
|
|
516
|
-
logger.info('AIAgent', '★★★ Tickets updated, new length:', newList.length);
|
|
517
|
-
return newList;
|
|
518
|
-
});
|
|
519
|
-
|
|
520
|
-
// Fetch real ticket data from backend to replace the placeholder
|
|
521
|
-
void (async () => {
|
|
522
|
-
try {
|
|
523
|
-
const res = await fetch(`${ENDPOINTS.escalation}/api/v1/escalations/${tid}?analyticsKey=${encodeURIComponent(analyticsKey!)}`);
|
|
524
|
-
if (res.ok) {
|
|
525
|
-
const data = await res.json();
|
|
526
|
-
setTickets(prev => prev.map(t => {
|
|
527
|
-
if (t.id !== tid) return t;
|
|
528
|
-
return {
|
|
529
|
-
...t,
|
|
530
|
-
reason: data.reason || t.reason,
|
|
531
|
-
screen: data.screen || t.screen,
|
|
532
|
-
status: data.status || t.status,
|
|
533
|
-
history: Array.isArray(data.history) ? data.history : t.history,
|
|
534
|
-
};
|
|
535
|
-
}));
|
|
536
|
-
}
|
|
537
|
-
} catch {
|
|
538
|
-
// Best-effort — placeholder is still usable
|
|
539
|
-
}
|
|
540
|
-
})();
|
|
541
|
-
|
|
542
|
-
// Switch to human mode so the ticket LIST is visible — do NOT auto-select
|
|
543
|
-
setMode('human');
|
|
544
|
-
setAutoExpandTrigger(prev => {
|
|
545
|
-
const next = prev + 1;
|
|
546
|
-
logger.info('AIAgent', '★★★ autoExpandTrigger:', prev, '→', next);
|
|
547
|
-
return next;
|
|
548
|
-
});
|
|
549
|
-
logger.info('AIAgent', '★★★ setMode("human") called from onEscalationStarted');
|
|
550
|
-
},
|
|
551
|
-
onHumanReply: (reply: string, ticketId?: string) => {
|
|
552
|
-
if (ticketId) {
|
|
553
|
-
// Always update the ticket's history (source of truth for ticket cards)
|
|
554
|
-
setTickets(prev => prev.map(t => {
|
|
555
|
-
if (t.id !== ticketId) return t;
|
|
556
|
-
return {
|
|
557
|
-
...t,
|
|
558
|
-
history: [...(t.history || []), { role: 'live_agent', content: reply, timestamp: new Date().toISOString() }],
|
|
559
|
-
};
|
|
560
|
-
}));
|
|
561
|
-
|
|
562
|
-
// Route via ref: only push to messages[] if user is viewing THIS ticket
|
|
563
|
-
if (selectedTicketIdRef.current === ticketId) {
|
|
564
|
-
const humanMsg: AIMessage = {
|
|
565
|
-
id: `human-${Date.now()}`,
|
|
566
|
-
role: 'live_agent' as any,
|
|
567
|
-
content: reply,
|
|
568
|
-
timestamp: Date.now(),
|
|
569
|
-
};
|
|
570
|
-
setMessages((prev) => [...prev, humanMsg]);
|
|
571
|
-
setLastResult({ success: true, message: `👤 ${reply}`, steps: [] });
|
|
572
|
-
} else {
|
|
573
|
-
// Not viewing this ticket — increment unread badge
|
|
574
|
-
setUnreadCounts(prev => ({
|
|
575
|
-
...prev,
|
|
576
|
-
[ticketId]: (prev[ticketId] || 0) + 1,
|
|
577
|
-
}));
|
|
578
|
-
}
|
|
579
|
-
}
|
|
580
|
-
},
|
|
581
|
-
onTypingChange: (isTyping: boolean) => {
|
|
582
|
-
setIsLiveAgentTyping(isTyping);
|
|
583
|
-
},
|
|
584
|
-
onTicketClosed: (ticketId?: string) => {
|
|
585
|
-
logger.info('AIAgent', 'Ticket closed by agent — removing from list');
|
|
586
|
-
if (ticketId) {
|
|
587
|
-
setUnreadCounts(prev => {
|
|
588
|
-
const next = { ...prev };
|
|
589
|
-
delete next[ticketId];
|
|
590
|
-
return next;
|
|
591
|
-
});
|
|
592
|
-
}
|
|
593
|
-
clearSupport(ticketId);
|
|
594
|
-
},
|
|
595
|
-
});
|
|
596
|
-
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
597
|
-
}, [analyticsKey, customTools, getResolvedScreenName, navRef, openSSE, userContext, pushToken, pushTokenType, messages, clearSupport]);
|
|
598
|
-
|
|
599
|
-
// ─── Restore pending tickets on app start ──────────────────────
|
|
600
|
-
useEffect(() => {
|
|
601
|
-
if (!analyticsKey) return;
|
|
602
|
-
|
|
603
|
-
void (async () => {
|
|
604
|
-
try {
|
|
605
|
-
// Wait for the device ID to be initialised before reading it.
|
|
606
|
-
// getDeviceId() is synchronous but returns null on cold start until
|
|
607
|
-
// initDeviceId() resolves — awaiting here prevents an early bail-out
|
|
608
|
-
// that would leave the Human tab hidden after an app refresh.
|
|
609
|
-
await initDeviceId();
|
|
610
|
-
const deviceId = getDeviceId();
|
|
611
|
-
|
|
612
|
-
logger.info('AIAgent', '★ Restore check — analyticsKey:', !!analyticsKey, 'userId:', userContext?.userId, 'pushToken:', !!pushToken, 'deviceId:', deviceId);
|
|
613
|
-
if (!userContext?.userId && !pushToken && !deviceId) return;
|
|
614
|
-
|
|
615
|
-
const query = new URLSearchParams({ analyticsKey });
|
|
616
|
-
if (userContext?.userId) query.append('userId', userContext.userId);
|
|
617
|
-
if (pushToken) query.append('pushToken', pushToken);
|
|
618
|
-
if (deviceId) query.append('deviceId', deviceId);
|
|
619
|
-
|
|
620
|
-
const url = `${ENDPOINTS.escalation}/api/v1/escalations/mine?${query.toString()}`;
|
|
621
|
-
logger.info('AIAgent', '★ Restore — fetching:', url);
|
|
622
|
-
const res = await fetch(url);
|
|
623
|
-
|
|
624
|
-
logger.info('AIAgent', '★ Restore — response status:', res.status);
|
|
625
|
-
if (!res.ok) return;
|
|
626
|
-
|
|
627
|
-
const data = await res.json();
|
|
628
|
-
const fetchedTickets: import('../support/types').SupportTicket[] = data.tickets ?? [];
|
|
629
|
-
logger.info('AIAgent', '★ Restore — found', fetchedTickets.length, 'active tickets');
|
|
630
|
-
|
|
631
|
-
if (fetchedTickets.length === 0) return;
|
|
632
|
-
|
|
633
|
-
// Initialize unread counts from backend (set together with tickets for instant badge)
|
|
634
|
-
const initialUnreadCounts: Record<string, number> = {};
|
|
635
|
-
for (const ticket of fetchedTickets) {
|
|
636
|
-
if (ticket.unreadCount && ticket.unreadCount > 0) {
|
|
637
|
-
initialUnreadCounts[ticket.id] = ticket.unreadCount;
|
|
638
|
-
}
|
|
639
|
-
}
|
|
640
|
-
setTickets(fetchedTickets);
|
|
641
|
-
setUnreadCounts(initialUnreadCounts);
|
|
642
|
-
|
|
643
|
-
// Show the ticket list without auto-selecting — user taps in (Intercom-style).
|
|
644
|
-
// setMode switches the widget to human mode so the list is immediately visible.
|
|
645
|
-
setMode('human');
|
|
646
|
-
setAutoExpandTrigger(prev => prev + 1);
|
|
647
|
-
|
|
648
|
-
// Open SSE for every restored ticket — reliable ticket_closed delivery
|
|
649
|
-
for (const t of fetchedTickets) {
|
|
650
|
-
openSSE(t.id);
|
|
651
|
-
}
|
|
652
|
-
|
|
653
|
-
// If there is exactly one ticket, pre-wire its WebSocket so it is ready
|
|
654
|
-
// the moment the user taps the card (no extra connect delay).
|
|
655
|
-
if (fetchedTickets.length === 1) {
|
|
656
|
-
const ticket = fetchedTickets[0]!;
|
|
657
|
-
|
|
658
|
-
if (ticket.history?.length) {
|
|
659
|
-
const restored: AIMessage[] = ticket.history.map(
|
|
660
|
-
(entry: { role: string; content: string; timestamp?: string }, i: number) => ({
|
|
661
|
-
id: `restored-${ticket.id}-${i}`,
|
|
662
|
-
role: (entry.role === 'live_agent' ? 'assistant' : entry.role) as any,
|
|
663
|
-
content: entry.content,
|
|
664
|
-
timestamp: entry.timestamp ? new Date(entry.timestamp).getTime() : Date.now(),
|
|
665
|
-
})
|
|
666
|
-
);
|
|
667
|
-
setMessages(restored);
|
|
668
|
-
}
|
|
669
|
-
|
|
670
|
-
const socket = new EscalationSocket({
|
|
671
|
-
onReply: (reply: string) => {
|
|
672
|
-
const tid = ticket.id;
|
|
673
|
-
// Always update ticket history
|
|
674
|
-
setTickets(prev => prev.map(t => {
|
|
675
|
-
if (t.id !== tid) return t;
|
|
676
|
-
return {
|
|
677
|
-
...t,
|
|
678
|
-
history: [...(t.history || []), { role: 'live_agent', content: reply, timestamp: new Date().toISOString() }],
|
|
679
|
-
};
|
|
680
|
-
}));
|
|
681
|
-
|
|
682
|
-
// Route via ref: only push to messages[] if user is viewing THIS ticket
|
|
683
|
-
if (selectedTicketIdRef.current === tid) {
|
|
684
|
-
const msg: AIMessage = {
|
|
685
|
-
id: `human-${Date.now()}`,
|
|
686
|
-
role: 'assistant',
|
|
687
|
-
content: reply,
|
|
688
|
-
timestamp: Date.now(),
|
|
689
|
-
};
|
|
690
|
-
setMessages((prev) => [...prev, msg]);
|
|
691
|
-
setLastResult({ success: true, message: `👤 ${reply}`, steps: [] });
|
|
692
|
-
} else {
|
|
693
|
-
setUnreadCounts(prev => ({
|
|
694
|
-
...prev,
|
|
695
|
-
[tid]: (prev[tid] || 0) + 1,
|
|
696
|
-
}));
|
|
697
|
-
}
|
|
698
|
-
},
|
|
699
|
-
onTypingChange: setIsLiveAgentTyping,
|
|
700
|
-
onTicketClosed: () => clearSupport(ticket.id),
|
|
701
|
-
onError: (err) => logger.error('AIAgent', '★ Restored socket error:', err),
|
|
702
|
-
});
|
|
703
|
-
socket.connect(ticket.wsUrl);
|
|
704
|
-
// Cache in pendingSocketsRef so handleTicketSelect reuses it without reconnecting
|
|
705
|
-
pendingSocketsRef.current.set(ticket.id, socket);
|
|
706
|
-
logger.info('AIAgent', '★ Single ticket restored and socket cached:', ticket.id);
|
|
707
|
-
}
|
|
708
|
-
} catch (err) {
|
|
709
|
-
logger.error('AIAgent', '★ Failed to restore tickets:', err);
|
|
710
|
-
}
|
|
711
|
-
})();
|
|
712
|
-
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
713
|
-
}, [analyticsKey]);
|
|
714
|
-
|
|
715
|
-
// ─── Ticket selection handlers ────────────────────────────────
|
|
716
|
-
const handleTicketSelect = useCallback(async (ticketId: string) => {
|
|
717
|
-
const ticket = tickets.find(t => t.id === ticketId);
|
|
718
|
-
if (!ticket) return;
|
|
719
|
-
|
|
720
|
-
// Cache (not disconnect!) the previous ticket's socket so it keeps
|
|
721
|
-
// receiving messages in the background and can update unread counts.
|
|
722
|
-
if (supportSocket && selectedTicketId && selectedTicketId !== ticketId) {
|
|
723
|
-
pendingSocketsRef.current.set(selectedTicketId, supportSocket);
|
|
724
|
-
setSupportSocket(null);
|
|
725
|
-
}
|
|
726
|
-
|
|
727
|
-
setSelectedTicketId(ticketId);
|
|
728
|
-
setMode('human');
|
|
729
|
-
|
|
730
|
-
// Clear unread count when user opens a ticket
|
|
731
|
-
setUnreadCounts(prev => {
|
|
732
|
-
if (!prev[ticketId]) return prev;
|
|
733
|
-
const next = { ...prev };
|
|
734
|
-
delete next[ticketId];
|
|
735
|
-
return next;
|
|
736
|
-
});
|
|
737
|
-
|
|
738
|
-
// Mark ticket as read on backend (source of truth)
|
|
739
|
-
(async () => {
|
|
740
|
-
try {
|
|
741
|
-
await fetch(
|
|
742
|
-
`${ENDPOINTS.escalation}/api/v1/escalations/${ticketId}/read?analyticsKey=${analyticsKey}`,
|
|
743
|
-
{ method: 'POST' }
|
|
744
|
-
);
|
|
745
|
-
logger.info('AIAgent', '★ Marked ticket as read:', ticketId);
|
|
746
|
-
} catch (err) {
|
|
747
|
-
logger.warn('AIAgent', '★ Failed to mark ticket as read:', err);
|
|
748
|
-
}
|
|
749
|
-
})();
|
|
750
|
-
|
|
751
|
-
// Trigger scroll to bottom when modal opens
|
|
752
|
-
setChatScrollTrigger(prev => prev + 1);
|
|
753
|
-
|
|
754
|
-
// Fetch latest history from server — this is the source of truth and catches
|
|
755
|
-
// any messages that arrived while the socket was disconnected (modal closed,
|
|
756
|
-
// app backgrounded, etc.)
|
|
757
|
-
try {
|
|
758
|
-
const res = await fetch(
|
|
759
|
-
`${ENDPOINTS.escalation}/api/v1/escalations/${ticketId}?analyticsKey=${analyticsKey}`
|
|
760
|
-
);
|
|
761
|
-
if (res.ok) {
|
|
762
|
-
const data = await res.json();
|
|
763
|
-
const history: Array<{ role: string; content: string; timestamp?: string }> =
|
|
764
|
-
Array.isArray(data.history) ? data.history : [];
|
|
765
|
-
const restored: AIMessage[] = history.map((entry, i) => ({
|
|
766
|
-
id: `restored-${ticketId}-${i}`,
|
|
767
|
-
role: (entry.role === 'live_agent' ? 'assistant' : entry.role) as any,
|
|
768
|
-
content: entry.content,
|
|
769
|
-
timestamp: entry.timestamp ? new Date(entry.timestamp).getTime() : Date.now(),
|
|
770
|
-
}));
|
|
771
|
-
setMessages(restored);
|
|
772
|
-
// Update ticket in local list with fresh history
|
|
773
|
-
if (data.wsUrl) {
|
|
774
|
-
setTickets(prev => prev.map(t => t.id === ticketId ? { ...t, history, wsUrl: data.wsUrl } : t));
|
|
775
|
-
}
|
|
776
|
-
} else {
|
|
777
|
-
// Fallback to local ticket history
|
|
778
|
-
if (ticket.history?.length) {
|
|
779
|
-
const restored: AIMessage[] = ticket.history.map(
|
|
780
|
-
(entry: { role: string; content: string; timestamp?: string }, i: number) => ({
|
|
781
|
-
id: `restored-${ticketId}-${i}`,
|
|
782
|
-
role: (entry.role === 'live_agent' ? 'assistant' : entry.role) as any,
|
|
783
|
-
content: entry.content,
|
|
784
|
-
timestamp: entry.timestamp ? new Date(entry.timestamp).getTime() : Date.now(),
|
|
785
|
-
})
|
|
786
|
-
);
|
|
787
|
-
setMessages(restored);
|
|
788
|
-
} else {
|
|
789
|
-
setMessages([]);
|
|
790
|
-
}
|
|
791
|
-
}
|
|
792
|
-
} catch (err) {
|
|
793
|
-
logger.warn('AIAgent', '★ Failed to fetch ticket history, using local:', err);
|
|
794
|
-
if (ticket.history?.length) {
|
|
795
|
-
const restored: AIMessage[] = ticket.history.map(
|
|
796
|
-
(entry: { role: string; content: string; timestamp?: string }, i: number) => ({
|
|
797
|
-
id: `restored-${ticketId}-${i}`,
|
|
798
|
-
role: (entry.role === 'live_agent' ? 'assistant' : entry.role) as any,
|
|
799
|
-
content: entry.content,
|
|
800
|
-
timestamp: entry.timestamp ? new Date(entry.timestamp).getTime() : Date.now(),
|
|
801
|
-
})
|
|
802
|
-
);
|
|
803
|
-
setMessages(restored);
|
|
804
|
-
} else {
|
|
805
|
-
setMessages([]);
|
|
806
|
-
}
|
|
807
|
-
}
|
|
808
|
-
|
|
809
|
-
// Reuse the already-connected socket if escalation just happened,
|
|
810
|
-
// otherwise create a fresh connection from the ticket's stored wsUrl.
|
|
811
|
-
const cached = pendingSocketsRef.current.get(ticketId);
|
|
812
|
-
if (cached) {
|
|
813
|
-
pendingSocketsRef.current.delete(ticketId);
|
|
814
|
-
setSupportSocket(cached);
|
|
815
|
-
logger.info('AIAgent', '★ Reusing cached escalation socket for ticket:', ticketId);
|
|
816
|
-
return;
|
|
817
|
-
}
|
|
818
|
-
|
|
819
|
-
const socket = new EscalationSocket({
|
|
820
|
-
onReply: (reply: string) => {
|
|
821
|
-
// Always update ticket history
|
|
822
|
-
setTickets(prev => prev.map(t => {
|
|
823
|
-
if (t.id !== ticketId) return t;
|
|
824
|
-
return {
|
|
825
|
-
...t,
|
|
826
|
-
history: [...(t.history || []), { role: 'live_agent', content: reply, timestamp: new Date().toISOString() }],
|
|
827
|
-
};
|
|
828
|
-
}));
|
|
829
|
-
|
|
830
|
-
// Route via ref: only push to messages[] if user is viewing THIS ticket
|
|
831
|
-
if (selectedTicketIdRef.current === ticketId) {
|
|
832
|
-
const msg: AIMessage = {
|
|
833
|
-
id: `human-${Date.now()}`,
|
|
834
|
-
role: 'assistant',
|
|
835
|
-
content: reply,
|
|
836
|
-
timestamp: Date.now(),
|
|
837
|
-
};
|
|
838
|
-
setMessages(prev => [...prev, msg]);
|
|
839
|
-
setLastResult({ success: true, message: `👤 ${reply}`, steps: [] });
|
|
840
|
-
} else {
|
|
841
|
-
setUnreadCounts(prev => ({
|
|
842
|
-
...prev,
|
|
843
|
-
[ticketId]: (prev[ticketId] || 0) + 1,
|
|
844
|
-
}));
|
|
845
|
-
}
|
|
846
|
-
},
|
|
847
|
-
onTypingChange: setIsLiveAgentTyping,
|
|
848
|
-
onTicketClosed: (closedTicketId?: string) => {
|
|
849
|
-
if (closedTicketId) {
|
|
850
|
-
setUnreadCounts(prev => {
|
|
851
|
-
const next = { ...prev };
|
|
852
|
-
delete next[closedTicketId];
|
|
853
|
-
return next;
|
|
854
|
-
});
|
|
855
|
-
}
|
|
856
|
-
clearSupport(ticketId);
|
|
857
|
-
},
|
|
858
|
-
onError: (err) => logger.error('AIAgent', '★ Socket error on select:', err),
|
|
859
|
-
});
|
|
860
|
-
socket.connect(ticket.wsUrl);
|
|
861
|
-
setSupportSocket(socket);
|
|
862
|
-
}, [tickets, supportSocket, selectedTicketId, analyticsKey, clearSupport]);
|
|
863
|
-
|
|
864
|
-
const handleBackToTickets = useCallback(() => {
|
|
865
|
-
// Cache socket in pendingSocketsRef instead of disconnecting —
|
|
866
|
-
// keeps the WS alive so new messages update unreadCounts in real time.
|
|
867
|
-
const currentTicketId = selectedTicketIdRef.current;
|
|
868
|
-
// Use functional setter to read + cache the current socket without closure dependency
|
|
869
|
-
setSupportSocket(prev => {
|
|
870
|
-
if (prev && currentTicketId) {
|
|
871
|
-
pendingSocketsRef.current.set(currentTicketId, prev);
|
|
872
|
-
logger.info('AIAgent', '★ Socket cached for ticket:', currentTicketId, '— stays alive for badge updates');
|
|
873
|
-
}
|
|
874
|
-
return null;
|
|
875
|
-
});
|
|
876
|
-
setSelectedTicketId(null);
|
|
877
|
-
setMessages([]);
|
|
878
|
-
setIsLiveAgentTyping(false);
|
|
879
|
-
}, []); // No dependencies — uses refs/functional setters
|
|
880
|
-
|
|
881
|
-
const mergedCustomTools = useMemo(() => {
|
|
882
|
-
if (!autoEscalateTool) return customTools;
|
|
883
|
-
return { escalate_to_human: autoEscalateTool, ...customTools };
|
|
884
|
-
}, [autoEscalateTool, customTools]);
|
|
885
|
-
|
|
886
|
-
// ─── Voice/Live Mode State ──────────────────────────────────
|
|
887
|
-
const [mode, setMode] = useState<AgentMode>('text');
|
|
888
|
-
const [isMicActive, setIsMicActive] = useState(false);
|
|
889
|
-
const [isSpeakerMuted, setIsSpeakerMuted] = useState(false);
|
|
890
|
-
const [isAISpeaking, setIsAISpeaking] = useState(false);
|
|
891
|
-
const [isVoiceConnected, setIsVoiceConnected] = useState(false);
|
|
892
|
-
|
|
893
|
-
const voiceServiceRef = useRef<VoiceService | null>(null);
|
|
894
|
-
const audioInputRef = useRef<AudioInputService | null>(null);
|
|
895
|
-
const audioOutputRef = useRef<AudioOutputService | null>(null);
|
|
896
|
-
const toolLockRef = useRef<boolean>(false);
|
|
897
|
-
const userHasSpokenRef = useRef<boolean>(false);
|
|
898
|
-
const lastScreenContextRef = useRef<string>('');
|
|
899
|
-
const screenPollIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
|
900
|
-
const lastAgentErrorRef = useRef<string | null>(null);
|
|
901
|
-
|
|
902
|
-
const availableModes: AgentMode[] = useMemo(() => {
|
|
903
|
-
const modes: AgentMode[] = ['text'];
|
|
904
|
-
if (enableVoice) modes.push('voice');
|
|
905
|
-
if (tickets.length > 0) modes.push('human');
|
|
906
|
-
logger.info('AIAgent', '★ availableModes recomputed:', modes, '| tickets:', tickets.length, '| ticketIds:', tickets.map(t => t.id));
|
|
907
|
-
return modes;
|
|
908
|
-
}, [enableVoice, tickets]);
|
|
909
|
-
|
|
910
|
-
// Ref-based resolver for ask_user — stays alive across renders
|
|
911
|
-
const askUserResolverRef = useRef<((answer: string) => void) | null>(null);
|
|
912
|
-
|
|
913
|
-
// ─── Create Runtime ──────────────────────────────────────────
|
|
914
|
-
|
|
915
|
-
const config: AgentConfig = useMemo(() => ({
|
|
916
|
-
apiKey,
|
|
917
|
-
proxyUrl,
|
|
918
|
-
proxyHeaders,
|
|
919
|
-
voiceProxyUrl,
|
|
920
|
-
voiceProxyHeaders,
|
|
921
|
-
model,
|
|
922
|
-
language: 'en',
|
|
923
|
-
maxSteps,
|
|
924
|
-
interactiveBlacklist,
|
|
925
|
-
interactiveWhitelist,
|
|
926
|
-
onBeforeStep,
|
|
927
|
-
onAfterStep,
|
|
928
|
-
onBeforeTask,
|
|
929
|
-
onAfterTask,
|
|
930
|
-
customTools: mode === 'voice' ? { ...mergedCustomTools, ask_user: null } : mergedCustomTools,
|
|
931
|
-
instructions,
|
|
932
|
-
stepDelay,
|
|
933
|
-
mcpServerUrl,
|
|
934
|
-
router,
|
|
935
|
-
pathname,
|
|
936
|
-
onStatusUpdate: setStatusText,
|
|
937
|
-
onTokenUsage,
|
|
938
|
-
knowledgeBase,
|
|
939
|
-
knowledgeMaxTokens,
|
|
940
|
-
enableUIControl,
|
|
941
|
-
screenMap: useScreenMap ? screenMap : undefined,
|
|
942
|
-
maxTokenBudget,
|
|
943
|
-
maxCostUSD,
|
|
944
|
-
interactionMode,
|
|
945
|
-
// Block the agent loop until user responds
|
|
946
|
-
onAskUser: mode === 'voice' ? undefined : ((question: string) => {
|
|
947
|
-
return new Promise<string>((resolve) => {
|
|
948
|
-
askUserResolverRef.current = resolve;
|
|
949
|
-
// Show question in chat bar, allow user input
|
|
950
|
-
setLastResult({ success: true, message: `❓ ${question}`, steps: [] });
|
|
951
|
-
setIsThinking(false);
|
|
952
|
-
setStatusText('');
|
|
953
|
-
});
|
|
954
|
-
}),
|
|
955
|
-
// Toggle isAgentActing flag on TelemetryService before/after every tool
|
|
956
|
-
// so that AI-driven taps are never tracked as user_interaction events.
|
|
957
|
-
onToolExecute: (active: boolean) => {
|
|
958
|
-
telemetryRef.current?.setAgentActing(active);
|
|
959
|
-
},
|
|
960
|
-
}), [
|
|
961
|
-
mode, apiKey, proxyUrl, proxyHeaders, voiceProxyUrl, voiceProxyHeaders, model, maxSteps,
|
|
962
|
-
interactiveBlacklist, interactiveWhitelist,
|
|
963
|
-
onBeforeStep, onAfterStep, onBeforeTask, onAfterTask,
|
|
964
|
-
transformScreenContent, customTools, instructions, stepDelay,
|
|
965
|
-
mcpServerUrl, router, pathname, onTokenUsage,
|
|
966
|
-
knowledgeBase, knowledgeMaxTokens, enableUIControl, screenMap, useScreenMap,
|
|
967
|
-
maxTokenBudget, maxCostUSD,
|
|
968
|
-
]);
|
|
969
|
-
|
|
970
|
-
const provider = useMemo(
|
|
971
|
-
() => createProvider(providerName, apiKey, model, proxyUrl, proxyHeaders),
|
|
972
|
-
[providerName, apiKey, model, proxyUrl, proxyHeaders]
|
|
973
|
-
);
|
|
974
|
-
|
|
975
|
-
const runtime = useMemo(
|
|
976
|
-
() => new AgentRuntime(provider, config, rootViewRef.current, navRef),
|
|
977
|
-
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
978
|
-
[provider, config],
|
|
979
|
-
);
|
|
980
|
-
|
|
981
|
-
// Update refs when they change
|
|
982
|
-
useEffect(() => {
|
|
983
|
-
runtime.updateRefs(rootViewRef.current, navRef);
|
|
984
|
-
}, [runtime, navRef]);
|
|
985
|
-
|
|
986
|
-
// ─── Telemetry ─────────────────────────────────────────────
|
|
987
|
-
|
|
988
|
-
const telemetryRef = useRef<TelemetryService | null>(null);
|
|
989
|
-
|
|
990
|
-
useEffect(() => {
|
|
991
|
-
if (!analyticsKey && !analyticsProxyUrl) {
|
|
992
|
-
bindTelemetryService(null);
|
|
993
|
-
return;
|
|
994
|
-
}
|
|
995
|
-
|
|
996
|
-
// Initialize persistent device ID before telemetry starts
|
|
997
|
-
initDeviceId().then(() => {
|
|
998
|
-
|
|
999
|
-
const telemetry = new TelemetryService({
|
|
1000
|
-
analyticsKey,
|
|
1001
|
-
analyticsProxyUrl,
|
|
1002
|
-
analyticsProxyHeaders,
|
|
1003
|
-
debug,
|
|
1004
|
-
});
|
|
1005
|
-
telemetryRef.current = telemetry;
|
|
1006
|
-
bindTelemetryService(telemetry);
|
|
1007
|
-
telemetry.start();
|
|
1008
|
-
|
|
1009
|
-
const initialRoute = navRef?.getCurrentRoute?.();
|
|
1010
|
-
if (initialRoute?.name) {
|
|
1011
|
-
telemetry.setScreen(initialRoute.name);
|
|
1012
|
-
}
|
|
1013
|
-
}); // initDeviceId
|
|
1014
|
-
}, [analyticsKey, analyticsProxyUrl, analyticsProxyHeaders, bindTelemetryService, debug, navRef]);
|
|
1015
|
-
|
|
1016
|
-
// ─── Security warnings ──────────────────────────────────────
|
|
1017
|
-
|
|
1018
|
-
useEffect(() => {
|
|
1019
|
-
// @ts-ignore
|
|
1020
|
-
if (typeof __DEV__ !== 'undefined' && !__DEV__ && apiKey && !proxyUrl) {
|
|
1021
|
-
logger.warn(
|
|
1022
|
-
'[MobileAI] ⚠️ SECURITY WARNING: You are using `apiKey` directly in a production build. ' +
|
|
1023
|
-
'This exposes your LLM provider key in the app binary. ' +
|
|
1024
|
-
'Use `apiProxyUrl` to route requests through your backend instead. ' +
|
|
1025
|
-
'See docs for details.'
|
|
1026
|
-
);
|
|
1027
|
-
}
|
|
1028
|
-
}, [apiKey, proxyUrl]);
|
|
1029
|
-
|
|
1030
|
-
// Track screen changes via navRef
|
|
1031
|
-
useEffect(() => {
|
|
1032
|
-
if (!navRef?.addListener || !telemetryRef.current) return;
|
|
1033
|
-
|
|
1034
|
-
const unsubscribe = navRef.addListener('state', () => {
|
|
1035
|
-
const currentRoute = navRef.getCurrentRoute?.();
|
|
1036
|
-
if (currentRoute?.name) {
|
|
1037
|
-
telemetryRef.current?.setScreen(currentRoute.name);
|
|
1038
|
-
}
|
|
1039
|
-
});
|
|
1040
|
-
|
|
1041
|
-
return () => unsubscribe?.();
|
|
1042
|
-
}, [navRef]);
|
|
1043
|
-
|
|
1044
|
-
// ─── MCP Bridge ──────────────────────────────────────────────
|
|
1045
|
-
|
|
1046
|
-
useEffect(() => {
|
|
1047
|
-
if (!mcpServerUrl) return;
|
|
1048
|
-
|
|
1049
|
-
logger.info('AIAgent', `Setting up MCP bridge at ${mcpServerUrl}`);
|
|
1050
|
-
const bridge = new MCPBridge(mcpServerUrl, runtime);
|
|
1051
|
-
|
|
1052
|
-
return () => {
|
|
1053
|
-
bridge.destroy();
|
|
1054
|
-
};
|
|
1055
|
-
}, [mcpServerUrl, runtime]);
|
|
1056
|
-
|
|
1057
|
-
// ─── Proactive Idle Agent ────────────────────────────────────
|
|
1058
|
-
|
|
1059
|
-
const idleDetectorRef = useRef<IdleDetector | null>(null);
|
|
1060
|
-
const [proactiveStage, setProactiveStage] = useState<'hidden' | 'pulse' | 'badge'>('hidden');
|
|
1061
|
-
const [proactiveBadgeText, setProactiveBadgeText] = useState('');
|
|
1062
|
-
|
|
1063
|
-
useEffect(() => {
|
|
1064
|
-
if (proactiveHelp?.enabled === false) {
|
|
1065
|
-
idleDetectorRef.current?.destroy();
|
|
1066
|
-
idleDetectorRef.current = null;
|
|
1067
|
-
setProactiveStage('hidden');
|
|
1068
|
-
return;
|
|
1069
|
-
}
|
|
1070
|
-
|
|
1071
|
-
if (!idleDetectorRef.current) {
|
|
1072
|
-
idleDetectorRef.current = new IdleDetector();
|
|
1073
|
-
}
|
|
1074
|
-
|
|
1075
|
-
idleDetectorRef.current.start({
|
|
1076
|
-
pulseAfterMs: (proactiveHelp?.pulseAfterMinutes || 2) * 60000,
|
|
1077
|
-
badgeAfterMs: (proactiveHelp?.badgeAfterMinutes || 4) * 60000,
|
|
1078
|
-
onPulse: () => setProactiveStage('pulse'),
|
|
1079
|
-
onBadge: (suggestion: string) => {
|
|
1080
|
-
setProactiveBadgeText(suggestion);
|
|
1081
|
-
setProactiveStage('badge');
|
|
1082
|
-
},
|
|
1083
|
-
onReset: () => setProactiveStage('hidden'),
|
|
1084
|
-
generateSuggestion: () => proactiveHelp?.generateSuggestion?.(telemetryRef.current?.screen || 'Home') || proactiveHelp?.badgeText || "Need help with this screen?",
|
|
1085
|
-
});
|
|
1086
|
-
|
|
1087
|
-
return () => {
|
|
1088
|
-
idleDetectorRef.current?.destroy();
|
|
1089
|
-
idleDetectorRef.current = null;
|
|
1090
|
-
};
|
|
1091
|
-
}, [proactiveHelp, telemetryRef]);
|
|
1092
|
-
|
|
1093
|
-
// ─── Voice/Live Service Initialization ──────────────────────

// Initialize voice services when mode changes to voice.
// Creates (once) and wires together three long-lived objects held in refs:
//   VoiceService (Gemini Live socket), AudioOutputService (playback queue),
//   AudioInputService (mic capture), plus a 5s screen-change poller.
// The ordering constraints between audio-session init, mic start, and tool
// execution are load-bearing — see the inline comments below.
useEffect(() => {
  if (mode !== 'voice') {
    logger.info('AIAgent', `Mode ${mode} — skipping voice service init`);
    return;
  }

  logger.info('AIAgent', `Mode changed to "${mode}" — initializing voice services...`);

  // Track async audio output init — mic MUST wait for this
  let audioOutputInitPromise: Promise<void> = Promise.resolve();

  // Create VoiceService with runtime's built-in tools (navigate, tap, type, done, etc.)
  if (!voiceServiceRef.current) {
    logger.info('AIAgent', 'Creating VoiceService...');
    const runtimeTools = runtime.getTools();
    logger.info('AIAgent', `Registering ${runtimeTools.length} tools with VoiceService: ${runtimeTools.map(t => t.name).join(', ')}`);
    // Use voice-adapted system prompt — same core rules as text mode
    // but without agent-loop directives that trigger autonomous actions
    const voicePrompt = buildVoiceSystemPrompt('en', instructions?.system, !!knowledgeBase);
    logger.info('AIAgent', `📝 Voice system prompt (${voicePrompt.length} chars):\n${voicePrompt}`);
    voiceServiceRef.current = new VoiceService({
      apiKey,
      // Voice-specific proxy settings win over the general ones.
      proxyUrl: voiceProxyUrl || proxyUrl,
      proxyHeaders: voiceProxyHeaders || proxyHeaders,
      systemPrompt: voicePrompt,
      tools: runtimeTools,
      language: 'en',
    });
    logger.info('AIAgent', `VoiceService created with ${runtimeTools.length} tools: ${runtimeTools.map(t => t.name).join(', ')}`);
  }

  // Create AudioOutputService if not exists
  if (!audioOutputRef.current) {
    logger.info('AIAgent', 'Creating AudioOutputService...');
    audioOutputRef.current = new AudioOutputService({
      onError: (err) => logger.error('AIAgent', `AudioOutput error: ${err}`),
    });
    // IMPORTANT: Must await initialize() BEFORE starting mic.
    // initialize() calls setAudioSessionOptions which reconfigures the
    // audio hardware. If the mic starts before this finishes, the native
    // audio session change kills the recorder's device handle.
    audioOutputInitPromise = audioOutputRef.current.initialize().then((ok) => {
      logger.info('AIAgent', `AudioOutputService initialized: ${ok}`);
    });
  }

  // Create AudioInputService if not exists
  if (!audioInputRef.current) {
    logger.info('AIAgent', 'Creating AudioInputService...');
    audioInputRef.current = new AudioInputService({
      // Default 16kHz — Gemini Live API input standard
      onAudioChunk: (chunk) => {
        logger.info('AIAgent', `🎤 onAudioChunk: ${chunk.length} chars, voiceService=${!!voiceServiceRef.current}, connected=${voiceServiceRef.current?.isConnected}`);
        voiceServiceRef.current?.sendAudio(chunk);
      },
      onError: (err) => logger.error('AIAgent', `AudioInput error: ${err}`),
      onPermissionDenied: () => logger.warn('AIAgent', 'Mic permission denied by user'),
    });
  }

  // Connect VoiceService (async — SDK's ai.live.connect returns a Promise)
  logger.info('AIAgent', 'Connecting VoiceService...');
  void voiceServiceRef.current.connect({
    // Model audio arrives in chunks; queue them for sequential playback.
    onAudioResponse: (audio) => {
      logger.info('AIAgent', `🔊 Audio response: ${audio.length} chars, audioOutputRef=${!!audioOutputRef.current}`);
      setIsAISpeaking(true);
      if (!audioOutputRef.current) {
        logger.error('AIAgent', '❌ audioOutputRef.current is NULL — cannot play audio!');
        return;
      }
      audioOutputRef.current.enqueue(audio);
    },
    onStatusChange: (status) => {
      logger.info('AIAgent', `Voice status: ${status}`);
      const connected = status === 'connected';
      setIsVoiceConnected(connected);
      if (connected) {
        logger.info('AIAgent', '✅ VoiceService connected — waiting for audio session init before starting mic...');
        // Wait for audio session config to finish BEFORE starting mic.
        // If mic starts while setAudioSessionOptions is in flight,
        // the native audio device gets killed (AudioDeviceStop error).
        audioOutputInitPromise.then(() => {
          logger.info('AIAgent', '✅ Audio session ready — starting mic now...');
          audioInputRef.current?.start().then((ok) => {
            if (ok) {
              setIsMicActive(true);
              logger.info('AIAgent', '🎙️ Mic auto-started after connection');
            }
          });
        });
      }

      // Handle unexpected disconnection — auto-reconnect ONLY if not intentional
      // NOTE: `mode` here is the value captured when this effect ran ('voice'),
      // so the guard that actually prevents reconnect after cleanup is the
      // voiceServiceRef.current null-check (cleanup nulls the ref).
      if (
        status === 'disconnected' &&
        mode === 'voice' &&
        voiceServiceRef.current &&
        !voiceServiceRef.current.intentionalDisconnect
      ) {
        // Stop mic & audio immediately so user isn't stuck
        audioInputRef.current?.stop();
        audioOutputRef.current?.stop();
        setIsMicActive(false);
        setIsAISpeaking(false);

        logger.warn('AIAgent', '⚠️ Voice connection dropped unexpectedly — attempting reconnect in 2s...');

        // Auto-reconnect after short delay
        // NOTE(review): this timer is never cleared in the cleanup below; it
        // relies on the ref guards above to become a no-op — confirm intended.
        setTimeout(() => {
          if (
            mode === 'voice' &&
            voiceServiceRef.current &&
            !voiceServiceRef.current.intentionalDisconnect &&
            voiceServiceRef.current.lastCallbacks
          ) {
            logger.info('AIAgent', '🔄 Reconnecting VoiceService...');
            void voiceServiceRef.current.connect(voiceServiceRef.current.lastCallbacks);
          }
        }, 2000);
      }
    },
    onTranscript: (text, isFinal, role) => {
      logger.info('AIAgent', `Transcript [${role}] (final=${isFinal}): "${text}"`);
      // First user utterance lifts the tool-call gate in onToolCall below.
      if (role === 'user') {
        userHasSpokenRef.current = true;
      }
    },
    onSetupComplete: () => {
      logger.info('AIAgent', '✅ SDK setup complete — sending initial screen context now');
      userHasSpokenRef.current = false;
      const initialContext = runtime.getScreenContext();
      lastScreenContextRef.current = initialContext; // Seed for screen change detection
      const passiveContext = `[SYSTEM CONTEXT — THIS IS NOT A USER REQUEST. DO NOT RESPOND. DO NOT NAVIGATE. DO NOT CALL ANY TOOLS. DO NOT SPEAK. Just silently read and memorize this screen layout. You will use it when the user speaks their first voice command.]\n\n${initialContext}`;
      logger.info('AIAgent', `📡 Initial screen context (${passiveContext.length} chars):\n${passiveContext}`);
      voiceServiceRef.current?.sendScreenContext(passiveContext);
      logger.info('AIAgent', '📡 Initial screen context sent (turnComplete=true)');
    },
    onToolCall: async (toolCall) => {
      logger.info('AIAgent', `🔧 Voice tool call: ${toolCall.name}(${JSON.stringify(toolCall.args)}) [id=${toolCall.id}]`);

      // Code-level gate: reject tool calls before the user has spoken.
      // The model sometimes auto-navigates on receiving screen context.
      if (!userHasSpokenRef.current) {
        logger.warn('AIAgent', `🚫 Rejected tool call ${toolCall.name} — user hasn't spoken yet`);
        voiceServiceRef.current?.sendFunctionResponse(toolCall.name, toolCall.id, {
          result: 'Action rejected: wait for the user to speak before performing any actions.',
        });
        return;
      }

      // CRITICAL: Gate audio input during tool execution.
      // The Gemini Live API crashes (code 1008) if sendRealtimeInput
      // (audio) is called while a tool call is pending. Stop the mic
      // before executing the tool and resume after the response is sent.
      audioInputRef.current?.stop();
      logger.info('AIAgent', `🔇 Mic paused for tool execution: ${toolCall.name}`);

      // One-tool-at-a-time enforcement (mirrors text mode's line 752).
      // Busy-waits in 50ms slices until the previous tool releases the lock.
      if (toolLockRef.current) {
        logger.warn('AIAgent', `⏳ Tool locked — waiting for previous tool to finish before executing ${toolCall.name}`);
        while (toolLockRef.current) {
          await new Promise(resolve => setTimeout(resolve, 50));
        }
      }
      toolLockRef.current = true;

      try {
        // Execute the tool via AgentRuntime and send result back to Gemini
        const result = await runtime.executeTool(toolCall.name, toolCall.args);
        logger.info('AIAgent', `🔧 Tool result for ${toolCall.name}: ${result}`);

        // Step delay — matches text mode's stepDelay (line 820 in AgentRuntime).
        await new Promise(resolve => setTimeout(resolve, 300));

        // Include updated screen context IN the tool response
        const updatedContext = runtime.getScreenContext();
        lastScreenContextRef.current = updatedContext; // Sync with poll tracker
        logger.info('AIAgent', `📡 Updated screen context after ${toolCall.name} (${updatedContext.length} chars):\n${updatedContext}`);
        const enrichedResult = `${result}\n\n<updated_screen>\n${updatedContext}\n</updated_screen>`;
        logger.info('AIAgent', `📡 Enriched tool response (${enrichedResult.length} chars):\n${enrichedResult}`);

        voiceServiceRef.current?.sendFunctionResponse(toolCall.name, toolCall.id, { result: enrichedResult });
        logger.info('AIAgent', `📡 Tool response sent for ${toolCall.name} [id=${toolCall.id}]`);
      } finally {
        toolLockRef.current = false;
        // Resume mic after tool response is sent
        if (voiceServiceRef.current?.isConnected) {
          audioInputRef.current?.start().then((ok) => {
            if (ok) {
              setIsMicActive(true);
              logger.info('AIAgent', `🔊 Mic resumed after tool execution: ${toolCall.name}`);
            }
          });
        }
      }
    },
    onError: (err) => {
      logger.error('AIAgent', `VoiceService error: ${err}`);
      // Stop mic & audio on error to prevent stale state
      audioInputRef.current?.stop();
      audioOutputRef.current?.stop();
      setIsMicActive(false);
      setIsAISpeaking(false);
    },
    onTurnComplete: () => {
      logger.info('AIAgent', 'AI turn complete');
      setIsAISpeaking(false);
      // No cool-down or echo gate needed — hardware AEC handles everything.
      // Mic stays active and ready for the next voice command immediately.
    },
  });

  // ─── Screen Change Detection ───────────────────────────────
  // Poll the Fiber tree every 5s and resend context if the screen meaningfully changed.
  // This gives voice mode the same screen-awareness as text mode's per-step re-read.
  const SCREEN_POLL_INTERVAL = 5000;
  const MIN_DIFF_RATIO = 0.05; // Ignore changes smaller than 5% of total length (animation flicker)

  screenPollIntervalRef.current = setInterval(() => {
    if (!voiceServiceRef.current?.isConnected) return;
    // Skip during tool execution — the enriched tool response handles that
    if (toolLockRef.current) {
      logger.debug('AIAgent', '🔄 Screen poll skipped — tool lock active');
      return;
    }

    try {
      const currentContext = runtime.getScreenContext();
      if (currentContext === lastScreenContextRef.current) return; // No change

      // Check if the change is meaningful (not just animation/cursor flicker)
      // Heuristic: compare only string lengths, not content — cheap but can
      // miss same-length content changes.
      const lastLen = lastScreenContextRef.current.length;
      const diff = Math.abs(currentContext.length - lastLen);
      const diffRatio = lastLen > 0 ? diff / lastLen : 1;

      if (diffRatio < MIN_DIFF_RATIO) {
        logger.debug('AIAgent', `🔄 Screen poll: minor change ignored (${diff} chars, ${(diffRatio * 100).toFixed(1)}% < ${MIN_DIFF_RATIO * 100}% threshold)`);
        return;
      }

      logger.info('AIAgent', `🔄 Screen change detected (${lastLen} → ${currentContext.length} chars, ${(diffRatio * 100).toFixed(1)}% diff)`);
      lastScreenContextRef.current = currentContext;
      const passiveUpdate = `[SCREEN UPDATE — The UI has changed. Here is the current screen layout. This is not a user request — do not act unless the user asks.]\n\n${currentContext}`;
      voiceServiceRef.current?.sendScreenContext(passiveUpdate);
      logger.info('AIAgent', '🔄 Updated screen context sent to voice model');
    } catch (err) {
      logger.warn('AIAgent', `🔄 Screen poll error: ${err}`);
    }
  }, SCREEN_POLL_INTERVAL);

  // Cleanup on mode change back to text
  // NOTE(review): audioInputRef/audioOutputRef instances are kept (not
  // nulled) so they are reused on re-entry; only the VoiceService socket
  // is torn down. Output playback is not stopped here — confirm intended.
  return () => {
    logger.info('AIAgent', `Cleaning up voice services (leaving "${mode}" mode)`);
    // Stop screen change polling
    if (screenPollIntervalRef.current) {
      clearInterval(screenPollIntervalRef.current);
      screenPollIntervalRef.current = null;
      logger.info('AIAgent', '🔄 Screen poll stopped');
    }
    lastScreenContextRef.current = '';
    voiceServiceRef.current?.disconnect();
    voiceServiceRef.current = null;
    audioInputRef.current?.stop();
    setIsMicActive(false);
    setIsAISpeaking(false);
    setIsVoiceConnected(false);
  };
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, [mode, apiKey, proxyUrl, proxyHeaders, voiceProxyUrl, voiceProxyHeaders, runtime, instructions]);
|
|
1365
|
-
|
|
1366
|
-
// ─── Stop Voice Session (full cleanup) ─────────────────────

// User-initiated teardown of the live voice session. Stops capture and
// playback, drops the socket, resets voice UI state, and switches the
// component back to text mode (which also runs the voice effect's cleanup).
const stopVoiceSession = useCallback(() => {
  logger.info('AIAgent', '🛑 Stopping voice session (full cleanup)...');
  // 1. Stop mic input
  audioInputRef.current?.stop();
  // 2. Stop audio output (clear queued chunks)
  audioOutputRef.current?.stop();
  // 3. Disconnect WebSocket
  voiceServiceRef.current?.disconnect();
  voiceServiceRef.current = null;
  // 4. Reset state
  setIsMicActive(false);
  setIsAISpeaking(false);
  setIsVoiceConnected(false);
  // 5. Switch back to text mode (triggers cleanup effect naturally)
  setMode('text');
  logger.info('AIAgent', '🛑 Voice session fully stopped');
}, [runtime]);
|
|
1385
|
-
|
|
1386
|
-
// ─── Execute ──────────────────────────────────────────────────

// Entry point for every outgoing user message. Routing, in order:
//   1. Human-support intercept (active ticket + socket) — message goes to the
//      live agent, never to the LLM.
//   2. Pending ask_user — the message resolves the runtime's question.
//   3. Otherwise — a fresh agent execution via runtime.execute().
// `options.onResult` (per-call) takes precedence over the component-level
// `onResult` prop.
const handleSend = useCallback(async (
  message: string,
  options?: { onResult?: (result: ExecutionResult) => void }
) => {
  // Ignore empty input and re-entrant sends while a run is in flight.
  if (!message.trim() || isThinking) return;

  logger.info('AIAgent', `User message: "${message}"`);
  setLastUserMessage(message.trim());

  // Intercom-style transparent intercept:
  // If we're connected to a human agent, all text input goes directly to them.
  if (selectedTicketId && supportSocket) {
    // Gate: do not allow sending if the ticket is closed/resolved.
    const activeTicket = tickets.find(t => t.id === selectedTicketId);
    const CLOSED_STATUSES = ['closed', 'resolved'];
    if (activeTicket && CLOSED_STATUSES.includes(activeTicket.status)) {
      setLastResult({
        success: false,
        message: 'This conversation is closed. Please start a new request.',
        steps: [],
      });
      return;
    }

    if (supportSocket.sendText(message)) {
      setMessages((prev) => [
        ...prev,
        { id: `user-${Date.now()}`, role: 'user', content: message.trim(), timestamp: Date.now() },
      ]);
      // Brief "sending" indicator — there is no ack to await, so it is timed.
      setIsThinking(true);
      setStatusText('Sending to agent...');
      setTimeout(() => {
        setIsThinking(false);
        setStatusText('');
      }, 800);
    } else {
      setLastResult({
        success: false,
        message: 'Failed to send message to support agent. Connection lost.',
        steps: [],
      });
    }
    return;
  }

  // Append user message to AI thread
  setMessages((prev) => [
    ...prev,
    {
      id: Date.now().toString() + Math.random(),
      role: 'user',
      content: message.trim(),
      timestamp: Date.now(),
    },
  ]);

  // If there's a pending ask_user, resolve it instead of starting a new execution
  if (askUserResolverRef.current) {
    const resolver = askUserResolverRef.current;
    askUserResolverRef.current = null;
    setIsThinking(true);
    setStatusText('Processing your answer...');
    setLastResult(null);
    // Hands the answer back to the suspended runtime loop; that loop's own
    // completion path clears the thinking state.
    resolver(message);
    return;
  }

  // Normal execution — new task
  setIsThinking(true);
  setStatusText('Thinking...');
  setLastResult(null);

  // Telemetry: track agent request
  telemetryRef.current?.track('agent_request', {
    query: message.trim(),
  });

  try {
    // Ensure we have the latest Fiber tree ref
    runtime.updateRefs(rootViewRef.current, navRef);

    const result = await runtime.execute(message, messages);

    // Telemetry: track agent completion and per-step details
    if (telemetryRef.current) {
      for (const step of result.steps ?? []) {
        telemetryRef.current.track('agent_step', {
          tool: step.action.name,
          args: step.action.input,
          // Truncate string outputs to keep telemetry payloads small.
          result: typeof step.action.output === 'string'
            ? step.action.output.substring(0, 200)
            : String(step.action.output),
        });
      }
      telemetryRef.current.track('agent_complete', {
        success: result.success,
        steps: result.steps?.length ?? 0,
        tokens: result.tokenUsage?.totalTokens ?? 0,
        cost: result.tokenUsage?.estimatedCostUSD ?? 0,
      });
    }

    logger.info('AIAgent', '★ handleSend — SETTING lastResult:', result.message.substring(0, 80), '| mode:', mode);
    logger.info('AIAgent', '★ handleSend — tickets:', tickets.length, 'selectedTicketId:', selectedTicketId);

    // Don't overwrite lastResult if escalation already switched us to human mode
    // (mode in this closure is stale — the actual mode may have changed during async execution)
    const stepsHadEscalation = result.steps?.some(s => s.action.name === 'escalate_to_human');
    if (!stepsHadEscalation) {
      setLastResult(result);
    }

    setMessages((prev) => [
      ...prev,
      {
        id: Date.now().toString() + Math.random(),
        role: 'assistant',
        content: result.message,
        timestamp: Date.now(),
        result,
      },
    ]);

    // Per-call callback overrides the component-level onResult prop.
    if (options?.onResult) {
      options.onResult(result);
    } else {
      onResult?.(result);
    }

    logger.info('AIAgent', `Result: ${result.success ? '✅' : '❌'} ${result.message}`);
  } catch (error: any) {
    logger.error('AIAgent', 'Execution failed:', error);

    // Telemetry: track agent failure
    telemetryRef.current?.track('agent_complete', {
      success: false,
      error: error.message,
    });

    setLastResult({
      success: false,
      message: `Error: ${error.message}`,
      steps: [],
    });
  } finally {
    setIsThinking(false);
    setStatusText('');
  }
}, [runtime, navRef, onResult, messages, isThinking]);
|
|
1537
|
-
|
|
1538
|
-
// ─── Context value (for useAI bridge) ─────────────────────────
|
|
1539
|
-
|
|
1540
|
-
const handleCancel = useCallback(() => {
|
|
1541
|
-
runtime.cancel();
|
|
1542
|
-
setIsThinking(false);
|
|
1543
|
-
setStatusText('');
|
|
1544
|
-
}, [runtime]);
|
|
1545
|
-
|
|
1546
|
-
const contextValue = useMemo(() => ({
|
|
1547
|
-
runtime,
|
|
1548
|
-
send: handleSend,
|
|
1549
|
-
isLoading: isThinking,
|
|
1550
|
-
status: statusText,
|
|
1551
|
-
lastResult,
|
|
1552
|
-
messages,
|
|
1553
|
-
clearMessages,
|
|
1554
|
-
cancel: handleCancel,
|
|
1555
|
-
}), [runtime, handleSend, handleCancel, isThinking, statusText, lastResult, messages, clearMessages]);
|
|
1556
|
-
|
|
1557
|
-
// ─── Render ──────────────────────────────────────────────────

// Two layers: the host app's content (wrapped for Fiber capture, tap
// analytics, and an error boundary), and an absolutely-positioned floating
// layer holding all agent UI (highlight overlay, thinking overlay, chat bar,
// support modal).
return (
  <AgentContext.Provider value={contextValue}>
    <View style={styles.root}>
      {/* App content — rootViewRef captures Fiber tree for element detection */}
      <View
        ref={rootViewRef}
        style={styles.root}
        collapsable={false}
        onStartShouldSetResponderCapture={(event) => {
          // Auto-capture every tap for analytics (zero-config)
          // Skip if the AI agent is currently executing a tool — those are
          // already tracked as `agent_step` events with full context.
          if (telemetryRef.current && !telemetryRef.current.isAgentActing) {
            const label = extractTouchLabel(event.nativeEvent);
            if (label && label !== 'Unknown Element' && label !== '[pressable]') {
              telemetryRef.current.track('user_interaction', {
                type: 'tap',
                label,
                actor: 'user',
                x: Math.round(event.nativeEvent.pageX),
                y: Math.round(event.nativeEvent.pageY),
              });

              // Track if user is rage-tapping this specific element
              checkRageClick(label, telemetryRef.current);
            } else {
              // Tapped an unlabelled/empty area — recorded as a dead click
              telemetryRef.current.track('dead_click', {
                x: Math.round(event.nativeEvent.pageX),
                y: Math.round(event.nativeEvent.pageY),
                screen: telemetryRef.current.screen,
              });
            }
          }
          // IMPORTANT: return false so we don't steal the touch from the actual button
          return false;
        }}
      >
        <AgentErrorBoundary
          telemetryRef={telemetryRef}
          onError={(error, componentStack) => {
            // Remember the last render error so the agent can report it.
            const errorMsg = `⚠️ A rendering error occurred: ${error.message}`;
            lastAgentErrorRef.current = errorMsg;
            logger.warn('AIAgent', `🛡️ Error caught by boundary: ${error.message}\n${componentStack || ''}`);
          }}
        >
          {children}
        </AgentErrorBoundary>
      </View>

      {/* Floating UI — absolute-positioned View; pointerEvents="box-none" lets touches pass through to the app except on interactive children */}
      <View style={styles.floatingLayer} pointerEvents="box-none">
        {/* Highlight Overlay (always active, listens to events) */}
        <HighlightOverlay />

        {/* Overlay (shown while thinking) */}
        <AgentOverlay visible={isThinking} statusText={statusText} onCancel={handleCancel} />

        {/* Chat bar wrapped in Proactive Hint */}
        {showChatBar && (
          <ProactiveHint
            stage={proactiveStage}
            badgeText={proactiveBadgeText}
            onDismiss={() => idleDetectorRef.current?.dismiss()}
          >
            <AgentChatBar
              onSend={handleSend}
              isThinking={isThinking}
              lastResult={lastResult}
              lastUserMessage={lastUserMessage}
              language={'en'}
              onDismiss={() => { setLastResult(null); setLastUserMessage(null); }}
              theme={accentColor || theme ? {
                ...(accentColor ? { primaryColor: accentColor } : {}),
                ...theme,
              } : undefined}
              availableModes={availableModes}
              mode={mode}
              onModeChange={(newMode) => {
                logger.info('AIAgent', '★ onModeChange:', mode, '→', newMode, '| tickets:', tickets.length, 'selectedTicketId:', selectedTicketId);
                setMode(newMode);
              }}
              isMicActive={isMicActive}
              isSpeakerMuted={isSpeakerMuted}
              isAISpeaking={isAISpeaking}
              isAgentTyping={isLiveAgentTyping}
              onStopSession={stopVoiceSession}
              isVoiceConnected={isVoiceConnected}
              onMicToggle={(active) => {
                // Mic can only be enabled once the voice socket is live.
                if (active && !isVoiceConnected) {
                  logger.warn('AIAgent', 'Cannot toggle mic — VoiceService not connected yet');
                  return;
                }
                logger.info('AIAgent', `Mic toggle: ${active ? 'ON' : 'OFF'}`);
                setIsMicActive(active);
                if (active) {
                  logger.info('AIAgent', 'Starting AudioInput...');
                  audioInputRef.current?.start().then((ok) => {
                    logger.info('AIAgent', `AudioInput start result: ${ok}`);
                  });
                } else {
                  logger.info('AIAgent', 'Stopping AudioInput...');
                  audioInputRef.current?.stop();
                }
              }}
              onSpeakerToggle={(muted) => {
                logger.info('AIAgent', `Speaker toggle: ${muted ? 'MUTED' : 'UNMUTED'}`);
                setIsSpeakerMuted(muted);
                if (muted) {
                  audioOutputRef.current?.mute();
                } else {
                  audioOutputRef.current?.unmute();
                }
              }}
              tickets={tickets}
              selectedTicketId={selectedTicketId}
              onTicketSelect={handleTicketSelect}
              onBackToTickets={handleBackToTickets}
              autoExpandTrigger={autoExpandTrigger}
              unreadCounts={unreadCounts}
              totalUnread={totalUnread}
              showDiscoveryTooltip={tooltipVisible}
              onTooltipDismiss={handleTooltipDismiss}
            />
          </ProactiveHint>
        )}

        {/* Support chat modal — opens when user taps a ticket */}
        <SupportChatModal
          visible={mode === 'human' && !!selectedTicketId}
          messages={messages}
          onSend={handleSend}
          onClose={handleBackToTickets}
          isAgentTyping={isLiveAgentTyping}
          isThinking={isThinking}
          scrollToEndTrigger={chatScrollTrigger}
          ticketStatus={tickets.find(t => t.id === selectedTicketId)?.status}
        />
      </View>
    </View>
  </AgentContext.Provider>
);
}
|
|
1702
|
-
|
|
1703
|
-
|
|
1704
|
-
const styles = StyleSheet.create({
  // Fills the parent; used for both the outer wrapper and the app-content view.
  root: {
    flex: 1,
  },
  // Full-screen layer stacked above the app content; hosts all floating
  // agent UI. High zIndex keeps it above host-app absolutely-positioned views.
  floatingLayer: {
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 99999,
  },
});
|