machinaos 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.template +71 -0
- package/LICENSE +21 -0
- package/README.md +87 -0
- package/bin/cli.js +159 -0
- package/client/.dockerignore +45 -0
- package/client/Dockerfile +68 -0
- package/client/eslint.config.js +29 -0
- package/client/index.html +13 -0
- package/client/nginx.conf +66 -0
- package/client/package.json +48 -0
- package/client/src/App.tsx +27 -0
- package/client/src/Dashboard.tsx +1173 -0
- package/client/src/ParameterPanel.tsx +301 -0
- package/client/src/components/AIAgentNode.tsx +321 -0
- package/client/src/components/APIKeyValidator.tsx +118 -0
- package/client/src/components/ClaudeChatModelNode.tsx +18 -0
- package/client/src/components/ConditionalEdge.tsx +189 -0
- package/client/src/components/CredentialsModal.tsx +306 -0
- package/client/src/components/EdgeConditionEditor.tsx +443 -0
- package/client/src/components/GeminiChatModelNode.tsx +18 -0
- package/client/src/components/GenericNode.tsx +357 -0
- package/client/src/components/LocationParameterPanel.tsx +154 -0
- package/client/src/components/ModelNode.tsx +286 -0
- package/client/src/components/OpenAIChatModelNode.tsx +18 -0
- package/client/src/components/OutputPanel.tsx +471 -0
- package/client/src/components/ParameterRenderer.tsx +1874 -0
- package/client/src/components/SkillEditorModal.tsx +417 -0
- package/client/src/components/SquareNode.tsx +797 -0
- package/client/src/components/StartNode.tsx +250 -0
- package/client/src/components/ToolkitNode.tsx +365 -0
- package/client/src/components/TriggerNode.tsx +463 -0
- package/client/src/components/auth/LoginPage.tsx +247 -0
- package/client/src/components/auth/ProtectedRoute.tsx +59 -0
- package/client/src/components/base/BaseChatModelNode.tsx +271 -0
- package/client/src/components/icons/AIProviderIcons.tsx +50 -0
- package/client/src/components/maps/GoogleMapsPicker.tsx +137 -0
- package/client/src/components/maps/MapsPreviewPanel.tsx +110 -0
- package/client/src/components/maps/index.ts +26 -0
- package/client/src/components/parameterPanel/InputSection.tsx +1094 -0
- package/client/src/components/parameterPanel/LocationPanelLayout.tsx +65 -0
- package/client/src/components/parameterPanel/MapsSection.tsx +92 -0
- package/client/src/components/parameterPanel/MiddleSection.tsx +571 -0
- package/client/src/components/parameterPanel/OutputSection.tsx +81 -0
- package/client/src/components/parameterPanel/ParameterPanelLayout.tsx +82 -0
- package/client/src/components/parameterPanel/ToolSchemaEditor.tsx +436 -0
- package/client/src/components/parameterPanel/index.ts +42 -0
- package/client/src/components/shared/DataPanel.tsx +142 -0
- package/client/src/components/shared/JSONTreeRenderer.tsx +106 -0
- package/client/src/components/ui/AIResultModal.tsx +204 -0
- package/client/src/components/ui/AndroidSettingsPanel.tsx +401 -0
- package/client/src/components/ui/CodeEditor.tsx +81 -0
- package/client/src/components/ui/CollapsibleSection.tsx +88 -0
- package/client/src/components/ui/ComponentItem.tsx +154 -0
- package/client/src/components/ui/ComponentPalette.tsx +321 -0
- package/client/src/components/ui/ConsolePanel.tsx +1074 -0
- package/client/src/components/ui/ErrorBoundary.tsx +196 -0
- package/client/src/components/ui/InputNodesPanel.tsx +204 -0
- package/client/src/components/ui/MapSelector.tsx +314 -0
- package/client/src/components/ui/Modal.tsx +149 -0
- package/client/src/components/ui/NodeContextMenu.tsx +192 -0
- package/client/src/components/ui/NodeOutputPanel.tsx +1150 -0
- package/client/src/components/ui/OutputDisplayPanel.tsx +381 -0
- package/client/src/components/ui/SettingsPanel.tsx +243 -0
- package/client/src/components/ui/TopToolbar.tsx +736 -0
- package/client/src/components/ui/WhatsAppSettingsPanel.tsx +345 -0
- package/client/src/components/ui/WorkflowSidebar.tsx +294 -0
- package/client/src/config/antdTheme.ts +186 -0
- package/client/src/config/api.ts +54 -0
- package/client/src/contexts/AuthContext.tsx +221 -0
- package/client/src/contexts/ThemeContext.tsx +42 -0
- package/client/src/contexts/WebSocketContext.tsx +1971 -0
- package/client/src/factories/baseChatModelFactory.ts +256 -0
- package/client/src/hooks/useAndroidOperations.ts +164 -0
- package/client/src/hooks/useApiKeyValidation.ts +107 -0
- package/client/src/hooks/useApiKeys.ts +238 -0
- package/client/src/hooks/useAppTheme.ts +17 -0
- package/client/src/hooks/useComponentPalette.ts +51 -0
- package/client/src/hooks/useCopyPaste.ts +155 -0
- package/client/src/hooks/useDragAndDrop.ts +124 -0
- package/client/src/hooks/useDragVariable.ts +88 -0
- package/client/src/hooks/useExecution.ts +313 -0
- package/client/src/hooks/useParameterPanel.ts +176 -0
- package/client/src/hooks/useReactFlowNodes.ts +189 -0
- package/client/src/hooks/useToolSchema.ts +209 -0
- package/client/src/hooks/useWhatsApp.ts +196 -0
- package/client/src/hooks/useWorkflowManagement.ts +46 -0
- package/client/src/index.css +315 -0
- package/client/src/main.tsx +19 -0
- package/client/src/nodeDefinitions/aiAgentNodes.ts +336 -0
- package/client/src/nodeDefinitions/aiModelNodes.ts +340 -0
- package/client/src/nodeDefinitions/androidDeviceNodes.ts +140 -0
- package/client/src/nodeDefinitions/androidServiceNodes.ts +383 -0
- package/client/src/nodeDefinitions/chatNodes.ts +135 -0
- package/client/src/nodeDefinitions/codeNodes.ts +54 -0
- package/client/src/nodeDefinitions/documentNodes.ts +379 -0
- package/client/src/nodeDefinitions/index.ts +15 -0
- package/client/src/nodeDefinitions/locationNodes.ts +463 -0
- package/client/src/nodeDefinitions/schedulerNodes.ts +220 -0
- package/client/src/nodeDefinitions/skillNodes.ts +211 -0
- package/client/src/nodeDefinitions/toolNodes.ts +198 -0
- package/client/src/nodeDefinitions/utilityNodes.ts +284 -0
- package/client/src/nodeDefinitions/whatsappNodes.ts +865 -0
- package/client/src/nodeDefinitions/workflowNodes.ts +41 -0
- package/client/src/nodeDefinitions.ts +104 -0
- package/client/src/schemas/workflowSchema.ts +264 -0
- package/client/src/services/dynamicParameterService.ts +96 -0
- package/client/src/services/execution/aiAgentExecutionService.ts +35 -0
- package/client/src/services/executionService.ts +232 -0
- package/client/src/services/workflowApi.ts +91 -0
- package/client/src/store/useAppStore.ts +582 -0
- package/client/src/styles/theme.ts +508 -0
- package/client/src/styles/zIndex.ts +17 -0
- package/client/src/types/ComponentTypes.ts +39 -0
- package/client/src/types/EdgeCondition.ts +231 -0
- package/client/src/types/INodeProperties.ts +288 -0
- package/client/src/types/NodeTypes.ts +28 -0
- package/client/src/utils/formatters.ts +33 -0
- package/client/src/utils/googleMapsLoader.ts +140 -0
- package/client/src/utils/locationUtils.ts +85 -0
- package/client/src/utils/nodeUtils.ts +31 -0
- package/client/src/utils/workflow.ts +30 -0
- package/client/src/utils/workflowExport.ts +120 -0
- package/client/src/vite-env.d.ts +12 -0
- package/client/tailwind.config.js +60 -0
- package/client/tsconfig.json +25 -0
- package/client/tsconfig.node.json +11 -0
- package/client/vite.config.js +35 -0
- package/docker-compose.prod.yml +107 -0
- package/docker-compose.yml +104 -0
- package/docs-MachinaOs/README.md +85 -0
- package/docs-MachinaOs/deployment/docker.mdx +228 -0
- package/docs-MachinaOs/deployment/production.mdx +345 -0
- package/docs-MachinaOs/docs.json +75 -0
- package/docs-MachinaOs/faq.mdx +309 -0
- package/docs-MachinaOs/favicon.svg +5 -0
- package/docs-MachinaOs/installation.mdx +160 -0
- package/docs-MachinaOs/introduction.mdx +114 -0
- package/docs-MachinaOs/logo/dark.svg +6 -0
- package/docs-MachinaOs/logo/light.svg +6 -0
- package/docs-MachinaOs/nodes/ai-agent.mdx +216 -0
- package/docs-MachinaOs/nodes/ai-models.mdx +240 -0
- package/docs-MachinaOs/nodes/android.mdx +411 -0
- package/docs-MachinaOs/nodes/overview.mdx +181 -0
- package/docs-MachinaOs/nodes/schedulers.mdx +316 -0
- package/docs-MachinaOs/nodes/webhooks.mdx +330 -0
- package/docs-MachinaOs/nodes/whatsapp.mdx +305 -0
- package/docs-MachinaOs/quickstart.mdx +119 -0
- package/docs-MachinaOs/tutorials/ai-agent-workflow.mdx +177 -0
- package/docs-MachinaOs/tutorials/android-automation.mdx +242 -0
- package/docs-MachinaOs/tutorials/first-workflow.mdx +134 -0
- package/docs-MachinaOs/tutorials/whatsapp-automation.mdx +185 -0
- package/nul +0 -0
- package/package.json +70 -0
- package/scripts/build.js +158 -0
- package/scripts/check-ports.ps1 +33 -0
- package/scripts/clean.js +40 -0
- package/scripts/docker.js +93 -0
- package/scripts/kill-port.ps1 +154 -0
- package/scripts/start.js +210 -0
- package/scripts/stop.js +325 -0
- package/server/.dockerignore +44 -0
- package/server/Dockerfile +45 -0
- package/server/constants.py +249 -0
- package/server/core/__init__.py +1 -0
- package/server/core/cache.py +461 -0
- package/server/core/config.py +128 -0
- package/server/core/container.py +99 -0
- package/server/core/database.py +1211 -0
- package/server/core/logging.py +314 -0
- package/server/main.py +289 -0
- package/server/middleware/__init__.py +5 -0
- package/server/middleware/auth.py +89 -0
- package/server/models/__init__.py +1 -0
- package/server/models/auth.py +52 -0
- package/server/models/cache.py +24 -0
- package/server/models/database.py +211 -0
- package/server/models/nodes.py +455 -0
- package/server/package.json +9 -0
- package/server/pyproject.toml +72 -0
- package/server/requirements.txt +83 -0
- package/server/routers/__init__.py +1 -0
- package/server/routers/android.py +294 -0
- package/server/routers/auth.py +203 -0
- package/server/routers/database.py +151 -0
- package/server/routers/maps.py +142 -0
- package/server/routers/nodejs_compat.py +289 -0
- package/server/routers/webhook.py +90 -0
- package/server/routers/websocket.py +2127 -0
- package/server/routers/whatsapp.py +761 -0
- package/server/routers/workflow.py +200 -0
- package/server/services/__init__.py +1 -0
- package/server/services/ai.py +2415 -0
- package/server/services/android/__init__.py +27 -0
- package/server/services/android/broadcaster.py +114 -0
- package/server/services/android/client.py +608 -0
- package/server/services/android/manager.py +78 -0
- package/server/services/android/protocol.py +165 -0
- package/server/services/android_service.py +588 -0
- package/server/services/auth.py +131 -0
- package/server/services/chat_client.py +160 -0
- package/server/services/deployment/__init__.py +12 -0
- package/server/services/deployment/manager.py +706 -0
- package/server/services/deployment/state.py +47 -0
- package/server/services/deployment/triggers.py +275 -0
- package/server/services/event_waiter.py +785 -0
- package/server/services/execution/__init__.py +77 -0
- package/server/services/execution/cache.py +769 -0
- package/server/services/execution/conditions.py +373 -0
- package/server/services/execution/dlq.py +132 -0
- package/server/services/execution/executor.py +1351 -0
- package/server/services/execution/models.py +531 -0
- package/server/services/execution/recovery.py +235 -0
- package/server/services/handlers/__init__.py +126 -0
- package/server/services/handlers/ai.py +355 -0
- package/server/services/handlers/android.py +260 -0
- package/server/services/handlers/code.py +278 -0
- package/server/services/handlers/document.py +598 -0
- package/server/services/handlers/http.py +193 -0
- package/server/services/handlers/polyglot.py +105 -0
- package/server/services/handlers/tools.py +845 -0
- package/server/services/handlers/triggers.py +107 -0
- package/server/services/handlers/utility.py +822 -0
- package/server/services/handlers/whatsapp.py +476 -0
- package/server/services/maps.py +289 -0
- package/server/services/memory_store.py +103 -0
- package/server/services/node_executor.py +375 -0
- package/server/services/parameter_resolver.py +218 -0
- package/server/services/polyglot_client.py +169 -0
- package/server/services/scheduler.py +155 -0
- package/server/services/skill_loader.py +417 -0
- package/server/services/status_broadcaster.py +826 -0
- package/server/services/temporal/__init__.py +23 -0
- package/server/services/temporal/activities.py +344 -0
- package/server/services/temporal/client.py +76 -0
- package/server/services/temporal/executor.py +147 -0
- package/server/services/temporal/worker.py +251 -0
- package/server/services/temporal/workflow.py +355 -0
- package/server/services/temporal/ws_client.py +236 -0
- package/server/services/text.py +111 -0
- package/server/services/user_auth.py +172 -0
- package/server/services/websocket_client.py +29 -0
- package/server/services/workflow.py +597 -0
- package/server/skills/android-skill/SKILL.md +82 -0
- package/server/skills/assistant-personality/SKILL.md +45 -0
- package/server/skills/code-skill/SKILL.md +140 -0
- package/server/skills/http-skill/SKILL.md +161 -0
- package/server/skills/maps-skill/SKILL.md +170 -0
- package/server/skills/memory-skill/SKILL.md +154 -0
- package/server/skills/scheduler-skill/SKILL.md +84 -0
- package/server/skills/whatsapp-skill/SKILL.md +283 -0
- package/server/uv.lock +2916 -0
- package/server/whatsapp-rpc/.dockerignore +30 -0
- package/server/whatsapp-rpc/Dockerfile +44 -0
- package/server/whatsapp-rpc/Dockerfile.web +17 -0
- package/server/whatsapp-rpc/README.md +139 -0
- package/server/whatsapp-rpc/cli.js +95 -0
- package/server/whatsapp-rpc/configs/config.yaml +7 -0
- package/server/whatsapp-rpc/docker-compose.yml +35 -0
- package/server/whatsapp-rpc/docs/API.md +410 -0
- package/server/whatsapp-rpc/go.mod +67 -0
- package/server/whatsapp-rpc/go.sum +203 -0
- package/server/whatsapp-rpc/package.json +30 -0
- package/server/whatsapp-rpc/schema.json +1294 -0
- package/server/whatsapp-rpc/scripts/clean.cjs +66 -0
- package/server/whatsapp-rpc/scripts/cli.js +162 -0
- package/server/whatsapp-rpc/src/go/cmd/server/main.go +91 -0
- package/server/whatsapp-rpc/src/go/config/config.go +49 -0
- package/server/whatsapp-rpc/src/go/rpc/rpc.go +446 -0
- package/server/whatsapp-rpc/src/go/rpc/server.go +112 -0
- package/server/whatsapp-rpc/src/go/whatsapp/history.go +166 -0
- package/server/whatsapp-rpc/src/go/whatsapp/messages.go +390 -0
- package/server/whatsapp-rpc/src/go/whatsapp/service.go +2130 -0
- package/server/whatsapp-rpc/src/go/whatsapp/types.go +261 -0
- package/server/whatsapp-rpc/src/python/pyproject.toml +15 -0
- package/server/whatsapp-rpc/src/python/whatsapp_rpc/__init__.py +4 -0
- package/server/whatsapp-rpc/src/python/whatsapp_rpc/client.py +427 -0
- package/server/whatsapp-rpc/web/app.py +609 -0
- package/server/whatsapp-rpc/web/requirements.txt +6 -0
- package/server/whatsapp-rpc/web/rpc_client.py +427 -0
- package/server/whatsapp-rpc/web/static/openapi.yaml +59 -0
- package/server/whatsapp-rpc/web/templates/base.html +150 -0
- package/server/whatsapp-rpc/web/templates/contacts.html +240 -0
- package/server/whatsapp-rpc/web/templates/dashboard.html +320 -0
- package/server/whatsapp-rpc/web/templates/groups.html +328 -0
- package/server/whatsapp-rpc/web/templates/messages.html +465 -0
- package/server/whatsapp-rpc/web/templates/messaging.html +681 -0
- package/server/whatsapp-rpc/web/templates/send.html +259 -0
- package/server/whatsapp-rpc/web/templates/settings.html +459 -0
|
@@ -0,0 +1,2127 @@
|
|
|
1
|
+
"""WebSocket router for real-time bidirectional communication.
|
|
2
|
+
|
|
3
|
+
Handles WebSocket connections from frontend clients for ALL operations:
|
|
4
|
+
- Node parameters (get, save, delete)
|
|
5
|
+
- Node execution
|
|
6
|
+
- AI execution and model fetching
|
|
7
|
+
- API key validation and storage
|
|
8
|
+
- Android device operations
|
|
9
|
+
- Google Maps key validation
|
|
10
|
+
- Status broadcasts
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import time
|
|
14
|
+
import asyncio
|
|
15
|
+
import weakref
|
|
16
|
+
from typing import Dict, Any, Callable, Awaitable, Optional, Set
|
|
17
|
+
from datetime import datetime
|
|
18
|
+
|
|
19
|
+
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
|
|
20
|
+
from services.status_broadcaster import get_status_broadcaster
|
|
21
|
+
from core.container import container
|
|
22
|
+
from core.logging import get_logger
|
|
23
|
+
|
|
24
|
+
logger = get_logger(__name__)
|
|
25
|
+
|
|
26
|
+
router = APIRouter(tags=["websocket"])
|
|
27
|
+
|
|
28
|
+
# =============================================================================
|
|
29
|
+
# Concurrent Send Protection
|
|
30
|
+
# =============================================================================
|
|
31
|
+
# Use WeakKeyDictionary to auto-cleanup when WebSocket is garbage collected
|
|
32
|
+
_send_locks: weakref.WeakKeyDictionary = weakref.WeakKeyDictionary()
|
|
33
|
+
|
|
34
|
+
# Track running handler tasks per WebSocket for cleanup on disconnect
|
|
35
|
+
_handler_tasks: weakref.WeakKeyDictionary = weakref.WeakKeyDictionary()
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
async def _safe_send(websocket: WebSocket, data: dict):
    """Serialize writes to one WebSocket so concurrent senders never interleave frames.

    Each socket gets its own asyncio.Lock, stored in the module-level
    WeakKeyDictionary so the lock disappears with the socket. Send failures
    are logged rather than raised, keeping the caller's loop alive.
    """
    lock = _send_locks.get(websocket)
    if lock is None:
        lock = asyncio.Lock()
        _send_locks[websocket] = lock
    async with lock:
        try:
            await websocket.send_json(data)
        except Exception as e:
            logger.error(f"[WebSocket] Send error: {e}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# Type for message handlers
MessageHandler = Callable[[Dict[str, Any], WebSocket], Awaitable[Dict[str, Any]]]


def ws_handler(*required_fields: str):
    """Decorator for WebSocket message handlers.

    Validates that each of *required_fields* is present (and truthy) in the
    incoming payload, injects ``"success": True`` into results that omit it,
    and converts any uncaught exception into a ``{"success": False, "error": ...}``
    response so a failing handler can never kill the socket loop.
    """
    from functools import wraps  # local import: keeps this block self-contained

    def decorator(func: MessageHandler) -> MessageHandler:
        @wraps(func)  # preserve the handler's __name__/__doc__ for logging and debugging
        async def wrapper(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
            # NOTE: truthiness check — a present-but-falsy field ("" or 0)
            # is rejected the same as a missing one.
            for field in required_fields:
                if not data.get(field):
                    return {"success": False, "error": f"{field} required"}
            try:
                result = await func(data, websocket)
                if "success" not in result:
                    result = {"success": True, **result}
                return result
            except Exception as e:
                logger.error(f"Handler error: {e}", exc_info=True)
                return {"success": False, "error": str(e)}
        return wrapper
    return decorator
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
# ============================================================================
|
|
73
|
+
# Message Handlers
|
|
74
|
+
# ============================================================================
|
|
75
|
+
|
|
76
|
+
async def handle_ping(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Answer a client keepalive ping with a pong carrying the server time."""
    response = {"type": "pong"}
    response["timestamp"] = time.time()
    return response
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
async def handle_get_status(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return a snapshot of the full system status from the broadcaster."""
    return {
        "type": "full_status",
        "data": get_status_broadcaster().get_status(),
    }
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
async def handle_get_android_status(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Report the current Android connection status."""
    return {
        "type": "android_status",
        "data": get_status_broadcaster().get_android_status(),
    }
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
async def handle_get_node_status(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return one node's status, or an error payload when node_id is absent."""
    broadcaster = get_status_broadcaster()
    node_id = data.get("node_id")
    if not node_id:
        return {"type": "error", "message": "node_id required"}
    return {
        "type": "node_status",
        "node_id": node_id,
        "data": broadcaster.get_node_status(node_id),
    }
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
async def handle_get_variable(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Resolve a named variable's current value via the status broadcaster."""
    broadcaster = get_status_broadcaster()
    name = data.get("name")
    if not name:
        return {"type": "error", "message": "name required"}
    return {
        "type": "variable_update",
        "name": name,
        "value": broadcaster.get_variable(name),
    }
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
# ============================================================================
|
|
114
|
+
# Node Parameters Handlers
|
|
115
|
+
# ============================================================================
|
|
116
|
+
|
|
117
|
+
@ws_handler("node_id")
|
|
118
|
+
async def handle_get_node_parameters(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
119
|
+
"""Get parameters for a specific node."""
|
|
120
|
+
database = container.database()
|
|
121
|
+
node_id = data["node_id"]
|
|
122
|
+
parameters = await database.get_node_parameters(node_id)
|
|
123
|
+
logger.debug(f"[GET_PARAMS] Node ID: {node_id}")
|
|
124
|
+
logger.debug(f"[GET_PARAMS] Raw from DB: {parameters}")
|
|
125
|
+
logger.debug(f"[GET_PARAMS] Code length: {len(parameters.get('code', '')) if parameters and 'code' in parameters else 'no code field'}")
|
|
126
|
+
return {"node_id": node_id, "parameters": parameters or {}, "version": 1, "timestamp": time.time()}
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@ws_handler()
async def handle_get_all_node_parameters(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Batch-fetch parameters for every requested node that has any stored."""
    database = container.database()
    collected: Dict[str, Any] = {}
    for nid in data.get("node_ids", []):
        params = await database.get_node_parameters(nid)
        if params:
            collected[nid] = {"parameters": params, "version": 1}
    return {"parameters": collected, "timestamp": time.time()}
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
@ws_handler("node_id")
|
|
142
|
+
async def handle_save_node_parameters(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
143
|
+
"""Save node parameters and broadcast to all clients."""
|
|
144
|
+
database = container.database()
|
|
145
|
+
broadcaster = get_status_broadcaster()
|
|
146
|
+
node_id, parameters = data["node_id"], data.get("parameters", {})
|
|
147
|
+
|
|
148
|
+
logger.debug(f"[SAVE_PARAMS] Node ID: {node_id}, has_code: {'code' in parameters}, code_len: {len(parameters.get('code', '')) if 'code' in parameters else 0}")
|
|
149
|
+
await database.save_node_parameters(node_id, parameters)
|
|
150
|
+
await broadcaster.broadcast({
|
|
151
|
+
"type": "node_parameters_updated", "node_id": node_id,
|
|
152
|
+
"parameters": parameters, "version": 1, "timestamp": time.time()
|
|
153
|
+
})
|
|
154
|
+
return {"node_id": node_id, "parameters": parameters, "version": 1, "timestamp": time.time()}
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
@ws_handler("node_id")
|
|
158
|
+
async def handle_delete_node_parameters(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
159
|
+
"""Delete node parameters."""
|
|
160
|
+
database = container.database()
|
|
161
|
+
await database.delete_node_parameters(data["node_id"])
|
|
162
|
+
return {"node_id": data["node_id"]}
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
# ============================================================================
|
|
166
|
+
# Tool Schema Handlers (Source of truth for tool node configurations)
|
|
167
|
+
# ============================================================================
|
|
168
|
+
|
|
169
|
+
@ws_handler("node_id")
|
|
170
|
+
async def handle_get_tool_schema(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
171
|
+
"""Get tool schema for a node."""
|
|
172
|
+
database = container.database()
|
|
173
|
+
schema = await database.get_tool_schema(data["node_id"])
|
|
174
|
+
return {"node_id": data["node_id"], "schema": schema}
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
@ws_handler("node_id", "tool_name", "tool_description", "schema_config")
|
|
178
|
+
async def handle_save_tool_schema(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
179
|
+
"""Save tool schema for a node. Used by Android Toolkit to update connected service schemas."""
|
|
180
|
+
database = container.database()
|
|
181
|
+
broadcaster = get_status_broadcaster()
|
|
182
|
+
|
|
183
|
+
node_id = data["node_id"]
|
|
184
|
+
tool_name = data["tool_name"]
|
|
185
|
+
tool_description = data["tool_description"]
|
|
186
|
+
schema_config = data["schema_config"]
|
|
187
|
+
connected_services = data.get("connected_services")
|
|
188
|
+
|
|
189
|
+
success = await database.save_tool_schema(
|
|
190
|
+
node_id=node_id,
|
|
191
|
+
tool_name=tool_name,
|
|
192
|
+
tool_description=tool_description,
|
|
193
|
+
schema_config=schema_config,
|
|
194
|
+
connected_services=connected_services
|
|
195
|
+
)
|
|
196
|
+
|
|
197
|
+
if success:
|
|
198
|
+
# Broadcast schema update to all clients
|
|
199
|
+
await broadcaster.broadcast({
|
|
200
|
+
"type": "tool_schema_updated",
|
|
201
|
+
"node_id": node_id,
|
|
202
|
+
"tool_name": tool_name,
|
|
203
|
+
"timestamp": time.time()
|
|
204
|
+
})
|
|
205
|
+
|
|
206
|
+
return {
|
|
207
|
+
"node_id": node_id,
|
|
208
|
+
"tool_name": tool_name,
|
|
209
|
+
"saved": success
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
@ws_handler("node_id")
|
|
214
|
+
async def handle_delete_tool_schema(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
215
|
+
"""Delete tool schema for a node."""
|
|
216
|
+
database = container.database()
|
|
217
|
+
await database.delete_tool_schema(data["node_id"])
|
|
218
|
+
return {"node_id": data["node_id"]}
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
async def handle_get_all_tool_schemas(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return every stored tool schema."""
    schemas = await container.database().get_all_tool_schemas()
    return {"success": True, "schemas": schemas}
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
# ============================================================================
|
|
229
|
+
# Node Execution Handlers
|
|
230
|
+
# ============================================================================
|
|
231
|
+
|
|
232
|
+
@ws_handler("node_id", "node_type")
|
|
233
|
+
async def handle_execute_node(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
234
|
+
"""Execute a workflow node with per-workflow status scoping (n8n pattern)."""
|
|
235
|
+
workflow_service = container.workflow_service()
|
|
236
|
+
broadcaster = get_status_broadcaster()
|
|
237
|
+
node_id, node_type = data["node_id"], data["node_type"]
|
|
238
|
+
workflow_id = data.get("workflow_id") # Per-workflow isolation
|
|
239
|
+
|
|
240
|
+
await broadcaster.update_node_status(node_id, "executing", workflow_id=workflow_id)
|
|
241
|
+
result = await workflow_service.execute_node(
|
|
242
|
+
node_id=node_id, node_type=node_type,
|
|
243
|
+
parameters=data.get("parameters", {}),
|
|
244
|
+
nodes=data.get("nodes", []), edges=data.get("edges", []),
|
|
245
|
+
session_id=data.get("session_id", "default"),
|
|
246
|
+
workflow_id=workflow_id,
|
|
247
|
+
)
|
|
248
|
+
|
|
249
|
+
if result.get("success"):
|
|
250
|
+
await broadcaster.update_node_status(node_id, "success", result.get("result"), workflow_id=workflow_id)
|
|
251
|
+
await broadcaster.update_node_output(node_id, result.get("result"), workflow_id=workflow_id)
|
|
252
|
+
elif result.get("error") == "Cancelled by user":
|
|
253
|
+
# Cancelled trigger nodes go back to idle, not error
|
|
254
|
+
await broadcaster.update_node_status(node_id, "idle", {"message": "Cancelled"}, workflow_id=workflow_id)
|
|
255
|
+
else:
|
|
256
|
+
await broadcaster.update_node_status(node_id, "error", {"error": result.get("error")}, workflow_id=workflow_id)
|
|
257
|
+
|
|
258
|
+
# Explicitly pass through success status (don't let decorator default to True)
|
|
259
|
+
ws_result = {
|
|
260
|
+
"success": result.get("success", False),
|
|
261
|
+
"node_id": node_id,
|
|
262
|
+
"result": result.get("result"),
|
|
263
|
+
"error": result.get("error"),
|
|
264
|
+
"execution_time": result.get("execution_time"),
|
|
265
|
+
"timestamp": time.time()
|
|
266
|
+
}
|
|
267
|
+
# Debug: Log what we're returning to WebSocket
|
|
268
|
+
result_data = result.get("result")
|
|
269
|
+
logger.debug(f"[WS execute_node] Returning: success={ws_result['success']}, result.response={repr(result_data.get('response', 'MISSING')[:100] if result_data and result_data.get('response') else 'None')}")
|
|
270
|
+
return ws_result
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
@ws_handler("node_id")
|
|
274
|
+
async def handle_cancel_execution(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
|
|
275
|
+
"""Cancel node execution."""
|
|
276
|
+
broadcaster = get_status_broadcaster()
|
|
277
|
+
await broadcaster.update_node_status(data["node_id"], "idle")
|
|
278
|
+
return {"node_id": data["node_id"], "message": "Execution cancelled"}
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
@ws_handler()
async def handle_cancel_event_wait(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Cancel an active event waiter (for trigger nodes).

    Can cancel by waiter_id or node_id.
    Note: Status update to "idle" happens in handle_execute_node when it catches CancelledError.
    """
    from services import event_waiter

    waiter_id = data.get("waiter_id")
    node_id = data.get("node_id")

    logger.debug(f"[WebSocket] handle_cancel_event_wait called: waiter_id={waiter_id}, node_id={node_id}")

    # Guard first: at least one identifier is needed.
    if not waiter_id and not node_id:
        return {"success": False, "error": "waiter_id or node_id required"}

    if waiter_id:
        cancelled = event_waiter.cancel(waiter_id)
        logger.debug(f"[WebSocket] cancel by waiter_id result: success={cancelled}")
        return {"success": cancelled, "waiter_id": waiter_id, "message": "Cancelled" if cancelled else "Not found"}

    count = event_waiter.cancel_for_node(node_id)
    logger.debug(f"[WebSocket] cancel by node_id result: cancelled_count={count}")
    return {"success": count > 0, "node_id": node_id, "cancelled_count": count}
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
@ws_handler()
async def handle_get_active_waiters(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """List the currently active event waiters (for debugging/UI)."""
    from services import event_waiter

    return {"waiters": event_waiter.get_active_waiters()}
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
# ============================================================================
|
|
315
|
+
# Dead Letter Queue (DLQ) Handlers
|
|
316
|
+
# ============================================================================
|
|
317
|
+
|
|
318
|
+
@ws_handler()
async def handle_get_dlq_entries(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Get DLQ entries with optional filtering.

    Optional params:
        workflow_id: Filter by workflow ID
        node_type: Filter by node type
        limit: Max entries to return (default 100)

    Returns:
        List of DLQ entries
    """
    from services.execution import ExecutionCache

    execution_cache = ExecutionCache(container.cache())
    entries = await execution_cache.get_dlq_entries(
        workflow_id=data.get("workflow_id"),
        node_type=data.get("node_type"),
        limit=data.get("limit", 100),
    )

    return {
        "entries": [item.to_dict() for item in entries],
        "count": len(entries),
        "timestamp": time.time(),
    }
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
@ws_handler("entry_id")
async def handle_get_dlq_entry(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Fetch one DLQ entry by its ID.

    Required:
        entry_id: DLQ entry ID

    Returns:
        The entry serialized to a dict, or an error payload if missing.
    """
    from services.execution import ExecutionCache

    dlq_cache = ExecutionCache(container.cache())
    entry = await dlq_cache.get_dlq_entry(data["entry_id"])

    # Guard clause: missing entry short-circuits with an error payload.
    if not entry:
        return {"success": False, "error": "DLQ entry not found"}
    return {"entry": entry.to_dict(), "timestamp": time.time()}
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
@ws_handler()
async def handle_get_dlq_stats(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Summarize the dead-letter queue.

    Returns:
        Aggregate stats (total count, per-node-type and per-workflow breakdown)
        as produced by the execution cache.
    """
    from services.execution import ExecutionCache

    dlq_cache = ExecutionCache(container.cache())
    stats = await dlq_cache.get_dlq_stats()
    return {"stats": stats, "timestamp": time.time()}
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
@ws_handler("entry_id")
async def handle_replay_dlq_entry(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Replay a failed node from the DLQ.

    Required:
        entry_id: DLQ entry ID to replay
        nodes: Workflow nodes
        edges: Workflow edges

    Returns:
        Replay execution result
    """
    from services.execution import ExecutionCache, WorkflowExecutor
    cache_service = container.cache()
    execution_cache = ExecutionCache(cache_service)
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    entry_id = data["entry_id"]
    nodes = data.get("nodes", [])
    edges = data.get("edges", [])

    # Get the entry to find the node_id; bail out before broadcasting if it is gone.
    entry = await execution_cache.get_dlq_entry(entry_id)
    if not entry:
        return {"success": False, "error": "DLQ entry not found"}

    # Update status so the UI shows the node as executing during the replay.
    await broadcaster.update_node_status(entry.node_id, "executing", {
        "message": "Replaying from DLQ"
    })

    # Create executor with node adapter: bridges WorkflowExecutor's
    # (node_id, node_type, params, context) callback shape to
    # workflow_service.execute_node's keyword interface.
    async def node_executor(node_id: str, node_type: str, params: dict, context: dict) -> dict:
        return await workflow_service.execute_node(
            node_id=node_id,
            node_type=node_type,
            parameters=params,
            nodes=context.get("nodes", []),
            edges=context.get("edges", []),
            # Replays run under a dedicated session unless the context supplies one.
            session_id=context.get("session_id", "dlq_replay"),
            execution_id=context.get("execution_id")
        )

    # Forward per-node status changes straight to the websocket broadcaster.
    async def status_callback(node_id: str, status: str, status_data: dict):
        await broadcaster.update_node_status(node_id, status, status_data)

    # DLQ replay needs DLQ enabled to re-add on failure
    settings = container.settings()
    executor = WorkflowExecutor(
        cache=execution_cache,
        node_executor=node_executor,
        status_callback=status_callback,
        dlq_enabled=settings.dlq_enabled
    )

    result = await executor.replay_dlq_entry(entry_id, nodes, edges)

    # Update final status: success broadcasts the node result, failure the error.
    if result.get("success"):
        await broadcaster.update_node_status(entry.node_id, "success", result.get("result"))
    else:
        await broadcaster.update_node_status(entry.node_id, "error", {"error": result.get("error")})

    return result
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
@ws_handler("entry_id")
async def handle_remove_dlq_entry(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Drop a DLQ entry without replaying it.

    Required:
        entry_id: DLQ entry ID to remove

    Returns:
        Whether the entry was removed, echoing the entry_id.
    """
    from services.execution import ExecutionCache

    dlq_cache = ExecutionCache(container.cache())
    entry_id = data["entry_id"]
    removed = await dlq_cache.remove_from_dlq(entry_id)
    return {"removed": removed, "entry_id": entry_id}
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
@ws_handler()
async def handle_purge_dlq(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Bulk-delete DLQ entries matching the given filters.

    Optional params:
        workflow_id: Only purge entries for this workflow
        node_type: Only purge entries for this node type
        older_than_hours: Only purge entries older than X hours

    Returns:
        Number of entries purged.
    """
    from services.execution import ExecutionCache

    dlq_cache = ExecutionCache(container.cache())

    # Translate the relative hours filter into an absolute cutoff timestamp.
    hours = data.get("older_than_hours")
    cutoff = time.time() - hours * 3600 if hours else None

    purged = await dlq_cache.purge_dlq(
        workflow_id=data.get("workflow_id"),
        node_type=data.get("node_type"),
        older_than=cutoff,
    )

    return {"purged": purged, "timestamp": time.time()}
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
@ws_handler("node_id")
async def handle_get_node_output(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Fetch the stored output of one node for the requesting session."""
    workflow_service = container.workflow_service()
    node_id = data["node_id"]
    output_name = data.get("output_name", "output_0")
    session_id = data.get("session_id", "default")

    payload = await workflow_service.get_node_output(session_id, node_id, output_name)
    return {
        "node_id": node_id,
        "output_name": output_name,
        "data": payload,
        "timestamp": time.time(),
    }
|
|
506
|
+
|
|
507
|
+
|
|
508
|
+
@ws_handler("node_id")
async def handle_clear_node_output(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Erase one node's output everywhere it is cached.

    Clears three layers: the in-memory output map, the persisted database
    record, and the status broadcaster's cache (so a page reload does not
    resurrect stale data).
    """
    workflow_service = container.workflow_service()
    database = container.database()
    broadcaster = get_status_broadcaster()
    node_id = data["node_id"]

    # In-memory layer: output keys are suffixed with _{node_id}, so collect
    # the matching keys first and then delete them.
    stale_keys = [key for key in workflow_service.node_outputs.keys() if key.endswith(f"_{node_id}")]
    for key in stale_keys:
        del workflow_service.node_outputs[key]
    memory_cleared = len(stale_keys)

    # Persistence layer.
    db_cleared = await database.delete_node_output(node_id)

    # Broadcaster cache layer (prevents reload from showing old data).
    broadcaster_cleared = await broadcaster.clear_node_status(node_id)

    logger.info("Cleared node output", node_id=node_id, memory_cleared=memory_cleared,
                db_cleared=db_cleared, broadcaster_cleared=broadcaster_cleared)

    return {"node_id": node_id, "cleared": True, "memory_cleared": memory_cleared,
            "db_cleared": db_cleared, "broadcaster_cleared": broadcaster_cleared}
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
@ws_handler()
async def handle_execute_workflow(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Execute entire workflow from start node to end.

    Expects:
        workflow_id: Workflow identifier for per-workflow status scoping
        nodes: List of workflow nodes with {id, type, data}
        edges: List of edges with {id, source, target}
        session_id: Optional session identifier

    Returns:
        Workflow execution result with all node outputs
    """
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    workflow_id = data.get("workflow_id")  # Per-workflow isolation (n8n pattern)
    nodes = data.get("nodes", [])
    edges = data.get("edges", [])
    session_id = data.get("session_id", "default")

    if not nodes:
        return {"success": False, "error": "No nodes provided"}

    # Broadcast workflow starting status before any node runs.
    await broadcaster.update_workflow_status(executing=True, current_node=None, progress=0)

    # Create status callback with workflow_id for per-workflow scoping (n8n pattern).
    # On "executing" it also derives a coarse progress percentage from the
    # position/total hints the executor places in node_data.
    async def status_callback(node_id: str, status: str, node_data: Optional[Dict] = None):
        await broadcaster.update_node_status(node_id, status, node_data, workflow_id=workflow_id)
        if status == "executing":
            position = node_data.get("position", 0) if node_data else 0
            total = node_data.get("total", 1) if node_data else 1
            progress = int((position / total) * 100) if total > 0 else 0
            await broadcaster.update_workflow_status(executing=True, current_node=node_id, progress=progress)

    # Execute the workflow with workflow_id for per-workflow status scoping
    result = await workflow_service.execute_workflow(
        nodes=nodes,
        edges=edges,
        session_id=session_id,
        status_callback=status_callback,
        workflow_id=workflow_id,
    )

    # Broadcast workflow completed status; failure resets progress to 0.
    await broadcaster.update_workflow_status(
        executing=False,
        current_node=None,
        progress=100 if result.get("success") else 0
    )

    # Flatten the service result into the websocket response shape,
    # defaulting every field so the frontend always sees a full payload.
    return {
        "success": result.get("success", False),
        "nodes_executed": result.get("nodes_executed", []),
        "node_results": result.get("node_results", {}),
        "execution_order": result.get("execution_order", []),
        "errors": result.get("errors", []),
        "error": result.get("error"),
        "total_nodes": result.get("total_nodes", 0),
        "completed_nodes": result.get("completed_nodes", 0),
        "execution_time": result.get("execution_time", 0),
        "timestamp": time.time()
    }
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
# Per-workflow deployment tasks for proper cancellation (Temporal/n8n pattern)
# Maps workflow_id -> asyncio.Task for parallel workflow deployments.
# Entries are added by handle_deploy_workflow and removed either by
# handle_cancel_deployment or by the deployment task itself on failure.
_deployment_tasks: Dict[str, asyncio.Task] = {}
|
|
605
|
+
|
|
606
|
+
|
|
607
|
+
@ws_handler()
async def handle_deploy_workflow(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Deploy workflow to run continuously until cancelled.

    Expects:
        workflow_id: Workflow identifier (required for locking)
        nodes: List of workflow nodes with {id, type, data}
        edges: List of edges with {id, source, target}
        session_id: Optional session identifier
        delay_between_runs: Optional delay in seconds between iterations (default: 1.0)

    Returns:
        Deployment start confirmation (deployment runs in background)
    """
    # NOTE(review): `global` is only needed for rebinding; the dict is mutated,
    # not reassigned — harmless but unnecessary.
    global _deployment_tasks
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    workflow_id = data.get("workflow_id")
    nodes = data.get("nodes", [])
    edges = data.get("edges", [])
    session_id = data.get("session_id", "default")

    # DEBUG: Log received edges to trace tool connection issues
    logger.info(f"[Deploy] Received {len(edges)} edges for workflow {workflow_id}")
    for e in edges:
        target_handle = e.get('targetHandle')
        # "input-*" handles other than input-main carry node configuration links.
        if target_handle and target_handle.startswith('input-') and target_handle != 'input-main':
            logger.info(f"[Deploy] Config edge: {e.get('source')} -> {e.get('target')} (handle={target_handle})")

    # Check for tool connections to AI Agent
    tool_edges = [e for e in edges if e.get('targetHandle') == 'input-tools']
    if tool_edges:
        logger.info(f"[Deploy] Tool edges found: {len(tool_edges)}")
        for te in tool_edges:
            logger.info(f"[Deploy] Tool edge: source={te.get('source')} -> target={te.get('target')}")
    else:
        logger.info(f"[Deploy] No input-tools edges found")

    if not nodes:
        return {"success": False, "error": "No nodes provided"}

    if not workflow_id:
        return {"success": False, "error": "workflow_id is required for deployment"}

    # Check if THIS specific workflow is already deployed (per-workflow isolation)
    if workflow_service.is_workflow_deployed(workflow_id):
        status = workflow_service.get_deployment_status(workflow_id)
        return {
            "success": False,
            "error": f"Workflow {workflow_id} is already deployed. Cancel it first.",
            "workflow_id": workflow_id,
            "is_running": True,
            "run_counter": status.get("run_counter", 0)
        }

    # Acquire workflow lock before starting deployment (per-workflow locking - n8n pattern)
    lock_acquired = await broadcaster.lock_workflow(workflow_id, reason="deployment")
    if not lock_acquired:
        lock_info = broadcaster.get_workflow_lock(workflow_id)
        return {
            "success": False,
            "error": f"Workflow {workflow_id} is already locked for {lock_info.get('reason', 'deployment')}",
            "locked_by": lock_info.get("workflow_id"),
            "locked_at": lock_info.get("locked_at")
        }

    # Broadcast deployment starting status using centralized method
    await broadcaster.update_workflow_status(executing=True, current_node=None, progress=0)
    await broadcaster.update_deployment_status(
        is_running=True,
        status="starting",
        active_runs=0,
        workflow_id=workflow_id
    )

    # Create status callback to broadcast node and deployment updates
    # Include workflow_id in all node status updates (n8n pattern for workflow isolation)
    async def status_callback(node_id: str, status: str, node_data: Optional[Dict] = None):
        # The sentinel node id "__deployment__" carries deployment-level status.
        if node_id == "__deployment__":
            # Deployment-level status update using centralized method
            active_runs = node_data.get("active_runs", 0) if node_data else 0
            await broadcaster.update_deployment_status(
                is_running=True,
                status=status,
                active_runs=active_runs,
                workflow_id=workflow_id,
                data=node_data
            )
        else:
            # Node-level status update with workflow_id for frontend filtering
            await broadcaster.update_node_status(node_id, status, node_data, workflow_id=workflow_id)
            if status == "executing":
                position = node_data.get("position", 0) if node_data else 0
                total = node_data.get("total", 1) if node_data else 1
                progress = int((position / total) * 100) if total > 0 else 0
                await broadcaster.update_workflow_status(executing=True, current_node=node_id, progress=progress)

    # Start deployment as background task (per-workflow - Temporal/n8n pattern)
    # In the event-driven pattern, deploy_workflow() returns immediately after setting up triggers.
    # The workflow stays locked until cancel_deployment is called.
    async def run_deployment():
        try:
            result = await workflow_service.deploy_workflow(
                nodes=nodes,
                edges=edges,
                session_id=session_id,
                status_callback=status_callback,
                workflow_id=workflow_id
            )

            # In event-driven mode, deploy_workflow returns immediately after trigger setup.
            # If it failed, unlock and report error. If successful, stay running.
            if not result.get("success"):
                # Setup failed - unlock and report error
                logger.error("Deployment setup failed", error=result.get("error"), workflow_id=workflow_id)
                await broadcaster.update_deployment_status(
                    is_running=False,
                    status="error",
                    active_runs=0,
                    workflow_id=workflow_id,
                    error=result.get("error")
                )
                await broadcaster.unlock_workflow(workflow_id)
                # Clean up task reference for this workflow
                _deployment_tasks.pop(workflow_id, None)
            else:
                # Deployment successful - triggers are set up and running.
                # Workflow stays locked until cancel_deployment is called.
                await broadcaster.update_deployment_status(
                    is_running=True,
                    status="running",
                    active_runs=0,
                    workflow_id=workflow_id,
                    data={
                        "triggers_setup": result.get("triggers_setup", []),
                        "deployment_id": result.get("deployment_id")
                    }
                )
                logger.info("[Deployment] Event-driven deployment active",
                            deployment_id=result.get("deployment_id"),
                            workflow_id=workflow_id,
                            triggers=len(result.get("triggers_setup", [])))

        except Exception as e:
            # Any unexpected failure must release the lock and drop the task
            # reference, or the workflow would be stuck as "deployed".
            logger.error("Deployment task error", workflow_id=workflow_id, error=str(e))
            await broadcaster.update_deployment_status(
                is_running=False,
                status="error",
                active_runs=0,
                workflow_id=workflow_id,
                error=str(e)
            )
            await broadcaster.unlock_workflow(workflow_id)
            # Clean up task reference for this workflow
            _deployment_tasks.pop(workflow_id, None)

    # Store task per workflow for independent cancellation
    _deployment_tasks[workflow_id] = asyncio.create_task(run_deployment())

    return {
        "success": True,
        "message": "Deployment started",
        "workflow_id": workflow_id,
        "is_running": True,
        "locked": True,
        "timestamp": time.time()
    }
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
@ws_handler()
async def handle_cancel_deployment(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Cancel running deployment for a specific workflow (Temporal/n8n pattern).

    Expects:
        workflow_id: Workflow to cancel (required).

    Also cancels any active event waiters (trigger nodes) and unlocks the workflow.

    Returns:
        Cancellation result with iterations completed
    """
    # Fix: removed the unused `from services import event_waiter` import
    # (waiter cancellation is handled inside workflow_service.cancel_deployment,
    # which reports the count via "waiters_cancelled") and the redundant
    # re-check of workflow_id after the early-return guard below.
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    workflow_id = data.get("workflow_id")

    if not workflow_id:
        return {"success": False, "error": "workflow_id is required for cancellation"}

    result = await workflow_service.cancel_deployment(workflow_id)

    # Event waiters for this workflow's trigger nodes are cancelled by the
    # service; surface the count to the client.
    cancelled_waiters = 0
    if result.get("success"):
        cancelled_waiters = result.get("waiters_cancelled", 0)

    # Cancel the deployment task for THIS specific workflow only (per-workflow isolation)
    task = _deployment_tasks.pop(workflow_id, None)
    if task and not task.done():
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            logger.info("[Deployment] Deployment task cancelled", workflow_id=workflow_id)

    # Unlock this specific workflow (workflow_id is guaranteed non-empty here).
    await broadcaster.unlock_workflow(workflow_id)

    if result.get("success"):
        # Clear node statuses for all trigger nodes that were waiting
        # The result contains info about cancelled listeners (listener_{node_id})
        # Use existing clear_node_status method which broadcasts node_status_cleared
        for node_id in result.get("cancelled_listener_node_ids", []):
            await broadcaster.clear_node_status(node_id)

        # Broadcast deployment cancelled status using centralized method
        await broadcaster.update_workflow_status(executing=False, current_node=None, progress=0)
        await broadcaster.update_deployment_status(
            is_running=False,
            status="cancelled",
            active_runs=0,
            workflow_id=workflow_id,
            data={
                "iterations_completed": result.get("iterations_completed", 0)
            }
        )

    return {
        "success": result.get("success", False),
        "message": result.get("message", result.get("error")),
        "workflow_id": workflow_id,
        "was_running": result.get("was_running", False),
        "iterations_completed": result.get("iterations_completed", 0),
        "cancelled_waiters": cancelled_waiters,
        # Always True: the early-return guard ensures workflow_id is set.
        "unlocked": True,
        "timestamp": time.time()
    }
|
|
849
|
+
|
|
850
|
+
|
|
851
|
+
@ws_handler()
async def handle_get_deployment_status(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Get current deployment status including workflow lock info.

    Expects (optional):
        workflow_id: Get status for specific workflow. If not provided, returns global status.

    Returns:
        Current deployment state, iteration count, and lock status
    """
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    workflow_id = data.get("workflow_id")
    status = workflow_service.get_deployment_status(workflow_id)

    return {
        "is_running": workflow_service.is_deployment_running(workflow_id),
        "run_counter": status.get("run_counter", 0),
        "active_runs": status.get("active_runs", 0),
        "settings": workflow_service.get_deployment_settings(),
        "workflow_id": workflow_id or status.get("workflow_id"),
        "deployed_workflows": status.get("deployed_workflows", []),
        # Fix: scope the lock lookup to the requested workflow, consistent
        # with every other field above. When workflow_id is None this behaves
        # like the previous no-argument call (global lock state).
        "lock": broadcaster.get_workflow_lock(workflow_id),
        "timestamp": time.time()
    }
|
|
877
|
+
|
|
878
|
+
|
|
879
|
+
@ws_handler()
async def handle_get_workflow_lock(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return the broadcaster's current workflow lock state.

    Returns:
        Lock info (locked workflow_id and reason) plus a timestamp.
    """
    broadcaster = get_status_broadcaster()
    lock_state = broadcaster.get_workflow_lock()
    return {"lock": lock_state, "timestamp": time.time()}
|
|
892
|
+
|
|
893
|
+
|
|
894
|
+
@ws_handler()
async def handle_update_deployment_settings(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Update deployment settings; safe to call while a deployment is active.

    Expects any of:
        delay_between_runs: float - Seconds to wait between iterations
        stop_on_error: bool - Stop deployment when a node fails
        max_iterations: int - Max iterations (0 = unlimited)

    Returns:
        Updated settings and current deployment state
    """
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()

    # Only these keys are mutable at runtime; pick whichever were supplied.
    mutable_keys = ("delay_between_runs", "stop_on_error", "max_iterations")
    requested = {key: data[key] for key in mutable_keys if key in data}

    updated_settings = await workflow_service.update_deployment_settings(requested)

    # Broadcast settings update so other connected clients stay in sync.
    status = workflow_service.get_deployment_status()
    await broadcaster.broadcast({
        "type": "deployment_settings_updated",
        "settings": updated_settings,
        "is_running": workflow_service.is_deployment_running(),
        "run_counter": status.get("run_counter", 0)
    })

    return {
        "success": True,
        "settings": updated_settings,
        "is_running": workflow_service.is_deployment_running(),
        "run_counter": status.get("run_counter", 0),
        "active_runs": status.get("active_runs", 0),
        "timestamp": time.time()
    }
|
|
936
|
+
|
|
937
|
+
|
|
938
|
+
# ============================================================================
|
|
939
|
+
# AI Handlers
|
|
940
|
+
# ============================================================================
|
|
941
|
+
|
|
942
|
+
@ws_handler("node_id", "node_type")
async def handle_execute_ai_node(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Run a single AI node (chat model or agent) and broadcast its status."""
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()
    node_id = data["node_id"]
    node_type = data["node_type"]

    await broadcaster.update_node_status(node_id, "executing")
    result = await workflow_service.execute_node(
        node_id=node_id,
        node_type=node_type,
        parameters=data.get("parameters", {}),
        nodes=data.get("nodes", []),
        edges=data.get("edges", []),
        session_id=data.get("session_id", "default"),
    )

    if not result.get("success"):
        await broadcaster.update_node_status(node_id, "error", {"error": result.get("error")})
    else:
        # Push both the status and the output payload so the UI refreshes.
        await broadcaster.update_node_status(node_id, "success", result.get("result"))
        await broadcaster.update_node_output(node_id, result.get("result"))

    return {
        "node_id": node_id,
        "result": result.get("result"),
        "error": result.get("error"),
        "execution_time": result.get("execution_time"),
        "timestamp": time.time(),
    }
|
|
965
|
+
|
|
966
|
+
|
|
967
|
+
@ws_handler("provider", "api_key")
async def handle_get_ai_models(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """List the AI models available from a provider for the given key."""
    ai_service = container.ai_service()
    provider = data["provider"]
    models = await ai_service.fetch_models(provider, data["api_key"])
    return {"provider": provider, "models": models, "timestamp": time.time()}
|
|
973
|
+
|
|
974
|
+
|
|
975
|
+
# ============================================================================
|
|
976
|
+
# API Key Handlers
|
|
977
|
+
# ============================================================================
|
|
978
|
+
|
|
979
|
+
@ws_handler("provider", "api_key")
async def handle_validate_api_key(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Validate an API key against its provider, then persist it."""
    ai_service = container.ai_service()
    auth_service = container.auth_service()
    broadcaster = get_status_broadcaster()

    provider = data["provider"].lower()
    api_key = data["api_key"].strip()
    session_id = data.get("session_id", "default")

    # A successful model fetch doubles as key validation.
    models = await ai_service.fetch_models(provider, api_key)
    await auth_service.store_api_key(
        provider=provider,
        api_key=api_key,
        models=models,
        session_id=session_id,
    )

    # Broadcast with hasKey and models so frontend can update reactively
    await broadcaster.update_api_key_status(
        provider=provider,
        valid=True,
        message="API key validated",
        has_key=True,
        models=models,
    )
    return {"provider": provider, "valid": True, "models": models, "timestamp": time.time()}
|
|
996
|
+
|
|
997
|
+
|
|
998
|
+
@ws_handler("provider")
async def handle_get_stored_api_key(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Look up the stored API key (and its cached model list) for a provider."""
    auth_service = container.auth_service()
    provider = data["provider"].lower()
    session_id = data.get("session_id", "default")

    api_key = await auth_service.get_api_key(provider, session_id)
    if not api_key:
        return {"provider": provider, "has_key": False}

    models = await auth_service.get_stored_models(provider, session_id)
    return {
        "provider": provider,
        "has_key": True,
        "api_key": api_key,
        "models": models,
        "timestamp": time.time(),
    }
|
|
1008
|
+
|
|
1009
|
+
|
|
1010
|
+
@ws_handler("provider", "api_key")
async def handle_save_api_key(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Persist an API key as-is (no provider-side validation)."""
    auth_service = container.auth_service()
    await auth_service.store_api_key(
        provider=data["provider"].lower(),
        api_key=data["api_key"].strip(),
        models=data.get("models", []),
        session_id=data.get("session_id", "default"),
    )
    return {"provider": data["provider"]}
|
|
1017
|
+
|
|
1018
|
+
|
|
1019
|
+
@ws_handler("provider")
async def handle_delete_api_key(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Remove a provider's stored API key for the given session."""
    auth_service = container.auth_service()
    provider = data["provider"]
    await auth_service.remove_api_key(provider.lower(), data.get("session_id", "default"))
    return {"provider": provider}
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
# ============================================================================
|
|
1028
|
+
# Android Handlers
|
|
1029
|
+
# ============================================================================
|
|
1030
|
+
|
|
1031
|
+
@ws_handler()
async def handle_get_android_devices(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return the Android devices currently visible to the android service."""
    device_list = await container.android_service().list_devices()
    return {"devices": device_list, "timestamp": time.time()}
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+
@ws_handler("service_id", "action")
async def handle_execute_android_action(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Run a single Android service action and broadcast its node status."""
    android_service = container.android_service()
    broadcaster = get_status_broadcaster()

    service_id = data["service_id"]
    action = data["action"]
    node_id = data.get("node_id", f"android_{service_id}_{action}")

    # Mark the node busy before the (potentially slow) device call.
    await broadcaster.update_node_status(node_id, "executing")
    result = await android_service.execute_service(
        node_id=node_id,
        service_id=service_id,
        action=action,
        parameters=data.get("parameters", {}),
        android_host=data.get("android_host", "localhost"),
        android_port=data.get("android_port", 8888),
    )

    detail = result.get("result") or {"error": result.get("error")}
    if result.get("success"):
        await broadcaster.update_node_status(node_id, "success", detail)
    else:
        await broadcaster.update_node_status(node_id, "error", detail)
    return result
|
|
1058
|
+
|
|
1059
|
+
|
|
1060
|
+
@ws_handler()
async def handle_setup_android_device(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Establish an Android device connection via the workflow engine."""
    workflow_service = container.workflow_service()
    broadcaster = get_status_broadcaster()
    connection_type = data.get("connection_type", "local")

    setup_params = {
        "connection_type": connection_type,
        "device_id": data.get("device_id", ""),
        "websocket_url": data.get("websocket_url", ""),
        "port": data.get("port", 8888),
        "auto_forward": data.get("auto_forward", True),
    }
    result = await workflow_service.execute_node(
        node_id=data.get("node_id", "android_setup"),
        node_type="androidDeviceSetup",
        parameters=setup_params,
    )

    if result.get("success"):
        info = result.get("result", {})
        has_device = info.get("has_real_device", False)
        # Only report a concrete device / connection type when one is attached;
        # otherwise the proxy is up but no device is paired.
        await broadcaster.update_android_status(
            connected=has_device,
            device_id=info.get("android_device") if has_device else None,
            connected_devices=info.get("connected_devices", []),
            connection_type=connection_type if has_device else "proxy_only",
        )
    return result
|
|
1089
|
+
|
|
1090
|
+
|
|
1091
|
+
@ws_handler()
async def handle_android_relay_connect(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Connect to Android relay server.

    Establishes WebSocket connection to relay server and broadcasts QR code for pairing.
    Status updates are automatically broadcast via the relay client's broadcaster integration.
    """
    from services.android import get_relay_client

    url = data.get("url", "")
    api_key = data.get("api_key")

    # Validate the required inputs up front.
    if not url:
        return {"success": False, "connected": False, "error": "Relay URL is required"}
    if not api_key:
        return {"success": False, "connected": False, "error": "API key is required"}

    logger.info(f"[WebSocket] Android relay connect: {url}")

    try:
        client, error = await get_relay_client(url, api_key)
        if not client:
            return {
                "success": False,
                "connected": False,
                "error": error or "Failed to connect to relay server",
            }
        # Session token + QR payload are what the Android device scans to pair.
        return {
            "success": True,
            "connected": True,
            "session_token": client.session_token,
            "qr_data": client.qr_data,
            "message": "Connected to relay server",
        }
    except Exception as e:
        logger.error(f"[WebSocket] Android relay connect error: {e}")
        return {"success": False, "error": str(e)}
|
|
1138
|
+
|
|
1139
|
+
|
|
1140
|
+
@ws_handler()
async def handle_android_relay_disconnect(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Disconnect from Android relay server.

    Closes the relay WebSocket connection and broadcasts disconnected status.
    """
    from services.android import close_relay_client

    logger.info("[WebSocket] Android relay disconnect requested")

    try:
        await close_relay_client()
    except Exception as e:
        logger.error(f"[WebSocket] Android relay disconnect error: {e}")
        return {"success": False, "error": str(e)}

    return {
        "success": True,
        "connected": False,
        "message": "Disconnected from relay server",
    }
|
|
1160
|
+
|
|
1161
|
+
|
|
1162
|
+
@ws_handler()
async def handle_android_relay_reconnect(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Reconnect to Android relay server with a new session token.

    Forces disconnect and reconnect to get fresh session_token and QR code.
    Useful when pairing fails or Android device needs to re-pair.
    """
    from services.android import close_relay_client, get_relay_client

    url = data.get("url", "")
    api_key = data.get("api_key")

    # Validate the required inputs up front.
    if not url:
        return {"success": False, "connected": False, "error": "Relay URL is required"}
    if not api_key:
        return {"success": False, "connected": False, "error": "API key is required"}

    logger.info("[WebSocket] Android relay reconnect: forcing new session")

    try:
        # Drop the existing connection, then give it a moment to close cleanly.
        await close_relay_client()
        await asyncio.sleep(0.5)

        client, error = await get_relay_client(url, api_key)
        if not client:
            return {
                "success": False,
                "connected": False,
                "error": error or "Failed to reconnect to relay server",
            }
        return {
            "success": True,
            "connected": True,
            "session_token": client.session_token,
            "qr_data": client.qr_data,
            "message": "Reconnected with new session token",
        }
    except Exception as e:
        logger.error(f"[WebSocket] Android relay reconnect error: {e}")
        return {"success": False, "error": str(e)}
|
|
1216
|
+
|
|
1217
|
+
|
|
1218
|
+
# ============================================================================
|
|
1219
|
+
# Maps Handlers
|
|
1220
|
+
# ============================================================================
|
|
1221
|
+
|
|
1222
|
+
async def handle_validate_maps_key(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Validate Google Maps API key and save to database if valid.

    Probes the Geocoding API with a fixed address. Any status other than
    REQUEST_DENIED (e.g. OK, ZERO_RESULTS) proves the key is accepted, so the
    key is persisted and a valid status is broadcast. The previous version
    duplicated the store+broadcast sequence in two branches; it is now shared
    via local helpers with identical messages and payloads.

    Payload keys: api_key (required), session_id (optional, default "default").
    """
    import httpx

    broadcaster = get_status_broadcaster()
    auth_service = container.auth_service()

    api_key = data.get("api_key", "").strip()
    session_id = data.get("session_id", "default")

    if not api_key:
        return {"success": False, "valid": False, "error": "api_key required"}

    async def _accept(broadcast_msg: str, result_msg: str) -> Dict[str, Any]:
        # Persist the validated key, notify clients, and build the success payload.
        await auth_service.store_api_key(
            provider="google_maps",
            api_key=api_key,
            models=[],
            session_id=session_id,
        )
        await broadcaster.update_api_key_status(
            provider="google_maps",
            valid=True,
            message=broadcast_msg,
        )
        return {"success": True, "valid": True, "message": result_msg}

    async def _broadcast_invalid(message: str) -> None:
        # Notify clients that validation failed; caller builds the response.
        await broadcaster.update_api_key_status(
            provider="google_maps",
            valid=False,
            message=message,
        )

    try:
        # Test the API key with a simple geocoding request.
        async with httpx.AsyncClient() as client:
            response = await client.get(
                "https://maps.googleapis.com/maps/api/geocode/json",
                params={
                    "address": "1600 Amphitheatre Parkway, Mountain View, CA",
                    "key": api_key,
                },
                timeout=10.0,
            )

        response_data = response.json()
        status = response_data.get("status")

        if status == "OK":
            return await _accept("API key validated successfully", "Google Maps API key is valid")

        if status == "REQUEST_DENIED":
            error_msg = response_data.get("error_message", "Invalid API key")
            await _broadcast_invalid(error_msg)
            # success=True: the validation itself ran; valid=False: the key is bad.
            return {"success": True, "valid": False, "message": error_msg}

        # Other statuses like ZERO_RESULTS still mean the key works.
        return await _accept("API key validated", f"API key is valid (status: {status})")

    except httpx.TimeoutException:
        await _broadcast_invalid("Validation request timed out")
        return {"success": False, "valid": False, "error": "Validation request timed out"}

    except Exception as e:
        logger.error("Maps key validation failed", error=str(e))
        await _broadcast_invalid(str(e))
        return {"success": False, "valid": False, "error": str(e)}
|
|
1304
|
+
|
|
1305
|
+
|
|
1306
|
+
# ============================================================================
|
|
1307
|
+
# WhatsApp Handlers - Wrappers for routers.whatsapp functions
|
|
1308
|
+
# ============================================================================
|
|
1309
|
+
|
|
1310
|
+
from routers.whatsapp import (
|
|
1311
|
+
handle_whatsapp_status as _wa_status,
|
|
1312
|
+
handle_whatsapp_qr as _wa_qr,
|
|
1313
|
+
handle_whatsapp_send as _wa_send,
|
|
1314
|
+
handle_whatsapp_start as _wa_start,
|
|
1315
|
+
handle_whatsapp_restart as _wa_restart,
|
|
1316
|
+
handle_whatsapp_groups as _wa_groups,
|
|
1317
|
+
handle_whatsapp_group_info as _wa_group_info,
|
|
1318
|
+
handle_whatsapp_chat_history as _wa_chat_history,
|
|
1319
|
+
)
|
|
1320
|
+
|
|
1321
|
+
|
|
1322
|
+
async def handle_whatsapp_status(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """WebSocket-facing wrapper around the WhatsApp status router handler."""
    result = await _wa_status()
    return result
|
|
1324
|
+
|
|
1325
|
+
|
|
1326
|
+
async def handle_whatsapp_qr(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """WebSocket-facing wrapper around the WhatsApp QR-code router handler."""
    result = await _wa_qr()
    return result
|
|
1328
|
+
|
|
1329
|
+
|
|
1330
|
+
async def handle_whatsapp_send(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Forward all send params to WhatsApp handler - supports all message types."""
    result = await _wa_send(data)
    return result
|
|
1333
|
+
|
|
1334
|
+
|
|
1335
|
+
async def handle_whatsapp_start(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """WebSocket-facing wrapper that starts the WhatsApp bridge."""
    result = await _wa_start()
    return result
|
|
1337
|
+
|
|
1338
|
+
|
|
1339
|
+
async def handle_whatsapp_restart(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """WebSocket-facing wrapper that restarts the WhatsApp bridge."""
    result = await _wa_restart()
    return result
|
|
1341
|
+
|
|
1342
|
+
|
|
1343
|
+
async def handle_whatsapp_groups(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """WebSocket-facing wrapper that lists WhatsApp groups."""
    result = await _wa_groups()
    return result
|
|
1345
|
+
|
|
1346
|
+
|
|
1347
|
+
async def handle_whatsapp_group_info(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Get group participants with resolved phone numbers."""
    return await _wa_group_info(data.get("group_id", ""))
|
|
1351
|
+
|
|
1352
|
+
|
|
1353
|
+
async def handle_whatsapp_chat_history(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Get chat history from WhatsApp history store."""
    result = await _wa_chat_history(data)
    return result
|
|
1356
|
+
|
|
1357
|
+
|
|
1358
|
+
# ============================================================================
|
|
1359
|
+
# Workflow Storage Operations
|
|
1360
|
+
# ============================================================================
|
|
1361
|
+
|
|
1362
|
+
async def handle_save_workflow(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Persist a workflow definition to the database."""
    workflow_id = data["workflow_id"]
    saved = await container.database().save_workflow(
        workflow_id=workflow_id,
        name=data["name"],
        data=data.get("data", {}),
    )
    return {"success": saved, "workflow_id": workflow_id}
|
|
1371
|
+
|
|
1372
|
+
|
|
1373
|
+
async def handle_get_workflow(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Fetch a single workflow by ID; timestamps are serialized as ISO strings."""
    workflow = await container.database().get_workflow(data["workflow_id"])
    if not workflow:
        return {"success": False, "error": "Workflow not found"}

    created = workflow.created_at.isoformat() if workflow.created_at else None
    updated = workflow.updated_at.isoformat() if workflow.updated_at else None
    return {
        "success": True,
        "workflow": {
            "id": workflow.id,
            "name": workflow.name,
            "data": workflow.data,
            "created_at": created,
            "updated_at": updated,
        },
    }
|
|
1389
|
+
|
|
1390
|
+
|
|
1391
|
+
async def handle_get_all_workflows(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """List all workflows as lightweight summaries (no node data, just counts)."""

    def _summarize(w) -> Dict[str, Any]:
        # nodeCount is derived from the stored graph; 0 when no data was saved.
        return {
            "id": w.id,
            "name": w.name,
            "nodeCount": len(w.data.get("nodes", [])) if w.data else 0,
            "created_at": w.created_at.isoformat() if w.created_at else None,
            "updated_at": w.updated_at.isoformat() if w.updated_at else None,
        }

    workflows = await container.database().get_all_workflows()
    return {"success": True, "workflows": [_summarize(w) for w in workflows]}
|
|
1408
|
+
|
|
1409
|
+
|
|
1410
|
+
async def handle_delete_workflow(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Delete a workflow by ID; success mirrors the database result."""
    workflow_id = data["workflow_id"]
    deleted = await container.database().delete_workflow(workflow_id)
    return {"success": deleted, "workflow_id": workflow_id}
|
|
1415
|
+
|
|
1416
|
+
|
|
1417
|
+
# ============================================================================
|
|
1418
|
+
# Chat Message Handler (for chatTrigger nodes)
|
|
1419
|
+
# ============================================================================
|
|
1420
|
+
|
|
1421
|
+
@ws_handler("message")
async def handle_send_chat_message(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Handle chat message from console panel - dispatches to chatTrigger nodes.

    Persists the message to the database, then resolves any chatTrigger nodes
    waiting on a 'chat_message_received' event with a payload matching the
    trigger's output schema.
    """
    from services import event_waiter

    message = data["message"]
    role = data.get("role", "user")
    session_id = data.get("session_id", "default")
    # NOTE(review): falls back to naive local time — presumably fine for
    # display purposes; confirm if UTC is expected downstream.
    timestamp = data.get("timestamp") or datetime.now().isoformat()

    # Persist so the conversation survives restarts.
    await container.database().add_chat_message(session_id, role, message)

    # Payload shape matches the chatTrigger output schema.
    payload = {
        "message": message,
        "timestamp": timestamp,
        "session_id": session_id,
    }
    resolved = event_waiter.dispatch("chat_message_received", payload)
    logger.info(f"[ChatMessage] Dispatched message to {resolved} chatTrigger waiter(s)")

    return {
        "success": True,
        "message": "Chat message sent",
        "resolved_count": resolved,
        "timestamp": timestamp,
    }
|
|
1458
|
+
|
|
1459
|
+
|
|
1460
|
+
@ws_handler()
async def handle_get_chat_messages(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return the persisted chat history for a session (optionally limited)."""
    session_id = data.get("session_id", "default")
    history = await container.database().get_chat_messages(session_id, data.get("limit"))
    return {
        "success": True,
        "messages": history,
        "session_id": session_id,
    }
|
|
1474
|
+
|
|
1475
|
+
|
|
1476
|
+
@ws_handler()
async def handle_clear_chat_messages(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Delete every chat message stored for a session."""
    session_id = data.get("session_id", "default")
    count = await container.database().clear_chat_messages(session_id)
    return {
        "success": True,
        "message": f"Cleared {count} chat messages",
        "cleared_count": count,
    }
|
|
1489
|
+
|
|
1490
|
+
|
|
1491
|
+
@ws_handler("message", "role")
async def handle_save_chat_message(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Save a single chat message (used for assistant responses)."""
    session_id = data.get("session_id", "default")
    stored = await container.database().add_chat_message(session_id, data["role"], data["message"])
    if stored:
        return {"success": True, "message": "Chat message saved"}
    return {"success": False, "message": "Failed to save chat message"}
|
|
1505
|
+
|
|
1506
|
+
|
|
1507
|
+
@ws_handler()
async def handle_get_chat_sessions(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """List every chat session known to the database."""
    session_list = await container.database().get_chat_sessions()
    return {"success": True, "sessions": session_list}
|
|
1517
|
+
|
|
1518
|
+
|
|
1519
|
+
# ============================================================================
|
|
1520
|
+
# Terminal Logs Handlers
|
|
1521
|
+
# ============================================================================
|
|
1522
|
+
|
|
1523
|
+
@ws_handler()
async def handle_get_terminal_logs(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Return the broadcaster's buffered terminal log history."""
    history = get_status_broadcaster().get_terminal_logs()
    return {"success": True, "logs": history}
|
|
1529
|
+
|
|
1530
|
+
|
|
1531
|
+
@ws_handler()
async def handle_clear_terminal_logs(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Flush the broadcaster's buffered terminal log history."""
    await get_status_broadcaster().clear_terminal_logs()
    return {"success": True, "message": "Terminal logs cleared"}
|
|
1537
|
+
|
|
1538
|
+
|
|
1539
|
+
# ============================================================================
|
|
1540
|
+
# User Skills Handlers
|
|
1541
|
+
# ============================================================================
|
|
1542
|
+
|
|
1543
|
+
@ws_handler("skill_name")
async def handle_get_skill_content(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Get skill content (instructions) by skill name.

    Checks the skill loader first (built-in skills from SKILL.md files), then
    falls back to user skills stored in the database.
    """
    from services.skill_loader import get_skill_loader

    skill_name = data["skill_name"]

    skill = get_skill_loader().load_skill(skill_name)
    if skill:
        return {
            "success": True,
            "skill_name": skill_name,
            "instructions": skill.instructions,
            "description": skill.metadata.description,
            "allowed_tools": skill.metadata.allowed_tools,
            # A filesystem path means the skill came from a SKILL.md file.
            "is_builtin": skill.metadata.path is not None,
            "timestamp": time.time(),
        }

    user_skill = await container.database().get_user_skill(skill_name)
    if user_skill:
        # allowed_tools is stored as a comma-separated string in the DB.
        tools = user_skill.allowed_tools.split(',') if user_skill.allowed_tools else []
        return {
            "success": True,
            "skill_name": skill_name,
            "instructions": user_skill.instructions,
            "description": user_skill.description,
            "allowed_tools": tools,
            "is_builtin": False,
            "timestamp": time.time(),
        }

    return {"success": False, "error": f"Skill '{skill_name}' not found"}
|
|
1582
|
+
|
|
1583
|
+
|
|
1584
|
+
@ws_handler("skill_name", "instructions")
async def handle_save_skill_content(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Save skill content (instructions) by skill name.

    For built-in skills (registered with a filesystem path), the body of the
    skill's SKILL.md file is rewritten while any YAML frontmatter is preserved.
    For user skills, the instructions are updated in the database.

    Required payload keys: ``skill_name``, ``instructions``.
    (Fix: dropped an unused local ``from pathlib import Path`` import.)
    """
    import re
    from services.skill_loader import get_skill_loader

    skill_name = data["skill_name"]
    new_instructions = data["instructions"]
    skill_loader = get_skill_loader()

    # NOTE(review): reaches into the loader's private registry — consider
    # exposing a public lookup on the skill loader instead.
    metadata = skill_loader._registry.get(skill_name)
    if metadata is not None and metadata.path is not None:
        # Built-in skill - update its SKILL.md file on disk.
        skill_md_path = metadata.path / "SKILL.md"

        if not skill_md_path.exists():
            return {"success": False, "error": f"SKILL.md not found for '{skill_name}'"}

        # Read the existing file so the frontmatter can be preserved.
        content = skill_md_path.read_text(encoding='utf-8')

        # Keep the frontmatter (--- ... ---) if present; replace only the body.
        frontmatter_match = re.match(r'^(---\s*\n.*?\n---\s*\n)', content, re.DOTALL)
        if frontmatter_match:
            new_content = frontmatter_match.group(1) + new_instructions
        else:
            new_content = new_instructions

        skill_md_path.write_text(new_content, encoding='utf-8')

        # Clear cache so next load gets fresh content.
        skill_loader.clear_cache()

        logger.info(f"[Skills] Updated built-in skill: {skill_name}")
        return {
            "success": True,
            "skill_name": skill_name,
            "is_builtin": True,
            "message": f"Skill '{skill_name}' saved to SKILL.md",
            "timestamp": time.time(),
        }

    # Not a built-in (or registered without a path) - try the user-skill table.
    database = container.database()
    user_skill = await database.get_user_skill(skill_name)
    if user_skill:
        updated = await database.update_user_skill(
            name=skill_name,
            instructions=new_instructions,
        )
        if updated:
            logger.info(f"[Skills] Updated user skill: {skill_name}")
            return {
                "success": True,
                "skill_name": skill_name,
                "is_builtin": False,
                "message": f"Skill '{skill_name}' saved to database",
                "timestamp": time.time(),
            }

    return {"success": False, "error": f"Skill '{skill_name}' not found"}
|
|
1655
|
+
|
|
1656
|
+
|
|
1657
|
+
@ws_handler()
async def handle_get_user_skills(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """List user-created skills (active ones only, unless active_only=False)."""
    skills = await container.database().get_all_user_skills(
        active_only=data.get("active_only", True)
    )
    return {"skills": skills, "count": len(skills), "timestamp": time.time()}
|
|
1664
|
+
|
|
1665
|
+
|
|
1666
|
+
@ws_handler("name")
async def handle_get_user_skill(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Fetch one user skill by name."""
    skill = await container.database().get_user_skill(data["name"])
    if not skill:
        return {"success": False, "error": f"Skill '{data['name']}' not found"}
    return {"skill": skill, "timestamp": time.time()}
|
|
1674
|
+
|
|
1675
|
+
|
|
1676
|
+
@ws_handler("name", "display_name", "description", "instructions")
async def handle_create_user_skill(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Create a new user skill and announce it to every connected client."""
    database = container.database()
    broadcaster = get_status_broadcaster()

    skill = await database.create_user_skill(
        name=data["name"],
        display_name=data["display_name"],
        description=data["description"],
        instructions=data["instructions"],
        allowed_tools=data.get("allowed_tools"),
        category=data.get("category", "custom"),
        icon=data.get("icon", "star"),
        color=data.get("color", "#6366F1"),
        metadata_json=data.get("metadata"),
        created_by=data.get("created_by"),
    )

    if not skill:
        # Creation fails (returns falsy) when the name collides, among others.
        return {"success": False, "error": f"Failed to create skill. Name '{data['name']}' may already exist."}

    await broadcaster.broadcast({
        "type": "user_skill_created",
        "skill": skill,
        "timestamp": time.time(),
    })
    return {"skill": skill, "timestamp": time.time()}
|
|
1704
|
+
|
|
1705
|
+
|
|
1706
|
+
@ws_handler("name")
async def handle_update_user_skill(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Update fields of an existing user skill; unspecified fields are untouched."""
    database = container.database()
    broadcaster = get_status_broadcaster()

    skill = await database.update_user_skill(
        name=data["name"],
        display_name=data.get("display_name"),
        description=data.get("description"),
        instructions=data.get("instructions"),
        allowed_tools=data.get("allowed_tools"),
        category=data.get("category"),
        icon=data.get("icon"),
        color=data.get("color"),
        metadata_json=data.get("metadata"),
        is_active=data.get("is_active"),
    )

    if not skill:
        return {"success": False, "error": f"Skill '{data['name']}' not found"}

    # Tell every connected client about the change.
    await broadcaster.broadcast({
        "type": "user_skill_updated",
        "skill": skill,
        "timestamp": time.time(),
    })
    return {"skill": skill, "timestamp": time.time()}
|
|
1734
|
+
|
|
1735
|
+
|
|
1736
|
+
@ws_handler("name")
async def handle_delete_user_skill(data: Dict[str, Any], websocket: WebSocket) -> Dict[str, Any]:
    """Delete a user skill and announce the removal to every connected client."""
    database = container.database()
    broadcaster = get_status_broadcaster()
    skill_name = data["name"]

    removed = await database.delete_user_skill(skill_name)
    if not removed:
        return {"success": False, "error": f"Skill '{skill_name}' not found"}

    await broadcaster.broadcast({
        "type": "user_skill_deleted",
        "name": skill_name,
        "timestamp": time.time(),
    })
    return {"deleted": True, "name": skill_name, "timestamp": time.time()}
|
|
1753
|
+
|
|
1754
|
+
|
|
1755
|
+
# ============================================================================
# Message Router
# ============================================================================

# Maps the "type" field of an incoming WebSocket message to the coroutine
# that handles it. Both websocket endpoints' process loops resolve handlers
# here, and /ws/info advertises list(MESSAGE_HANDLERS.keys()) to clients.
# NOTE: dict insertion order is visible in the /ws/info response, so keep
# related handlers grouped rather than re-sorting the entries.
MESSAGE_HANDLERS: Dict[str, MessageHandler] = {
    # Status/ping
    "ping": handle_ping,
    "get_status": handle_get_status,
    "get_android_status": handle_get_android_status,
    "get_node_status": handle_get_node_status,
    "get_variable": handle_get_variable,

    # Node parameters
    "get_node_parameters": handle_get_node_parameters,
    "get_all_node_parameters": handle_get_all_node_parameters,
    "save_node_parameters": handle_save_node_parameters,
    "delete_node_parameters": handle_delete_node_parameters,

    # Tool schemas (source of truth for tool configurations)
    "get_tool_schema": handle_get_tool_schema,
    "save_tool_schema": handle_save_tool_schema,
    "delete_tool_schema": handle_delete_tool_schema,
    "get_all_tool_schemas": handle_get_all_tool_schemas,

    # Node execution
    "execute_node": handle_execute_node,
    "execute_workflow": handle_execute_workflow,
    "cancel_execution": handle_cancel_execution,
    "get_node_output": handle_get_node_output,
    "clear_node_output": handle_clear_node_output,

    # Trigger/event waiting
    "cancel_event_wait": handle_cancel_event_wait,
    "get_active_waiters": handle_get_active_waiters,

    # Dead Letter Queue (DLQ) operations
    "get_dlq_entries": handle_get_dlq_entries,
    "get_dlq_entry": handle_get_dlq_entry,
    "get_dlq_stats": handle_get_dlq_stats,
    "replay_dlq_entry": handle_replay_dlq_entry,
    "remove_dlq_entry": handle_remove_dlq_entry,
    "purge_dlq": handle_purge_dlq,

    # Deployment operations
    "deploy_workflow": handle_deploy_workflow,
    "cancel_deployment": handle_cancel_deployment,
    "get_deployment_status": handle_get_deployment_status,
    "get_workflow_lock": handle_get_workflow_lock,
    "update_deployment_settings": handle_update_deployment_settings,

    # AI operations
    "execute_ai_node": handle_execute_ai_node,
    "get_ai_models": handle_get_ai_models,

    # API key operations
    "validate_api_key": handle_validate_api_key,
    "get_stored_api_key": handle_get_stored_api_key,
    "save_api_key": handle_save_api_key,
    "delete_api_key": handle_delete_api_key,

    # Android operations
    "get_android_devices": handle_get_android_devices,
    "execute_android_action": handle_execute_android_action,
    "setup_android_device": handle_setup_android_device,
    "android_relay_connect": handle_android_relay_connect,
    "android_relay_disconnect": handle_android_relay_disconnect,
    "android_relay_reconnect": handle_android_relay_reconnect,

    # Maps operations
    "validate_maps_key": handle_validate_maps_key,

    # WhatsApp operations
    "whatsapp_status": handle_whatsapp_status,
    "whatsapp_qr": handle_whatsapp_qr,
    "whatsapp_send": handle_whatsapp_send,
    "whatsapp_start": handle_whatsapp_start,
    "whatsapp_restart": handle_whatsapp_restart,
    "whatsapp_groups": handle_whatsapp_groups,
    "whatsapp_group_info": handle_whatsapp_group_info,
    "whatsapp_chat_history": handle_whatsapp_chat_history,

    # Workflow storage operations
    "save_workflow": handle_save_workflow,
    "get_workflow": handle_get_workflow,
    "get_all_workflows": handle_get_all_workflows,
    "delete_workflow": handle_delete_workflow,

    # Chat message (for chatTrigger nodes)
    "send_chat_message": handle_send_chat_message,
    "get_chat_messages": handle_get_chat_messages,
    "clear_chat_messages": handle_clear_chat_messages,
    "save_chat_message": handle_save_chat_message,

    # Terminal logs
    "get_terminal_logs": handle_get_terminal_logs,
    "clear_terminal_logs": handle_clear_terminal_logs,

    # User Skills
    "get_user_skills": handle_get_user_skills,
    "get_user_skill": handle_get_user_skill,
    "create_user_skill": handle_create_user_skill,
    "update_user_skill": handle_update_user_skill,
    "delete_user_skill": handle_delete_user_skill,

    # Skill Content (built-in and user skills)
    "get_skill_content": handle_get_skill_content,
    "save_skill_content": handle_save_skill_content,
}
|
1865
|
+
async def _execute_handler(
    handler: MessageHandler,
    data: Dict[str, Any],
    websocket: WebSocket,
    msg_type: str,
    request_id: Optional[str]
):
    """Invoke *handler* and deliver its result back over *websocket*.

    Request/response messages (those carrying a request_id) get their
    result wrapped in a "<msg_type>_result" envelope echoing the id;
    fire-and-forget messages have the raw result sent as-is. Errors are
    always logged and are reported to the client only when a request_id
    exists to correlate the failure with.
    """
    try:
        outcome = await handler(data, websocket)

        if request_id:
            # Envelope first, then handler output: result keys may override.
            reply = {"type": f"{msg_type}_result", "request_id": request_id}
            reply.update(outcome)
            await _safe_send(websocket, reply)
        else:
            await _safe_send(websocket, outcome)

    except asyncio.CancelledError:
        # The WebSocket went away (or the server is shutting down) while
        # the handler ran — propagate so the task finishes as cancelled.
        logger.debug(f"[WebSocket] Handler cancelled: {msg_type}")
        raise
    except Exception as exc:
        logger.error("Handler error", msg_type=msg_type, error=str(exc))
        if request_id:
            await _safe_send(websocket, {
                "type": f"{msg_type}_result",
                "request_id": request_id,
                "success": False,
                "error": str(exc)
            })
|
1900
|
+
@router.websocket("/ws/status")
async def websocket_status_endpoint(websocket: WebSocket):
    """WebSocket endpoint for real-time bidirectional communication.

    Uses decoupled receive/process pattern with asyncio.Queue:
    - Receive task: continuously receives messages into queue (never blocks)
    - Process task: reads from queue and spawns handler tasks (can be long-running)

    This ensures cancel messages are always processed immediately, even when
    long-running handlers (like trigger node execution) are active.

    All client requests include a request_id for correlation.
    The server responds with the same request_id for request/response matching.
    Broadcasts (without request_id) are sent to all connected clients.
    """
    # Authenticate via cookie before accepting connection
    settings = container.settings()

    # Check if auth is disabled (VITE_AUTH_ENABLED=false)
    auth_disabled = settings.vite_auth_enabled and settings.vite_auth_enabled.lower() == 'false'

    if not auth_disabled:
        # Auth enabled - verify token from the session cookie.
        token = websocket.cookies.get(settings.jwt_cookie_name)

        if not token:
            # 4001 is an application-defined close code for auth failures.
            await websocket.close(code=4001, reason="Not authenticated")
            return

        user_auth = container.user_auth_service()
        payload = user_auth.verify_token(token)

        if not payload:
            await websocket.close(code=4001, reason="Invalid or expired session")
            return

    # broadcaster.connect() registers this socket for broadcasts
    # (presumably it also accepts the connection — confirm in broadcaster impl).
    broadcaster = get_status_broadcaster()
    await broadcaster.connect(websocket)

    # Message queue for decoupling receive from processing
    message_queue: asyncio.Queue = asyncio.Queue()

    # Track handler tasks for this WebSocket so they can be cancelled
    # on disconnect; also registered in the module-level _handler_tasks map.
    handler_tasks: Set[asyncio.Task] = set()
    _handler_tasks[websocket] = handler_tasks

    async def receive_loop():
        """Receives messages and puts them in queue - never blocks on handlers."""
        try:
            while True:
                data = await websocket.receive_json()
                await message_queue.put(data)
        except WebSocketDisconnect:
            # Don't log here - logging during shutdown can raise KeyboardInterrupt
            await message_queue.put(None)  # Signal shutdown
        except asyncio.CancelledError:
            # Task cancelled during shutdown - this is expected; still signal
            # the process loop to exit before re-raising.
            await message_queue.put(None)
            raise
        except Exception as e:
            # Only log if it's not a shutdown-related error
            if not isinstance(e, (KeyboardInterrupt, SystemExit)):
                logger.error(f"[WebSocket] Receive error: {e}")
            await message_queue.put(None)

    async def process_loop():
        """Processes messages from queue - spawns handler tasks that can run concurrently."""
        while True:
            data = await message_queue.get()

            if data is None:  # Shutdown signal
                break

            msg_type = data.get("type", "")
            request_id = data.get("request_id")

            logger.debug("WebSocket message received", msg_type=msg_type, has_request_id=bool(request_id))

            handler = MESSAGE_HANDLERS.get(msg_type)

            if handler:
                # Run handler as task so it doesn't block queue processing
                # This allows cancel_event_wait to be processed while execute_node is waiting
                task = asyncio.create_task(
                    _execute_handler(handler, data, websocket, msg_type, request_id)
                )
                handler_tasks.add(task)
                # Self-removing bookkeeping: task drops out of the set when done.
                task.add_done_callback(handler_tasks.discard)
            else:
                logger.warning("Unknown message type", msg_type=msg_type)
                if request_id:
                    await _safe_send(websocket, {
                        "type": "error",
                        "request_id": request_id,
                        "code": "UNKNOWN_MESSAGE_TYPE",
                        "message": f"Unknown message type: {msg_type}"
                    })

    try:
        # Run receive and process loops concurrently using TaskGroup (Python 3.11+)
        async with asyncio.TaskGroup() as tg:
            tg.create_task(receive_loop())
            tg.create_task(process_loop())

    except* WebSocketDisconnect:
        pass  # Normal disconnect - don't log during shutdown
    except* asyncio.CancelledError:
        # NOTE(review): swallowing CancelledError suppresses cancellation of
        # this coroutine itself - confirm this is intended for shutdown.
        pass  # Task cancelled during shutdown - expected
    except* (KeyboardInterrupt, SystemExit):
        pass  # Server shutdown - don't log
    except* Exception as eg:
        # Log only unexpected members of the exception group.
        for exc in eg.exceptions:
            if not isinstance(exc, (WebSocketDisconnect, asyncio.CancelledError, KeyboardInterrupt, SystemExit)):
                logger.error(f"[WebSocket] TaskGroup error: {exc}")
    finally:
        # Cancel any running handler tasks on disconnect
        for task in list(handler_tasks):
            if not task.done():
                task.cancel()

        # Wait for tasks to finish cancellation
        if handler_tasks:
            await asyncio.gather(*handler_tasks, return_exceptions=True)

        # Cleanup
        _handler_tasks.pop(websocket, None)
        await broadcaster.disconnect(websocket)
@router.websocket("/ws/internal")
async def websocket_internal_endpoint(websocket: WebSocket):
    """Internal WebSocket endpoint for Temporal workers.

    This endpoint bypasses authentication and is intended for internal
    service-to-service communication (e.g., Temporal activity -> MachinaOs).

    Security: Should only be exposed on localhost/internal network.

    Mirrors /ws/status's receive/process queue pattern, but accepts the
    socket directly and does not register it with the broadcaster.
    """
    # NOTE(review): broadcaster appears unused in this endpoint (no
    # connect/disconnect like /ws/status) - confirm whether it is needed.
    broadcaster = get_status_broadcaster()
    await websocket.accept()

    logger.info("[WebSocket Internal] Temporal worker connected")

    # Message queue for decoupling receive from processing
    message_queue: asyncio.Queue = asyncio.Queue()

    # Track handler tasks for this WebSocket
    handler_tasks: Set[asyncio.Task] = set()

    async def receive_loop():
        """Receives messages and puts them in queue."""
        try:
            while True:
                data = await websocket.receive_json()
                await message_queue.put(data)
        except WebSocketDisconnect:
            # None is the shutdown sentinel for process_loop.
            await message_queue.put(None)
        except asyncio.CancelledError:
            await message_queue.put(None)
            raise
        except Exception as e:
            if not isinstance(e, (KeyboardInterrupt, SystemExit)):
                logger.error(f"[WebSocket Internal] Receive error: {e}")
            await message_queue.put(None)

    async def process_loop():
        """Processes messages from queue."""
        while True:
            data = await message_queue.get()

            if data is None:
                break

            msg_type = data.get("type", "")
            request_id = data.get("request_id")

            # Internal clients share the same handler table as /ws/status.
            handler = MESSAGE_HANDLERS.get(msg_type)

            if handler:
                # Spawn as a task so long-running handlers don't block the queue.
                task = asyncio.create_task(
                    _execute_handler(handler, data, websocket, msg_type, request_id)
                )
                handler_tasks.add(task)
                task.add_done_callback(handler_tasks.discard)
            else:
                logger.warning(f"[WebSocket Internal] Unknown message type: {msg_type}")
                if request_id:
                    await _safe_send(websocket, {
                        "type": "error",
                        "request_id": request_id,
                        "code": "UNKNOWN_MESSAGE_TYPE",
                        "message": f"Unknown message type: {msg_type}"
                    })

    try:
        # Run both loops concurrently; TaskGroup requires Python 3.11+.
        async with asyncio.TaskGroup() as tg:
            tg.create_task(receive_loop())
            tg.create_task(process_loop())

    except* WebSocketDisconnect:
        pass  # Normal disconnect
    except* asyncio.CancelledError:
        pass  # Task cancelled during shutdown
    except* (KeyboardInterrupt, SystemExit):
        pass  # Server shutdown
    except* Exception as eg:
        # Log only unexpected members of the exception group.
        for exc in eg.exceptions:
            if not isinstance(exc, (WebSocketDisconnect, asyncio.CancelledError, KeyboardInterrupt, SystemExit)):
                logger.error(f"[WebSocket Internal] TaskGroup error: {exc}")
    finally:
        # Cancel any in-flight handler tasks, then wait for them to unwind.
        for task in list(handler_tasks):
            if not task.done():
                task.cancel()

        if handler_tasks:
            await asyncio.gather(*handler_tasks, return_exceptions=True)
2118
|
+
@router.get("/ws/info")
async def websocket_info():
    """Describe the live WebSocket service.

    Reports the endpoint path, current client count, latest status
    snapshot, and every routable message type.
    """
    status_broadcaster = get_status_broadcaster()
    info = {"endpoint": "/ws/status"}
    info["connected_clients"] = status_broadcaster.connection_count
    info["current_status"] = status_broadcaster.get_status()
    # Iterating the dict yields its keys in insertion order.
    info["supported_message_types"] = list(MESSAGE_HANDLERS)
    return info