machinaos 0.0.1 → 0.0.6
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.env.template +71 -71
- package/LICENSE +21 -21
- package/README.md +145 -87
- package/bin/cli.js +62 -106
- package/client/.dockerignore +45 -45
- package/client/Dockerfile +68 -68
- package/client/dist/assets/index-DFSC53FP.css +1 -0
- package/client/dist/assets/index-fJ-1gTf5.js +613 -0
- package/client/dist/index.html +14 -0
- package/client/eslint.config.js +34 -16
- package/client/nginx.conf +66 -66
- package/client/package.json +61 -48
- package/client/src/App.tsx +27 -27
- package/client/src/Dashboard.tsx +1200 -1172
- package/client/src/ParameterPanel.tsx +302 -300
- package/client/src/components/AIAgentNode.tsx +315 -321
- package/client/src/components/APIKeyValidator.tsx +117 -117
- package/client/src/components/ClaudeChatModelNode.tsx +17 -17
- package/client/src/components/CredentialsModal.tsx +1200 -306
- package/client/src/components/GeminiChatModelNode.tsx +17 -17
- package/client/src/components/GenericNode.tsx +356 -356
- package/client/src/components/LocationParameterPanel.tsx +153 -153
- package/client/src/components/ModelNode.tsx +285 -285
- package/client/src/components/OpenAIChatModelNode.tsx +17 -17
- package/client/src/components/OutputPanel.tsx +470 -470
- package/client/src/components/ParameterRenderer.tsx +1873 -1873
- package/client/src/components/SkillEditorModal.tsx +3 -3
- package/client/src/components/SquareNode.tsx +812 -796
- package/client/src/components/ToolkitNode.tsx +365 -365
- package/client/src/components/auth/LoginPage.tsx +247 -247
- package/client/src/components/auth/ProtectedRoute.tsx +59 -59
- package/client/src/components/base/BaseChatModelNode.tsx +270 -270
- package/client/src/components/icons/AIProviderIcons.tsx +50 -50
- package/client/src/components/maps/GoogleMapsPicker.tsx +136 -136
- package/client/src/components/maps/MapsPreviewPanel.tsx +109 -109
- package/client/src/components/maps/index.ts +25 -25
- package/client/src/components/parameterPanel/InputSection.tsx +1094 -1094
- package/client/src/components/parameterPanel/LocationPanelLayout.tsx +64 -64
- package/client/src/components/parameterPanel/MapsSection.tsx +91 -91
- package/client/src/components/parameterPanel/MiddleSection.tsx +867 -571
- package/client/src/components/parameterPanel/OutputSection.tsx +80 -80
- package/client/src/components/parameterPanel/ParameterPanelLayout.tsx +81 -81
- package/client/src/components/parameterPanel/ToolSchemaEditor.tsx +436 -436
- package/client/src/components/parameterPanel/index.ts +41 -41
- package/client/src/components/shared/DataPanel.tsx +142 -142
- package/client/src/components/shared/JSONTreeRenderer.tsx +105 -105
- package/client/src/components/ui/AIResultModal.tsx +203 -203
- package/client/src/components/ui/ApiKeyInput.tsx +93 -0
- package/client/src/components/ui/CodeEditor.tsx +81 -81
- package/client/src/components/ui/CollapsibleSection.tsx +87 -87
- package/client/src/components/ui/ComponentItem.tsx +153 -153
- package/client/src/components/ui/ComponentPalette.tsx +320 -320
- package/client/src/components/ui/ConsolePanel.tsx +151 -43
- package/client/src/components/ui/ErrorBoundary.tsx +195 -195
- package/client/src/components/ui/InputNodesPanel.tsx +203 -203
- package/client/src/components/ui/MapSelector.tsx +313 -313
- package/client/src/components/ui/Modal.tsx +151 -148
- package/client/src/components/ui/NodeOutputPanel.tsx +1150 -1150
- package/client/src/components/ui/OutputDisplayPanel.tsx +381 -381
- package/client/src/components/ui/QRCodeDisplay.tsx +182 -0
- package/client/src/components/ui/TopToolbar.tsx +736 -736
- package/client/src/components/ui/WorkflowSidebar.tsx +293 -293
- package/client/src/config/antdTheme.ts +186 -186
- package/client/src/contexts/AuthContext.tsx +221 -221
- package/client/src/contexts/ThemeContext.tsx +42 -42
- package/client/src/contexts/WebSocketContext.tsx +2144 -1971
- package/client/src/factories/baseChatModelFactory.ts +255 -255
- package/client/src/hooks/useAndroidOperations.ts +118 -164
- package/client/src/hooks/useApiKeyValidation.ts +106 -106
- package/client/src/hooks/useApiKeys.ts +238 -238
- package/client/src/hooks/useAppTheme.ts +17 -17
- package/client/src/hooks/useComponentPalette.ts +50 -50
- package/client/src/hooks/useDragAndDrop.ts +123 -123
- package/client/src/hooks/useDragVariable.ts +88 -88
- package/client/src/hooks/useExecution.ts +319 -313
- package/client/src/hooks/useParameterPanel.ts +176 -176
- package/client/src/hooks/useReactFlowNodes.ts +188 -188
- package/client/src/hooks/useToolSchema.ts +209 -209
- package/client/src/hooks/useWhatsApp.ts +196 -196
- package/client/src/hooks/useWorkflowManagement.ts +45 -45
- package/client/src/index.css +314 -314
- package/client/src/nodeDefinitions/aiAgentNodes.ts +335 -335
- package/client/src/nodeDefinitions/aiModelNodes.ts +340 -340
- package/client/src/nodeDefinitions/androidServiceNodes.ts +383 -383
- package/client/src/nodeDefinitions/chatNodes.ts +135 -135
- package/client/src/nodeDefinitions/codeNodes.ts +54 -54
- package/client/src/nodeDefinitions/index.ts +14 -14
- package/client/src/nodeDefinitions/locationNodes.ts +462 -462
- package/client/src/nodeDefinitions/schedulerNodes.ts +220 -220
- package/client/src/nodeDefinitions/skillNodes.ts +17 -5
- package/client/src/nodeDefinitions/utilityNodes.ts +284 -284
- package/client/src/nodeDefinitions/whatsappNodes.ts +821 -865
- package/client/src/nodeDefinitions.ts +101 -103
- package/client/src/services/dynamicParameterService.ts +95 -95
- package/client/src/services/execution/aiAgentExecutionService.ts +34 -34
- package/client/src/services/executionService.ts +227 -231
- package/client/src/services/workflowApi.ts +91 -91
- package/client/src/store/useAppStore.ts +578 -581
- package/client/src/styles/theme.ts +513 -508
- package/client/src/styles/zIndex.ts +16 -16
- package/client/src/types/ComponentTypes.ts +38 -38
- package/client/src/types/INodeProperties.ts +287 -287
- package/client/src/types/NodeTypes.ts +27 -27
- package/client/src/utils/formatters.ts +32 -32
- package/client/src/utils/googleMapsLoader.ts +139 -139
- package/client/src/utils/locationUtils.ts +84 -84
- package/client/src/utils/nodeUtils.ts +30 -30
- package/client/src/utils/workflow.ts +29 -29
- package/client/src/vite-env.d.ts +12 -12
- package/client/tailwind.config.js +59 -59
- package/client/tsconfig.json +25 -25
- package/client/vite.config.js +35 -35
- package/package.json +78 -70
- package/scripts/build.js +153 -45
- package/scripts/clean.js +40 -40
- package/scripts/start.js +234 -210
- package/scripts/stop.js +301 -325
- package/server/.dockerignore +44 -44
- package/server/Dockerfile +45 -45
- package/server/constants.py +244 -249
- package/server/core/cache.py +460 -460
- package/server/core/config.py +127 -127
- package/server/core/container.py +98 -98
- package/server/core/database.py +1296 -1210
- package/server/core/logging.py +313 -313
- package/server/main.py +288 -288
- package/server/middleware/__init__.py +5 -5
- package/server/middleware/auth.py +89 -89
- package/server/models/auth.py +52 -52
- package/server/models/cache.py +24 -24
- package/server/models/database.py +235 -210
- package/server/models/nodes.py +435 -455
- package/server/pyproject.toml +75 -72
- package/server/requirements.txt +83 -83
- package/server/routers/android.py +294 -294
- package/server/routers/auth.py +203 -203
- package/server/routers/database.py +150 -150
- package/server/routers/maps.py +141 -141
- package/server/routers/nodejs_compat.py +288 -288
- package/server/routers/webhook.py +90 -90
- package/server/routers/websocket.py +2239 -2127
- package/server/routers/whatsapp.py +761 -761
- package/server/routers/workflow.py +199 -199
- package/server/services/ai.py +2444 -2414
- package/server/services/android_service.py +588 -588
- package/server/services/auth.py +130 -130
- package/server/services/chat_client.py +160 -160
- package/server/services/deployment/manager.py +706 -706
- package/server/services/event_waiter.py +675 -785
- package/server/services/execution/executor.py +1351 -1351
- package/server/services/execution/models.py +1 -1
- package/server/services/handlers/__init__.py +122 -126
- package/server/services/handlers/ai.py +390 -355
- package/server/services/handlers/android.py +69 -260
- package/server/services/handlers/code.py +278 -278
- package/server/services/handlers/http.py +193 -193
- package/server/services/handlers/tools.py +146 -32
- package/server/services/handlers/triggers.py +107 -107
- package/server/services/handlers/utility.py +822 -822
- package/server/services/handlers/whatsapp.py +423 -476
- package/server/services/maps.py +288 -288
- package/server/services/memory_store.py +103 -103
- package/server/services/node_executor.py +372 -375
- package/server/services/scheduler.py +155 -155
- package/server/services/skill_loader.py +1 -1
- package/server/services/status_broadcaster.py +834 -826
- package/server/services/temporal/__init__.py +23 -23
- package/server/services/temporal/activities.py +344 -344
- package/server/services/temporal/client.py +76 -76
- package/server/services/temporal/executor.py +147 -147
- package/server/services/temporal/worker.py +251 -251
- package/server/services/temporal/workflow.py +355 -355
- package/server/services/temporal/ws_client.py +236 -236
- package/server/services/text.py +110 -110
- package/server/services/user_auth.py +172 -172
- package/server/services/websocket_client.py +29 -29
- package/server/services/workflow.py +597 -597
- package/server/skills/android-skill/SKILL.md +4 -4
- package/server/skills/code-skill/SKILL.md +123 -89
- package/server/skills/maps-skill/SKILL.md +3 -3
- package/server/skills/memory-skill/SKILL.md +1 -1
- package/server/skills/web-search-skill/SKILL.md +154 -0
- package/server/skills/whatsapp-skill/SKILL.md +3 -3
- package/server/uv.lock +461 -100
- package/server/whatsapp-rpc/.dockerignore +30 -30
- package/server/whatsapp-rpc/Dockerfile +44 -44
- package/server/whatsapp-rpc/Dockerfile.web +17 -17
- package/server/whatsapp-rpc/README.md +139 -139
- package/server/whatsapp-rpc/bin/whatsapp-rpc-server +0 -0
- package/server/whatsapp-rpc/cli.js +95 -95
- package/server/whatsapp-rpc/configs/config.yaml +6 -6
- package/server/whatsapp-rpc/docker-compose.yml +35 -35
- package/server/whatsapp-rpc/docs/API.md +410 -410
- package/server/whatsapp-rpc/node_modules/.package-lock.json +259 -0
- package/server/whatsapp-rpc/node_modules/chalk/license +9 -0
- package/server/whatsapp-rpc/node_modules/chalk/package.json +83 -0
- package/server/whatsapp-rpc/node_modules/chalk/readme.md +297 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/index.d.ts +325 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/index.js +225 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/utilities.js +33 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/ansi-styles/index.d.ts +236 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/ansi-styles/index.js +223 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/supports-color/browser.d.ts +1 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/supports-color/browser.js +34 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/supports-color/index.d.ts +55 -0
- package/server/whatsapp-rpc/node_modules/chalk/source/vendor/supports-color/index.js +190 -0
- package/server/whatsapp-rpc/node_modules/commander/LICENSE +22 -0
- package/server/whatsapp-rpc/node_modules/commander/Readme.md +1148 -0
- package/server/whatsapp-rpc/node_modules/commander/esm.mjs +16 -0
- package/server/whatsapp-rpc/node_modules/commander/index.js +26 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/argument.js +145 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/command.js +2179 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/error.js +43 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/help.js +462 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/option.js +329 -0
- package/server/whatsapp-rpc/node_modules/commander/lib/suggestSimilar.js +100 -0
- package/server/whatsapp-rpc/node_modules/commander/package-support.json +16 -0
- package/server/whatsapp-rpc/node_modules/commander/package.json +80 -0
- package/server/whatsapp-rpc/node_modules/commander/typings/esm.d.mts +3 -0
- package/server/whatsapp-rpc/node_modules/commander/typings/index.d.ts +884 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/LICENSE +21 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/README.md +89 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/index.js +39 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/lib/enoent.js +59 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/lib/parse.js +91 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/lib/util/escape.js +47 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/lib/util/readShebang.js +23 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/lib/util/resolveCommand.js +52 -0
- package/server/whatsapp-rpc/node_modules/cross-spawn/package.json +73 -0
- package/server/whatsapp-rpc/node_modules/execa/index.d.ts +955 -0
- package/server/whatsapp-rpc/node_modules/execa/index.js +309 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/command.js +119 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/error.js +87 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/kill.js +102 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/pipe.js +42 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/promise.js +36 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/stdio.js +49 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/stream.js +133 -0
- package/server/whatsapp-rpc/node_modules/execa/lib/verbose.js +19 -0
- package/server/whatsapp-rpc/node_modules/execa/license +9 -0
- package/server/whatsapp-rpc/node_modules/execa/package.json +90 -0
- package/server/whatsapp-rpc/node_modules/execa/readme.md +822 -0
- package/server/whatsapp-rpc/node_modules/get-stream/license +9 -0
- package/server/whatsapp-rpc/node_modules/get-stream/package.json +53 -0
- package/server/whatsapp-rpc/node_modules/get-stream/readme.md +291 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/array-buffer.js +84 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/array.js +32 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/buffer.js +20 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/contents.js +101 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/index.d.ts +119 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/index.js +5 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/string.js +36 -0
- package/server/whatsapp-rpc/node_modules/get-stream/source/utils.js +11 -0
- package/server/whatsapp-rpc/node_modules/get-them-args/LICENSE +21 -0
- package/server/whatsapp-rpc/node_modules/get-them-args/README.md +95 -0
- package/server/whatsapp-rpc/node_modules/get-them-args/index.js +97 -0
- package/server/whatsapp-rpc/node_modules/get-them-args/package.json +36 -0
- package/server/whatsapp-rpc/node_modules/human-signals/LICENSE +201 -0
- package/server/whatsapp-rpc/node_modules/human-signals/README.md +168 -0
- package/server/whatsapp-rpc/node_modules/human-signals/build/src/core.js +273 -0
- package/server/whatsapp-rpc/node_modules/human-signals/build/src/main.d.ts +73 -0
- package/server/whatsapp-rpc/node_modules/human-signals/build/src/main.js +70 -0
- package/server/whatsapp-rpc/node_modules/human-signals/build/src/realtime.js +16 -0
- package/server/whatsapp-rpc/node_modules/human-signals/build/src/signals.js +34 -0
- package/server/whatsapp-rpc/node_modules/human-signals/package.json +61 -0
- package/server/whatsapp-rpc/node_modules/is-stream/index.d.ts +81 -0
- package/server/whatsapp-rpc/node_modules/is-stream/index.js +29 -0
- package/server/whatsapp-rpc/node_modules/is-stream/license +9 -0
- package/server/whatsapp-rpc/node_modules/is-stream/package.json +44 -0
- package/server/whatsapp-rpc/node_modules/is-stream/readme.md +60 -0
- package/server/whatsapp-rpc/node_modules/isexe/LICENSE +15 -0
- package/server/whatsapp-rpc/node_modules/isexe/README.md +51 -0
- package/server/whatsapp-rpc/node_modules/isexe/index.js +57 -0
- package/server/whatsapp-rpc/node_modules/isexe/mode.js +41 -0
- package/server/whatsapp-rpc/node_modules/isexe/package.json +31 -0
- package/server/whatsapp-rpc/node_modules/isexe/test/basic.js +221 -0
- package/server/whatsapp-rpc/node_modules/isexe/windows.js +42 -0
- package/server/whatsapp-rpc/node_modules/kill-port/.editorconfig +12 -0
- package/server/whatsapp-rpc/node_modules/kill-port/.gitattributes +1 -0
- package/server/whatsapp-rpc/node_modules/kill-port/LICENSE +21 -0
- package/server/whatsapp-rpc/node_modules/kill-port/README.md +140 -0
- package/server/whatsapp-rpc/node_modules/kill-port/cli.js +25 -0
- package/server/whatsapp-rpc/node_modules/kill-port/example.js +21 -0
- package/server/whatsapp-rpc/node_modules/kill-port/index.js +46 -0
- package/server/whatsapp-rpc/node_modules/kill-port/logo.png +0 -0
- package/server/whatsapp-rpc/node_modules/kill-port/package.json +41 -0
- package/server/whatsapp-rpc/node_modules/kill-port/pnpm-lock.yaml +4606 -0
- package/server/whatsapp-rpc/node_modules/kill-port/test.js +16 -0
- package/server/whatsapp-rpc/node_modules/merge-stream/LICENSE +21 -0
- package/server/whatsapp-rpc/node_modules/merge-stream/README.md +78 -0
- package/server/whatsapp-rpc/node_modules/merge-stream/index.js +41 -0
- package/server/whatsapp-rpc/node_modules/merge-stream/package.json +19 -0
- package/server/whatsapp-rpc/node_modules/mimic-fn/index.d.ts +52 -0
- package/server/whatsapp-rpc/node_modules/mimic-fn/index.js +71 -0
- package/server/whatsapp-rpc/node_modules/mimic-fn/license +9 -0
- package/server/whatsapp-rpc/node_modules/mimic-fn/package.json +45 -0
- package/server/whatsapp-rpc/node_modules/mimic-fn/readme.md +90 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/index.d.ts +90 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/index.js +52 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/license +9 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/node_modules/path-key/index.d.ts +31 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/node_modules/path-key/index.js +12 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/node_modules/path-key/license +9 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/node_modules/path-key/package.json +41 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/node_modules/path-key/readme.md +57 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/package.json +49 -0
- package/server/whatsapp-rpc/node_modules/npm-run-path/readme.md +104 -0
- package/server/whatsapp-rpc/node_modules/onetime/index.d.ts +59 -0
- package/server/whatsapp-rpc/node_modules/onetime/index.js +41 -0
- package/server/whatsapp-rpc/node_modules/onetime/license +9 -0
- package/server/whatsapp-rpc/node_modules/onetime/package.json +45 -0
- package/server/whatsapp-rpc/node_modules/onetime/readme.md +94 -0
- package/server/whatsapp-rpc/node_modules/path-key/index.d.ts +40 -0
- package/server/whatsapp-rpc/node_modules/path-key/index.js +16 -0
- package/server/whatsapp-rpc/node_modules/path-key/license +9 -0
- package/server/whatsapp-rpc/node_modules/path-key/package.json +39 -0
- package/server/whatsapp-rpc/node_modules/path-key/readme.md +61 -0
- package/server/whatsapp-rpc/node_modules/shebang-command/index.js +19 -0
- package/server/whatsapp-rpc/node_modules/shebang-command/license +9 -0
- package/server/whatsapp-rpc/node_modules/shebang-command/package.json +34 -0
- package/server/whatsapp-rpc/node_modules/shebang-command/readme.md +34 -0
- package/server/whatsapp-rpc/node_modules/shebang-regex/index.d.ts +22 -0
- package/server/whatsapp-rpc/node_modules/shebang-regex/index.js +2 -0
- package/server/whatsapp-rpc/node_modules/shebang-regex/license +9 -0
- package/server/whatsapp-rpc/node_modules/shebang-regex/package.json +35 -0
- package/server/whatsapp-rpc/node_modules/shebang-regex/readme.md +33 -0
- package/server/whatsapp-rpc/node_modules/shell-exec/LICENSE +21 -0
- package/server/whatsapp-rpc/node_modules/shell-exec/README.md +60 -0
- package/server/whatsapp-rpc/node_modules/shell-exec/index.js +47 -0
- package/server/whatsapp-rpc/node_modules/shell-exec/package.json +29 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/LICENSE.txt +16 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/README.md +74 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/browser.d.ts +12 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/browser.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/browser.js +10 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/browser.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/index.d.ts +48 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/index.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/index.js +279 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/index.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/package.json +3 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/signals.d.ts +29 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/signals.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/signals.js +42 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/cjs/signals.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/browser.d.ts +12 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/browser.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/browser.js +4 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/browser.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/index.d.ts +48 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/index.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/index.js +275 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/index.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/package.json +3 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/signals.d.ts +29 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/signals.d.ts.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/signals.js +39 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/dist/mjs/signals.js.map +1 -0
- package/server/whatsapp-rpc/node_modules/signal-exit/package.json +106 -0
- package/server/whatsapp-rpc/node_modules/strip-final-newline/index.js +14 -0
- package/server/whatsapp-rpc/node_modules/strip-final-newline/license +9 -0
- package/server/whatsapp-rpc/node_modules/strip-final-newline/package.json +43 -0
- package/server/whatsapp-rpc/node_modules/strip-final-newline/readme.md +35 -0
- package/server/whatsapp-rpc/node_modules/which/CHANGELOG.md +166 -0
- package/server/whatsapp-rpc/node_modules/which/LICENSE +15 -0
- package/server/whatsapp-rpc/node_modules/which/README.md +54 -0
- package/server/whatsapp-rpc/node_modules/which/bin/node-which +52 -0
- package/server/whatsapp-rpc/node_modules/which/package.json +43 -0
- package/server/whatsapp-rpc/node_modules/which/which.js +125 -0
- package/server/whatsapp-rpc/package-lock.json +272 -0
- package/server/whatsapp-rpc/package.json +30 -30
- package/server/whatsapp-rpc/schema.json +1294 -1294
- package/server/whatsapp-rpc/scripts/clean.cjs +66 -66
- package/server/whatsapp-rpc/scripts/cli.js +162 -162
- package/server/whatsapp-rpc/src/go/whatsapp/history.go +166 -166
- package/server/whatsapp-rpc/src/python/pyproject.toml +15 -15
- package/server/whatsapp-rpc/src/python/whatsapp_rpc/__init__.py +4 -4
- package/server/whatsapp-rpc/src/python/whatsapp_rpc/client.py +427 -427
- package/server/whatsapp-rpc/web/app.py +609 -609
- package/server/whatsapp-rpc/web/requirements.txt +6 -6
- package/server/whatsapp-rpc/web/rpc_client.py +427 -427
- package/server/whatsapp-rpc/web/static/openapi.yaml +59 -59
- package/server/whatsapp-rpc/web/templates/base.html +149 -149
- package/server/whatsapp-rpc/web/templates/contacts.html +240 -240
- package/server/whatsapp-rpc/web/templates/dashboard.html +319 -319
- package/server/whatsapp-rpc/web/templates/groups.html +328 -328
- package/server/whatsapp-rpc/web/templates/messages.html +465 -465
- package/server/whatsapp-rpc/web/templates/messaging.html +680 -680
- package/server/whatsapp-rpc/web/templates/send.html +258 -258
- package/server/whatsapp-rpc/web/templates/settings.html +459 -459
- package/client/src/components/ui/AndroidSettingsPanel.tsx +0 -401
- package/client/src/components/ui/WhatsAppSettingsPanel.tsx +0 -345
- package/client/src/nodeDefinitions/androidDeviceNodes.ts +0 -140
- package/docker-compose.prod.yml +0 -107
- package/docker-compose.yml +0 -104
- package/docs-MachinaOs/README.md +0 -85
- package/docs-MachinaOs/deployment/docker.mdx +0 -228
- package/docs-MachinaOs/deployment/production.mdx +0 -345
- package/docs-MachinaOs/docs.json +0 -75
- package/docs-MachinaOs/faq.mdx +0 -309
- package/docs-MachinaOs/favicon.svg +0 -5
- package/docs-MachinaOs/installation.mdx +0 -160
- package/docs-MachinaOs/introduction.mdx +0 -114
- package/docs-MachinaOs/logo/dark.svg +0 -6
- package/docs-MachinaOs/logo/light.svg +0 -6
- package/docs-MachinaOs/nodes/ai-agent.mdx +0 -216
- package/docs-MachinaOs/nodes/ai-models.mdx +0 -240
- package/docs-MachinaOs/nodes/android.mdx +0 -411
- package/docs-MachinaOs/nodes/overview.mdx +0 -181
- package/docs-MachinaOs/nodes/schedulers.mdx +0 -316
- package/docs-MachinaOs/nodes/webhooks.mdx +0 -330
- package/docs-MachinaOs/nodes/whatsapp.mdx +0 -305
- package/docs-MachinaOs/quickstart.mdx +0 -119
- package/docs-MachinaOs/tutorials/ai-agent-workflow.mdx +0 -177
- package/docs-MachinaOs/tutorials/android-automation.mdx +0 -242
- package/docs-MachinaOs/tutorials/first-workflow.mdx +0 -134
- package/docs-MachinaOs/tutorials/whatsapp-automation.mdx +0 -185
- package/nul +0 -0
- package/scripts/check-ports.ps1 +0 -33
- package/scripts/kill-port.ps1 +0 -154
|
@@ -1,706 +1,706 @@
|
|
|
1
|
-
"""Deployment Manager - Event-driven workflow deployment lifecycle.
|
|
2
|
-
|
|
3
|
-
Implements n8n/Conductor pattern where:
|
|
4
|
-
- Workflow is a template stored in memory
|
|
5
|
-
- Trigger events spawn independent execution runs
|
|
6
|
-
- Runs execute concurrently (up to max_concurrent_runs)
|
|
7
|
-
"""
|
|
8
|
-
|
|
9
|
-
import asyncio
|
|
10
|
-
import json
|
|
11
|
-
import time
|
|
12
|
-
from datetime import datetime
|
|
13
|
-
from typing import Dict, Any, List, Optional, Callable, TYPE_CHECKING
|
|
14
|
-
|
|
15
|
-
from core.logging import get_logger
|
|
16
|
-
from constants import WORKFLOW_TRIGGER_TYPES
|
|
17
|
-
from services import event_waiter
|
|
18
|
-
from .state import DeploymentState, TriggerInfo
|
|
19
|
-
from .triggers import TriggerManager
|
|
20
|
-
|
|
21
|
-
if TYPE_CHECKING:
|
|
22
|
-
from core.database import Database
|
|
23
|
-
|
|
24
|
-
logger = get_logger(__name__)
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
class DeploymentManager:
|
|
28
|
-
"""Manages event-driven workflow deployment.
|
|
29
|
-
|
|
30
|
-
Supports per-workflow deployments following n8n pattern:
|
|
31
|
-
- Each workflow can be deployed independently
|
|
32
|
-
- Multiple workflows can run concurrently
|
|
33
|
-
- Each deployment has its own state, triggers, and runs
|
|
34
|
-
"""
|
|
35
|
-
|
|
36
|
-
def __init__(
|
|
37
|
-
self,
|
|
38
|
-
database: "Database",
|
|
39
|
-
execute_workflow_fn: Callable,
|
|
40
|
-
store_output_fn: Callable,
|
|
41
|
-
broadcaster: Any,
|
|
42
|
-
):
|
|
43
|
-
self.database = database
|
|
44
|
-
self._execute_workflow = execute_workflow_fn
|
|
45
|
-
self._store_output = store_output_fn
|
|
46
|
-
self._broadcaster = broadcaster
|
|
47
|
-
|
|
48
|
-
# Per-workflow deployment state (n8n pattern)
|
|
49
|
-
self._deployments: Dict[str, DeploymentState] = {}
|
|
50
|
-
self._trigger_managers: Dict[str, TriggerManager] = {}
|
|
51
|
-
self._active_runs: Dict[str, Dict[str, asyncio.Task]] = {} # workflow_id -> {run_id: task}
|
|
52
|
-
self._run_counters: Dict[str, int] = {}
|
|
53
|
-
self._status_callbacks: Dict[str, Callable] = {}
|
|
54
|
-
self._cron_iterations: Dict[str, int] = {} # node_id -> iteration count
|
|
55
|
-
self._main_loop: Optional[asyncio.AbstractEventLoop] = None
|
|
56
|
-
|
|
57
|
-
self._settings = {
|
|
58
|
-
"stop_on_error": False,
|
|
59
|
-
"max_concurrent_runs": 100,
|
|
60
|
-
"use_parallel_executor": True
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
@property
|
|
64
|
-
def is_running(self) -> bool:
|
|
65
|
-
"""Check if ANY deployment is running (backward compatibility)."""
|
|
66
|
-
return any(state.is_running for state in self._deployments.values())
|
|
67
|
-
|
|
68
|
-
def is_workflow_deployed(self, workflow_id: str) -> bool:
|
|
69
|
-
"""Check if a specific workflow is deployed."""
|
|
70
|
-
state = self._deployments.get(workflow_id)
|
|
71
|
-
return state is not None and state.is_running
|
|
72
|
-
|
|
73
|
-
def get_deployed_workflows(self) -> List[str]:
|
|
74
|
-
"""Get list of deployed workflow IDs."""
|
|
75
|
-
return [wid for wid, state in self._deployments.items() if state.is_running]
|
|
76
|
-
|
|
77
|
-
# =========================================================================
|
|
78
|
-
# DEPLOYMENT LIFECYCLE
|
|
79
|
-
# =========================================================================
|
|
80
|
-
|
|
81
|
-
async def deploy(
|
|
82
|
-
self,
|
|
83
|
-
nodes: List[Dict],
|
|
84
|
-
edges: List[Dict],
|
|
85
|
-
session_id: str = "default",
|
|
86
|
-
status_callback: Optional[Callable] = None,
|
|
87
|
-
workflow_id: Optional[str] = None,
|
|
88
|
-
) -> Dict[str, Any]:
|
|
89
|
-
"""Deploy workflow in event-driven mode.
|
|
90
|
-
|
|
91
|
-
Args:
|
|
92
|
-
nodes: Workflow nodes
|
|
93
|
-
edges: Workflow edges
|
|
94
|
-
session_id: Session identifier
|
|
95
|
-
status_callback: Status update callback
|
|
96
|
-
workflow_id: Workflow ID for per-workflow deployment tracking
|
|
97
|
-
"""
|
|
98
|
-
# Generate workflow_id if not provided
|
|
99
|
-
if not workflow_id:
|
|
100
|
-
workflow_id = f"workflow_{int(time.time() * 1000)}"
|
|
101
|
-
|
|
102
|
-
# Check if THIS workflow is already deployed
|
|
103
|
-
if self.is_workflow_deployed(workflow_id):
|
|
104
|
-
return {
|
|
105
|
-
"success": False,
|
|
106
|
-
"error": f"Workflow {workflow_id} is already deployed",
|
|
107
|
-
"workflow_id": workflow_id,
|
|
108
|
-
"deployment_id": self._deployments[workflow_id].deployment_id
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
# Setup
|
|
112
|
-
deployment_id = f"deploy_{workflow_id}_{int(time.time() * 1000)}"
|
|
113
|
-
self._status_callbacks[workflow_id] = status_callback
|
|
114
|
-
self._run_counters[workflow_id] = 0
|
|
115
|
-
self._active_runs[workflow_id] = {}
|
|
116
|
-
|
|
117
|
-
try:
|
|
118
|
-
self._main_loop = asyncio.get_running_loop()
|
|
119
|
-
except RuntimeError:
|
|
120
|
-
self._main_loop = asyncio.get_event_loop()
|
|
121
|
-
|
|
122
|
-
# Create trigger manager for this workflow
|
|
123
|
-
trigger_manager = TriggerManager()
|
|
124
|
-
trigger_manager.set_main_loop(self._main_loop)
|
|
125
|
-
trigger_manager.set_running(True)
|
|
126
|
-
self._trigger_managers[workflow_id] = trigger_manager
|
|
127
|
-
|
|
128
|
-
# Load settings
|
|
129
|
-
await self._load_settings()
|
|
130
|
-
|
|
131
|
-
# Create state for this workflow
|
|
132
|
-
self._deployments[workflow_id] = DeploymentState(
|
|
133
|
-
deployment_id=deployment_id,
|
|
134
|
-
workflow_id=workflow_id,
|
|
135
|
-
is_running=True,
|
|
136
|
-
nodes=nodes,
|
|
137
|
-
edges=edges,
|
|
138
|
-
session_id=session_id,
|
|
139
|
-
settings=self._settings.copy()
|
|
140
|
-
)
|
|
141
|
-
|
|
142
|
-
logger.info("Deployment starting", deployment_id=deployment_id, workflow_id=workflow_id, nodes=len(nodes))
|
|
143
|
-
|
|
144
|
-
triggers_setup = []
|
|
145
|
-
|
|
146
|
-
try:
|
|
147
|
-
# Setup cron triggers
|
|
148
|
-
for cron_node in TriggerManager.find_cron_nodes(nodes):
|
|
149
|
-
info = await self._setup_cron_trigger(cron_node, workflow_id)
|
|
150
|
-
triggers_setup.append(info.to_dict())
|
|
151
|
-
|
|
152
|
-
# Find start and event triggers
|
|
153
|
-
start_nodes, event_triggers = TriggerManager.find_trigger_nodes(nodes, edges)
|
|
154
|
-
|
|
155
|
-
# Fire start nodes immediately
|
|
156
|
-
for node in start_nodes:
|
|
157
|
-
info = await self._fire_start_trigger(node, workflow_id)
|
|
158
|
-
triggers_setup.append(info.to_dict())
|
|
159
|
-
|
|
160
|
-
# Setup event triggers
|
|
161
|
-
for node in event_triggers:
|
|
162
|
-
info = await self._setup_event_trigger(node, workflow_id)
|
|
163
|
-
triggers_setup.append(info.to_dict())
|
|
164
|
-
|
|
165
|
-
# Notify started
|
|
166
|
-
await self._notify("started", {
|
|
167
|
-
"deployment_id": deployment_id,
|
|
168
|
-
"workflow_id": workflow_id,
|
|
169
|
-
"triggers": triggers_setup
|
|
170
|
-
}, workflow_id)
|
|
171
|
-
|
|
172
|
-
return {
|
|
173
|
-
"success": True,
|
|
174
|
-
"deployment_id": deployment_id,
|
|
175
|
-
"workflow_id": workflow_id,
|
|
176
|
-
"message": "Workflow deployed",
|
|
177
|
-
"triggers_setup": triggers_setup
|
|
178
|
-
}
|
|
179
|
-
|
|
180
|
-
except Exception as e:
|
|
181
|
-
logger.error("Deployment failed", workflow_id=workflow_id, error=str(e))
|
|
182
|
-
await self.cancel(workflow_id)
|
|
183
|
-
return {"success": False, "error": str(e), "workflow_id": workflow_id}
|
|
184
|
-
|
|
185
|
-
async def cancel(self, workflow_id: Optional[str] = None) -> Dict[str, Any]:
|
|
186
|
-
"""Cancel deployment for a specific workflow.
|
|
187
|
-
|
|
188
|
-
Args:
|
|
189
|
-
workflow_id: Workflow to cancel. If None, cancels the first running deployment.
|
|
190
|
-
"""
|
|
191
|
-
# Find workflow to cancel
|
|
192
|
-
if workflow_id:
|
|
193
|
-
if not self.is_workflow_deployed(workflow_id):
|
|
194
|
-
return {"success": False, "error": f"Workflow {workflow_id} is not deployed"}
|
|
195
|
-
else:
|
|
196
|
-
# Backward compatibility: cancel first running deployment
|
|
197
|
-
deployed = self.get_deployed_workflows()
|
|
198
|
-
if not deployed:
|
|
199
|
-
return {"success": False, "error": "No deployment running"}
|
|
200
|
-
workflow_id = deployed[0]
|
|
201
|
-
|
|
202
|
-
state = self._deployments.get(workflow_id)
|
|
203
|
-
if not state:
|
|
204
|
-
return {"success": False, "error": f"Deployment state not found for {workflow_id}"}
|
|
205
|
-
|
|
206
|
-
deployment_id = state.deployment_id
|
|
207
|
-
logger.info("Cancelling deployment", deployment_id=deployment_id, workflow_id=workflow_id)
|
|
208
|
-
|
|
209
|
-
# Get trigger manager for this workflow
|
|
210
|
-
trigger_manager = self._trigger_managers.get(workflow_id)
|
|
211
|
-
if trigger_manager:
|
|
212
|
-
trigger_manager.set_running(False)
|
|
213
|
-
|
|
214
|
-
# Cancel active runs for this workflow
|
|
215
|
-
workflow_runs = self._active_runs.get(workflow_id, {})
|
|
216
|
-
listener_nodes = trigger_manager.get_listener_node_ids() if trigger_manager else []
|
|
217
|
-
|
|
218
|
-
for task in workflow_runs.values():
|
|
219
|
-
if not task.done():
|
|
220
|
-
task.cancel()
|
|
221
|
-
|
|
222
|
-
if workflow_runs:
|
|
223
|
-
await asyncio.gather(*workflow_runs.values(), return_exceptions=True)
|
|
224
|
-
run_count = len(workflow_runs)
|
|
225
|
-
|
|
226
|
-
# Cleanup triggers for this workflow
|
|
227
|
-
listener_count = 0
|
|
228
|
-
cron_count = 0
|
|
229
|
-
cron_node_ids = []
|
|
230
|
-
if trigger_manager:
|
|
231
|
-
# Get cron node IDs before teardown (they'll be cleared)
|
|
232
|
-
cron_node_ids = trigger_manager.get_cron_node_ids()
|
|
233
|
-
listener_count = await trigger_manager.teardown_all_listeners()
|
|
234
|
-
cron_count = trigger_manager.teardown_all_crons()
|
|
235
|
-
|
|
236
|
-
# Reset cron trigger node statuses to idle
|
|
237
|
-
for node_id in cron_node_ids:
|
|
238
|
-
await self._broadcaster.update_node_status(node_id, "idle", {}, workflow_id=workflow_id)
|
|
239
|
-
|
|
240
|
-
# Reset listener node statuses to idle
|
|
241
|
-
for node_id in listener_nodes:
|
|
242
|
-
await self._broadcaster.update_node_status(node_id, "idle", {}, workflow_id=workflow_id)
|
|
243
|
-
|
|
244
|
-
# Cancel event waiters for nodes in this workflow
|
|
245
|
-
waiter_count = 0
|
|
246
|
-
for node in state.nodes:
|
|
247
|
-
waiter_count += event_waiter.cancel_for_node(node['id'])
|
|
248
|
-
|
|
249
|
-
# Clear cron iteration counters for this workflow's cron nodes
|
|
250
|
-
for node_id in cron_node_ids:
|
|
251
|
-
self._cron_iterations.pop(node_id, None)
|
|
252
|
-
|
|
253
|
-
# Clear state for this workflow
|
|
254
|
-
self._deployments.pop(workflow_id, None)
|
|
255
|
-
self._trigger_managers.pop(workflow_id, None)
|
|
256
|
-
self._active_runs.pop(workflow_id, None)
|
|
257
|
-
self._run_counters.pop(workflow_id, None)
|
|
258
|
-
self._status_callbacks.pop(workflow_id, None)
|
|
259
|
-
|
|
260
|
-
return {
|
|
261
|
-
"success": True,
|
|
262
|
-
"deployment_id": deployment_id,
|
|
263
|
-
"workflow_id": workflow_id,
|
|
264
|
-
"runs_cancelled": run_count,
|
|
265
|
-
"listeners_cancelled": listener_count,
|
|
266
|
-
"crons_cancelled": cron_count,
|
|
267
|
-
"waiters_cancelled": waiter_count,
|
|
268
|
-
"cancelled_listener_node_ids": listener_nodes
|
|
269
|
-
}
|
|
270
|
-
|
|
271
|
-
def get_status(self, workflow_id: Optional[str] = None) -> Dict[str, Any]:
|
|
272
|
-
"""Get deployment status.
|
|
273
|
-
|
|
274
|
-
Args:
|
|
275
|
-
workflow_id: Get status for specific workflow. If None, returns global status.
|
|
276
|
-
"""
|
|
277
|
-
if workflow_id:
|
|
278
|
-
# Status for specific workflow
|
|
279
|
-
state = self._deployments.get(workflow_id)
|
|
280
|
-
if not state or not state.is_running:
|
|
281
|
-
return {"deployed": False, "deployment_id": None, "active_runs": 0, "workflow_id": workflow_id}
|
|
282
|
-
|
|
283
|
-
workflow_runs = self._active_runs.get(workflow_id, {})
|
|
284
|
-
execution_runs = [k for k in workflow_runs if k.startswith("run_")]
|
|
285
|
-
return {
|
|
286
|
-
"deployed": True,
|
|
287
|
-
"deployment_id": state.deployment_id,
|
|
288
|
-
"workflow_id": workflow_id,
|
|
289
|
-
"active_runs": len(execution_runs),
|
|
290
|
-
"active_listeners": len(workflow_runs) - len(execution_runs),
|
|
291
|
-
"run_counter": self._run_counters.get(workflow_id, 0),
|
|
292
|
-
"deployed_at": state.deployed_at
|
|
293
|
-
}
|
|
294
|
-
|
|
295
|
-
# Global status (backward compatibility)
|
|
296
|
-
if not self.is_running:
|
|
297
|
-
return {"deployed": False, "deployment_id": None, "active_runs": 0}
|
|
298
|
-
|
|
299
|
-
# Aggregate across all workflows
|
|
300
|
-
total_runs = 0
|
|
301
|
-
total_listeners = 0
|
|
302
|
-
total_run_counter = 0
|
|
303
|
-
deployed_workflows = []
|
|
304
|
-
|
|
305
|
-
for wid, state in self._deployments.items():
|
|
306
|
-
if state.is_running:
|
|
307
|
-
deployed_workflows.append(wid)
|
|
308
|
-
workflow_runs = self._active_runs.get(wid, {})
|
|
309
|
-
execution_runs = [k for k in workflow_runs if k.startswith("run_")]
|
|
310
|
-
total_runs += len(execution_runs)
|
|
311
|
-
total_listeners += len(workflow_runs) - len(execution_runs)
|
|
312
|
-
total_run_counter += self._run_counters.get(wid, 0)
|
|
313
|
-
|
|
314
|
-
return {
|
|
315
|
-
"deployed": True,
|
|
316
|
-
"deployed_workflows": deployed_workflows,
|
|
317
|
-
"active_runs": total_runs,
|
|
318
|
-
"active_listeners": total_listeners,
|
|
319
|
-
"run_counter": total_run_counter
|
|
320
|
-
}
|
|
321
|
-
|
|
322
|
-
# =========================================================================
|
|
323
|
-
# TRIGGER SETUP
|
|
324
|
-
# =========================================================================
|
|
325
|
-
|
|
326
|
-
async def _setup_cron_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
|
|
327
|
-
"""Setup cron trigger for a node."""
|
|
328
|
-
node_id = node['id']
|
|
329
|
-
params = await self.database.get_node_parameters(node_id) or {}
|
|
330
|
-
|
|
331
|
-
cron_expr = TriggerManager.build_cron_expression(params)
|
|
332
|
-
timezone = params.get('timezone', 'UTC')
|
|
333
|
-
frequency = params.get('frequency', 'minutes')
|
|
334
|
-
|
|
335
|
-
# Initialize iteration counter for this cron node
|
|
336
|
-
self._cron_iterations[node_id] = 0
|
|
337
|
-
|
|
338
|
-
# Build schedule description for output
|
|
339
|
-
schedule_desc = self._get_schedule_description(params)
|
|
340
|
-
|
|
341
|
-
def on_tick():
|
|
342
|
-
if self._main_loop and self._main_loop.is_running():
|
|
343
|
-
# Increment iteration counter
|
|
344
|
-
self._cron_iterations[node_id] = self._cron_iterations.get(node_id, 0) + 1
|
|
345
|
-
iteration = self._cron_iterations[node_id]
|
|
346
|
-
|
|
347
|
-
trigger_data = {
|
|
348
|
-
'node_id': node_id,
|
|
349
|
-
'timestamp': datetime.now().isoformat(),
|
|
350
|
-
'trigger_type': 'cron',
|
|
351
|
-
'event_data': {
|
|
352
|
-
'timestamp': datetime.now().isoformat(),
|
|
353
|
-
'iteration': iteration,
|
|
354
|
-
'frequency': frequency,
|
|
355
|
-
'timezone': timezone,
|
|
356
|
-
'schedule': schedule_desc,
|
|
357
|
-
'cron_expression': cron_expr
|
|
358
|
-
}
|
|
359
|
-
}
|
|
360
|
-
asyncio.run_coroutine_threadsafe(
|
|
361
|
-
self._spawn_run(node_id, trigger_data, workflow_id=workflow_id),
|
|
362
|
-
self._main_loop
|
|
363
|
-
)
|
|
364
|
-
|
|
365
|
-
trigger_manager = self._trigger_managers.get(workflow_id)
|
|
366
|
-
if not trigger_manager:
|
|
367
|
-
raise RuntimeError(f"No trigger manager for workflow {workflow_id}")
|
|
368
|
-
|
|
369
|
-
job_id = trigger_manager.setup_cron(node_id, cron_expr, timezone, on_tick)
|
|
370
|
-
|
|
371
|
-
# Broadcast waiting status for cron trigger (like event triggers do)
|
|
372
|
-
await self._broadcaster.update_node_status(node_id, "waiting", {
|
|
373
|
-
"message": f"Waiting for schedule: {cron_expr}",
|
|
374
|
-
"cron_expression": cron_expr,
|
|
375
|
-
"timezone": timezone,
|
|
376
|
-
"job_id": job_id
|
|
377
|
-
}, workflow_id=workflow_id)
|
|
378
|
-
|
|
379
|
-
return TriggerInfo(node_id, "cron", job_id=job_id)
|
|
380
|
-
|
|
381
|
-
async def _fire_start_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
|
|
382
|
-
"""Fire a start trigger immediately."""
|
|
383
|
-
node_id = node['id']
|
|
384
|
-
params = await self.database.get_node_parameters(node_id) or {}
|
|
385
|
-
|
|
386
|
-
initial_data_str = params.get('initialData', '{}')
|
|
387
|
-
try:
|
|
388
|
-
initial_data = json.loads(initial_data_str) if initial_data_str else {}
|
|
389
|
-
except json.JSONDecodeError:
|
|
390
|
-
initial_data = {}
|
|
391
|
-
|
|
392
|
-
trigger_data = {
|
|
393
|
-
'node_id': node_id,
|
|
394
|
-
'timestamp': datetime.now().isoformat(),
|
|
395
|
-
'trigger_type': 'start',
|
|
396
|
-
'event_data': initial_data
|
|
397
|
-
}
|
|
398
|
-
|
|
399
|
-
await self._spawn_run(node_id, trigger_data, workflow_id=workflow_id)
|
|
400
|
-
return TriggerInfo(node_id, "start", fired=True)
|
|
401
|
-
|
|
402
|
-
async def _setup_event_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
|
|
403
|
-
"""Setup event-based trigger."""
|
|
404
|
-
node_id = node['id']
|
|
405
|
-
node_type = node.get('type', '')
|
|
406
|
-
params = await self.database.get_node_parameters(node_id) or {}
|
|
407
|
-
|
|
408
|
-
async def on_event(event_data: Dict):
|
|
409
|
-
trigger_data = {
|
|
410
|
-
'node_id': node_id,
|
|
411
|
-
'timestamp': datetime.now().isoformat(),
|
|
412
|
-
'trigger_type': node_type,
|
|
413
|
-
'event_data': event_data
|
|
414
|
-
}
|
|
415
|
-
await self._spawn_run(node_id, trigger_data, wait=True, workflow_id=workflow_id)
|
|
416
|
-
|
|
417
|
-
trigger_manager = self._trigger_managers.get(workflow_id)
|
|
418
|
-
if not trigger_manager:
|
|
419
|
-
raise RuntimeError(f"No trigger manager for workflow {workflow_id}")
|
|
420
|
-
|
|
421
|
-
await trigger_manager.setup_event_trigger(
|
|
422
|
-
node_id, node_type, params, on_event, self._broadcaster,
|
|
423
|
-
workflow_id=workflow_id
|
|
424
|
-
)
|
|
425
|
-
return TriggerInfo(node_id, node_type)
|
|
426
|
-
|
|
427
|
-
# =========================================================================
|
|
428
|
-
# EXECUTION RUNS
|
|
429
|
-
# =========================================================================
|
|
430
|
-
|
|
431
|
-
async def _spawn_run(
|
|
432
|
-
self,
|
|
433
|
-
trigger_node_id: str,
|
|
434
|
-
trigger_data: Dict[str, Any],
|
|
435
|
-
wait: bool = False,
|
|
436
|
-
workflow_id: Optional[str] = None
|
|
437
|
-
) -> Optional[asyncio.Task]:
|
|
438
|
-
"""Spawn a new execution run for a specific workflow."""
|
|
439
|
-
if not workflow_id:
|
|
440
|
-
# Backward compatibility: find workflow for this trigger node
|
|
441
|
-
for wid, state in self._deployments.items():
|
|
442
|
-
if state.is_running and any(n['id'] == trigger_node_id for n in state.nodes):
|
|
443
|
-
workflow_id = wid
|
|
444
|
-
break
|
|
445
|
-
|
|
446
|
-
if not workflow_id or not self.is_workflow_deployed(workflow_id):
|
|
447
|
-
return None
|
|
448
|
-
|
|
449
|
-
state = self._deployments[workflow_id]
|
|
450
|
-
|
|
451
|
-
# Check concurrent limit for this workflow
|
|
452
|
-
workflow_runs = self._active_runs.get(workflow_id, {})
|
|
453
|
-
active_count = sum(1 for k in workflow_runs if k.startswith("run_"))
|
|
454
|
-
max_concurrent = self._settings.get("max_concurrent_runs", 100)
|
|
455
|
-
if active_count >= max_concurrent:
|
|
456
|
-
logger.warning("Max concurrent runs reached", workflow_id=workflow_id, active=active_count)
|
|
457
|
-
return None
|
|
458
|
-
|
|
459
|
-
# Generate run ID
|
|
460
|
-
self._run_counters[workflow_id] = self._run_counters.get(workflow_id, 0) + 1
|
|
461
|
-
run_id = f"run_{state.deployment_id}_{self._run_counters[workflow_id]}"
|
|
462
|
-
|
|
463
|
-
await self._notify("run_started", {
|
|
464
|
-
"run_id": run_id,
|
|
465
|
-
"workflow_id": workflow_id,
|
|
466
|
-
"trigger_node_id": trigger_node_id,
|
|
467
|
-
"active_runs": active_count + 1
|
|
468
|
-
}, workflow_id)
|
|
469
|
-
|
|
470
|
-
async def execute():
|
|
471
|
-
try:
|
|
472
|
-
result = await self._execute_from_trigger(
|
|
473
|
-
run_id, trigger_node_id, trigger_data, workflow_id
|
|
474
|
-
)
|
|
475
|
-
await self._notify("run_completed", {
|
|
476
|
-
"run_id": run_id,
|
|
477
|
-
"workflow_id": workflow_id,
|
|
478
|
-
"success": result.get("success", False),
|
|
479
|
-
"execution_time": result.get("execution_time")
|
|
480
|
-
}, workflow_id)
|
|
481
|
-
except asyncio.CancelledError:
|
|
482
|
-
logger.debug("Run cancelled", run_id=run_id, workflow_id=workflow_id)
|
|
483
|
-
except Exception as e:
|
|
484
|
-
logger.error("Run failed", run_id=run_id, workflow_id=workflow_id, error=str(e))
|
|
485
|
-
await self._notify("run_failed", {"run_id": run_id, "error": str(e)}, workflow_id)
|
|
486
|
-
finally:
|
|
487
|
-
if workflow_id in self._active_runs:
|
|
488
|
-
self._active_runs[workflow_id].pop(run_id, None)
|
|
489
|
-
|
|
490
|
-
task = asyncio.create_task(execute())
|
|
491
|
-
if workflow_id not in self._active_runs:
|
|
492
|
-
self._active_runs[workflow_id] = {}
|
|
493
|
-
self._active_runs[workflow_id][run_id] = task
|
|
494
|
-
|
|
495
|
-
if wait:
|
|
496
|
-
try:
|
|
497
|
-
await task
|
|
498
|
-
except (asyncio.CancelledError, Exception):
|
|
499
|
-
pass
|
|
500
|
-
return None
|
|
501
|
-
|
|
502
|
-
return task
|
|
503
|
-
|
|
504
|
-
async def _execute_from_trigger(
|
|
505
|
-
self,
|
|
506
|
-
run_id: str,
|
|
507
|
-
trigger_node_id: str,
|
|
508
|
-
trigger_data: Dict[str, Any],
|
|
509
|
-
workflow_id: str
|
|
510
|
-
) -> Dict[str, Any]:
|
|
511
|
-
"""Execute workflow from a trigger node."""
|
|
512
|
-
state = self._deployments.get(workflow_id)
|
|
513
|
-
if not state:
|
|
514
|
-
return {"success": False, "error": f"Workflow {workflow_id} not deployed"}
|
|
515
|
-
|
|
516
|
-
start_time = time.time()
|
|
517
|
-
run_session_id = f"{state.session_id}_{run_id}"
|
|
518
|
-
|
|
519
|
-
# Store trigger output
|
|
520
|
-
trigger_output = trigger_data.get('event_data', trigger_data)
|
|
521
|
-
await self._store_output(run_session_id, trigger_node_id, "output_0", trigger_output)
|
|
522
|
-
|
|
523
|
-
# Get downstream nodes
|
|
524
|
-
downstream = self._get_downstream_nodes(
|
|
525
|
-
trigger_node_id,
|
|
526
|
-
state.nodes,
|
|
527
|
-
state.edges
|
|
528
|
-
)
|
|
529
|
-
|
|
530
|
-
if not downstream:
|
|
531
|
-
return {
|
|
532
|
-
"success": True,
|
|
533
|
-
"run_id": run_id,
|
|
534
|
-
"workflow_id": workflow_id,
|
|
535
|
-
"nodes_executed": [trigger_node_id],
|
|
536
|
-
"execution_time": time.time() - start_time,
|
|
537
|
-
"message": "No downstream nodes"
|
|
538
|
-
}
|
|
539
|
-
|
|
540
|
-
# Build filtered graph
|
|
541
|
-
run_filter = {trigger_node_id} | {n['id'] for n in downstream}
|
|
542
|
-
logger.debug(f"[Run] run_filter has {len(run_filter)} nodes")
|
|
543
|
-
|
|
544
|
-
filtered_nodes = []
|
|
545
|
-
for node in state.nodes:
|
|
546
|
-
if node['id'] not in run_filter:
|
|
547
|
-
continue
|
|
548
|
-
node_copy = node.copy()
|
|
549
|
-
if node['id'] == trigger_node_id:
|
|
550
|
-
node_copy['_pre_executed'] = True
|
|
551
|
-
node_copy['_trigger_output'] = trigger_output
|
|
552
|
-
filtered_nodes.append(node_copy)
|
|
553
|
-
|
|
554
|
-
filtered_edges = [
|
|
555
|
-
e for e in state.edges
|
|
556
|
-
if e.get('source') in run_filter and e.get('target') in run_filter
|
|
557
|
-
]
|
|
558
|
-
logger.debug(f"[Run] filtered_edges: {len(filtered_edges)} edges")
|
|
559
|
-
|
|
560
|
-
# Execute filtered graph with deployment's workflow_id for scoped status
|
|
561
|
-
# Use Temporal for proper parallel branch execution
|
|
562
|
-
status_callback = self._status_callbacks.get(workflow_id)
|
|
563
|
-
result = await self._execute_workflow(
|
|
564
|
-
nodes=filtered_nodes,
|
|
565
|
-
edges=filtered_edges,
|
|
566
|
-
session_id=run_session_id,
|
|
567
|
-
status_callback=status_callback,
|
|
568
|
-
skip_clear_outputs=True,
|
|
569
|
-
workflow_id=workflow_id, # Pass deployment's workflow_id for status scoping
|
|
570
|
-
use_temporal=True, # Force Temporal for parallel node execution
|
|
571
|
-
)
|
|
572
|
-
|
|
573
|
-
result["run_id"] = run_id
|
|
574
|
-
result["workflow_id"] = workflow_id
|
|
575
|
-
result["trigger_node_id"] = trigger_node_id
|
|
576
|
-
return result
|
|
577
|
-
|
|
578
|
-
def _get_downstream_nodes(
|
|
579
|
-
self,
|
|
580
|
-
node_id: str,
|
|
581
|
-
nodes: List[Dict],
|
|
582
|
-
edges: List[Dict]
|
|
583
|
-
) -> List[Dict]:
|
|
584
|
-
"""Get all downstream nodes from a trigger."""
|
|
585
|
-
downstream_ids = set()
|
|
586
|
-
node_types = {n['id']: n.get('type', '') for n in nodes}
|
|
587
|
-
nodes_with_inputs = {e.get('target') for e in edges if e.get('target')}
|
|
588
|
-
|
|
589
|
-
def collect(current_id: str):
|
|
590
|
-
for edge in edges:
|
|
591
|
-
if edge.get('source') != current_id:
|
|
592
|
-
continue
|
|
593
|
-
target_id = edge.get('target')
|
|
594
|
-
if not target_id or target_id in downstream_ids:
|
|
595
|
-
continue
|
|
596
|
-
|
|
597
|
-
target_type = node_types.get(target_id, '')
|
|
598
|
-
is_trigger = target_type in WORKFLOW_TRIGGER_TYPES
|
|
599
|
-
has_inputs = target_id in nodes_with_inputs
|
|
600
|
-
|
|
601
|
-
# Stop at independent triggers (no inputs)
|
|
602
|
-
if is_trigger and not has_inputs:
|
|
603
|
-
continue
|
|
604
|
-
|
|
605
|
-
downstream_ids.add(target_id)
|
|
606
|
-
collect(target_id)
|
|
607
|
-
|
|
608
|
-
collect(node_id)
|
|
609
|
-
|
|
610
|
-
# Include config nodes connected to downstream nodes
|
|
611
|
-
for edge in edges:
|
|
612
|
-
target = edge.get('target')
|
|
613
|
-
source = edge.get('source')
|
|
614
|
-
handle = edge.get('targetHandle', '')
|
|
615
|
-
|
|
616
|
-
is_config = handle and handle.startswith('input-') and handle != 'input-main'
|
|
617
|
-
if is_config and target in downstream_ids and source not in downstream_ids:
|
|
618
|
-
downstream_ids.add(source)
|
|
619
|
-
|
|
620
|
-
# Include sub-nodes connected to toolkit nodes (n8n Sub-Node pattern)
|
|
621
|
-
# Android service nodes connect to androidTool's input-main, not config handles
|
|
622
|
-
# These need to be included so the toolkit can discover its connected services
|
|
623
|
-
toolkit_node_ids = {n['id'] for n in nodes if n.get('type') == 'androidTool' and n['id'] in downstream_ids}
|
|
624
|
-
for edge in edges:
|
|
625
|
-
target = edge.get('target')
|
|
626
|
-
source = edge.get('source')
|
|
627
|
-
# Include nodes that connect to toolkit nodes
|
|
628
|
-
if target in toolkit_node_ids and source not in downstream_ids:
|
|
629
|
-
downstream_ids.add(source)
|
|
630
|
-
logger.debug(f"[Deployment] Including sub-node {source} connected to toolkit {target}")
|
|
631
|
-
|
|
632
|
-
return [n for n in nodes if n['id'] in downstream_ids]
|
|
633
|
-
|
|
634
|
-
# =========================================================================
|
|
635
|
-
# HELPERS
|
|
636
|
-
# =========================================================================
|
|
637
|
-
|
|
638
|
-
async def _load_settings(self):
|
|
639
|
-
"""Load deployment settings from database."""
|
|
640
|
-
try:
|
|
641
|
-
db_settings = await self.database.get_deployment_settings()
|
|
642
|
-
if db_settings:
|
|
643
|
-
self._settings.update({
|
|
644
|
-
"stop_on_error": db_settings.get("stop_on_error", False),
|
|
645
|
-
"max_concurrent_runs": db_settings.get("max_concurrent_runs", 100),
|
|
646
|
-
"use_parallel_executor": db_settings.get("use_parallel_executor", True)
|
|
647
|
-
})
|
|
648
|
-
except Exception:
|
|
649
|
-
pass
|
|
650
|
-
|
|
651
|
-
async def _notify(self, event: str, data: Dict[str, Any], workflow_id: Optional[str] = None):
|
|
652
|
-
"""Send status notification for a specific workflow."""
|
|
653
|
-
status_callback = None
|
|
654
|
-
if workflow_id:
|
|
655
|
-
status_callback = self._status_callbacks.get(workflow_id)
|
|
656
|
-
else:
|
|
657
|
-
# Backward compatibility: use first available callback
|
|
658
|
-
for cb in self._status_callbacks.values():
|
|
659
|
-
if cb:
|
|
660
|
-
status_callback = cb
|
|
661
|
-
break
|
|
662
|
-
|
|
663
|
-
if not status_callback:
|
|
664
|
-
return
|
|
665
|
-
|
|
666
|
-
try:
|
|
667
|
-
await status_callback("__deployment__", event, {
|
|
668
|
-
**data,
|
|
669
|
-
"workflow_id": workflow_id,
|
|
670
|
-
"timestamp": datetime.now().isoformat()
|
|
671
|
-
})
|
|
672
|
-
except Exception as e:
|
|
673
|
-
logger.warning("Status callback failed", workflow_id=workflow_id, error=str(e))
|
|
674
|
-
|
|
675
|
-
@staticmethod
|
|
676
|
-
def _get_schedule_description(params: Dict[str, Any]) -> str:
|
|
677
|
-
"""Get human-readable schedule description from parameters."""
|
|
678
|
-
frequency = params.get('frequency', 'minutes')
|
|
679
|
-
|
|
680
|
-
match frequency:
|
|
681
|
-
case 'seconds':
|
|
682
|
-
interval = params.get('interval', 30)
|
|
683
|
-
return f"Every {interval} seconds"
|
|
684
|
-
case 'minutes':
|
|
685
|
-
interval = params.get('intervalMinutes', 5)
|
|
686
|
-
return f"Every {interval} minutes"
|
|
687
|
-
case 'hours':
|
|
688
|
-
interval = params.get('intervalHours', 1)
|
|
689
|
-
return f"Every {interval} hours"
|
|
690
|
-
case 'days':
|
|
691
|
-
time_str = params.get('dailyTime', '09:00')
|
|
692
|
-
return f"Daily at {time_str}"
|
|
693
|
-
case 'weeks':
|
|
694
|
-
weekday = params.get('weekday', '1')
|
|
695
|
-
time_str = params.get('weeklyTime', '09:00')
|
|
696
|
-
days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
|
|
697
|
-
day_name = days[int(weekday)] if str(weekday).isdigit() else weekday
|
|
698
|
-
return f"Weekly on {day_name} at {time_str}"
|
|
699
|
-
case 'months':
|
|
700
|
-
day = params.get('monthDay', '1')
|
|
701
|
-
time_str = params.get('monthlyTime', '09:00')
|
|
702
|
-
return f"Monthly on day {day} at {time_str}"
|
|
703
|
-
case 'once':
|
|
704
|
-
return "Once (no repeat)"
|
|
705
|
-
case _:
|
|
706
|
-
return "Unknown schedule"
|
|
+"""Deployment Manager - Event-driven workflow deployment lifecycle.
+
+Implements n8n/Conductor pattern where:
+- Workflow is a template stored in memory
+- Trigger events spawn independent execution runs
+- Runs execute concurrently (up to max_concurrent_runs)
+"""
+
+import asyncio
+import json
+import time
+from datetime import datetime
+from typing import Dict, Any, List, Optional, Callable, TYPE_CHECKING
+
+from core.logging import get_logger
+from constants import WORKFLOW_TRIGGER_TYPES
+from services import event_waiter
+from .state import DeploymentState, TriggerInfo
+from .triggers import TriggerManager
+
+if TYPE_CHECKING:
+    from core.database import Database
+
+logger = get_logger(__name__)
+
+
+class DeploymentManager:
+    """Manages event-driven workflow deployment.
+
+    Supports per-workflow deployments following n8n pattern:
+    - Each workflow can be deployed independently
+    - Multiple workflows can run concurrently
+    - Each deployment has its own state, triggers, and runs
+    """
+
+    def __init__(
+        self,
+        database: "Database",
+        execute_workflow_fn: Callable,
+        store_output_fn: Callable,
+        broadcaster: Any,
+    ):
+        self.database = database
+        self._execute_workflow = execute_workflow_fn
+        self._store_output = store_output_fn
+        self._broadcaster = broadcaster
+
+        # Per-workflow deployment state (n8n pattern)
+        self._deployments: Dict[str, DeploymentState] = {}
+        self._trigger_managers: Dict[str, TriggerManager] = {}
+        self._active_runs: Dict[str, Dict[str, asyncio.Task]] = {}  # workflow_id -> {run_id: task}
+        self._run_counters: Dict[str, int] = {}
+        self._status_callbacks: Dict[str, Callable] = {}
+        self._cron_iterations: Dict[str, int] = {}  # node_id -> iteration count
+        self._main_loop: Optional[asyncio.AbstractEventLoop] = None
+
+        self._settings = {
+            "stop_on_error": False,
+            "max_concurrent_runs": 100,
+            "use_parallel_executor": True
+        }
+
+    @property
+    def is_running(self) -> bool:
+        """Check if ANY deployment is running (backward compatibility)."""
+        return any(state.is_running for state in self._deployments.values())
+
+    def is_workflow_deployed(self, workflow_id: str) -> bool:
+        """Check if a specific workflow is deployed."""
+        state = self._deployments.get(workflow_id)
+        return state is not None and state.is_running
+
+    def get_deployed_workflows(self) -> List[str]:
+        """Get list of deployed workflow IDs."""
+        return [wid for wid, state in self._deployments.items() if state.is_running]
+
+    # =========================================================================
+    # DEPLOYMENT LIFECYCLE
+    # =========================================================================
+
+    async def deploy(
+        self,
+        nodes: List[Dict],
+        edges: List[Dict],
+        session_id: str = "default",
+        status_callback: Optional[Callable] = None,
+        workflow_id: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Deploy workflow in event-driven mode.
+
+        Args:
+            nodes: Workflow nodes
+            edges: Workflow edges
+            session_id: Session identifier
+            status_callback: Status update callback
+            workflow_id: Workflow ID for per-workflow deployment tracking
+        """
+        # Generate workflow_id if not provided
+        if not workflow_id:
+            workflow_id = f"workflow_{int(time.time() * 1000)}"
+
+        # Check if THIS workflow is already deployed
+        if self.is_workflow_deployed(workflow_id):
+            return {
+                "success": False,
+                "error": f"Workflow {workflow_id} is already deployed",
+                "workflow_id": workflow_id,
+                "deployment_id": self._deployments[workflow_id].deployment_id
+            }
+
+        # Setup
+        deployment_id = f"deploy_{workflow_id}_{int(time.time() * 1000)}"
+        self._status_callbacks[workflow_id] = status_callback
+        self._run_counters[workflow_id] = 0
+        self._active_runs[workflow_id] = {}
+
+        try:
+            self._main_loop = asyncio.get_running_loop()
+        except RuntimeError:
+            self._main_loop = asyncio.get_event_loop()
+
+        # Create trigger manager for this workflow
+        trigger_manager = TriggerManager()
+        trigger_manager.set_main_loop(self._main_loop)
+        trigger_manager.set_running(True)
+        self._trigger_managers[workflow_id] = trigger_manager
+
+        # Load settings
+        await self._load_settings()
+
+        # Create state for this workflow
+        self._deployments[workflow_id] = DeploymentState(
+            deployment_id=deployment_id,
+            workflow_id=workflow_id,
+            is_running=True,
+            nodes=nodes,
+            edges=edges,
+            session_id=session_id,
+            settings=self._settings.copy()
+        )
+
+        logger.info("Deployment starting", deployment_id=deployment_id, workflow_id=workflow_id, nodes=len(nodes))
+
+        triggers_setup = []
+
+        try:
+            # Setup cron triggers
+            for cron_node in TriggerManager.find_cron_nodes(nodes):
+                info = await self._setup_cron_trigger(cron_node, workflow_id)
+                triggers_setup.append(info.to_dict())
+
+            # Find start and event triggers
+            start_nodes, event_triggers = TriggerManager.find_trigger_nodes(nodes, edges)
+
+            # Fire start nodes immediately
+            for node in start_nodes:
+                info = await self._fire_start_trigger(node, workflow_id)
+                triggers_setup.append(info.to_dict())
+
+            # Setup event triggers
+            for node in event_triggers:
+                info = await self._setup_event_trigger(node, workflow_id)
+                triggers_setup.append(info.to_dict())
+
+            # Notify started
+            await self._notify("started", {
+                "deployment_id": deployment_id,
+                "workflow_id": workflow_id,
+                "triggers": triggers_setup
+            }, workflow_id)
+
+            return {
+                "success": True,
+                "deployment_id": deployment_id,
+                "workflow_id": workflow_id,
+                "message": "Workflow deployed",
+                "triggers_setup": triggers_setup
+            }
+
+        except Exception as e:
+            logger.error("Deployment failed", workflow_id=workflow_id, error=str(e))
+            await self.cancel(workflow_id)
+            return {"success": False, "error": str(e), "workflow_id": workflow_id}
+
+    async def cancel(self, workflow_id: Optional[str] = None) -> Dict[str, Any]:
+        """Cancel deployment for a specific workflow.
+
+        Args:
+            workflow_id: Workflow to cancel. If None, cancels the first running deployment.
+        """
+        # Find workflow to cancel
+        if workflow_id:
+            if not self.is_workflow_deployed(workflow_id):
+                return {"success": False, "error": f"Workflow {workflow_id} is not deployed"}
+        else:
+            # Backward compatibility: cancel first running deployment
+            deployed = self.get_deployed_workflows()
+            if not deployed:
+                return {"success": False, "error": "No deployment running"}
+            workflow_id = deployed[0]
+
+        state = self._deployments.get(workflow_id)
+        if not state:
+            return {"success": False, "error": f"Deployment state not found for {workflow_id}"}
+
+        deployment_id = state.deployment_id
+        logger.info("Cancelling deployment", deployment_id=deployment_id, workflow_id=workflow_id)
+
+        # Get trigger manager for this workflow
+        trigger_manager = self._trigger_managers.get(workflow_id)
+        if trigger_manager:
+            trigger_manager.set_running(False)
+
+        # Cancel active runs for this workflow
+        workflow_runs = self._active_runs.get(workflow_id, {})
+        listener_nodes = trigger_manager.get_listener_node_ids() if trigger_manager else []
+
+        for task in workflow_runs.values():
+            if not task.done():
+                task.cancel()
+
+        if workflow_runs:
+            await asyncio.gather(*workflow_runs.values(), return_exceptions=True)
+        run_count = len(workflow_runs)
+
+        # Cleanup triggers for this workflow
+        listener_count = 0
+        cron_count = 0
+        cron_node_ids = []
+        if trigger_manager:
+            # Get cron node IDs before teardown (they'll be cleared)
+            cron_node_ids = trigger_manager.get_cron_node_ids()
+            listener_count = await trigger_manager.teardown_all_listeners()
+            cron_count = trigger_manager.teardown_all_crons()
+
+        # Reset cron trigger node statuses to idle
+        for node_id in cron_node_ids:
+            await self._broadcaster.update_node_status(node_id, "idle", {}, workflow_id=workflow_id)
+
+        # Reset listener node statuses to idle
+        for node_id in listener_nodes:
+            await self._broadcaster.update_node_status(node_id, "idle", {}, workflow_id=workflow_id)
+
+        # Cancel event waiters for nodes in this workflow
+        waiter_count = 0
+        for node in state.nodes:
+            waiter_count += event_waiter.cancel_for_node(node['id'])
+
+        # Clear cron iteration counters for this workflow's cron nodes
+        for node_id in cron_node_ids:
+            self._cron_iterations.pop(node_id, None)
+
+        # Clear state for this workflow
+        self._deployments.pop(workflow_id, None)
+        self._trigger_managers.pop(workflow_id, None)
+        self._active_runs.pop(workflow_id, None)
+        self._run_counters.pop(workflow_id, None)
+        self._status_callbacks.pop(workflow_id, None)
+
+        return {
+            "success": True,
+            "deployment_id": deployment_id,
+            "workflow_id": workflow_id,
+            "runs_cancelled": run_count,
+            "listeners_cancelled": listener_count,
+            "crons_cancelled": cron_count,
+            "waiters_cancelled": waiter_count,
+            "cancelled_listener_node_ids": listener_nodes
+        }
+
+    def get_status(self, workflow_id: Optional[str] = None) -> Dict[str, Any]:
+        """Get deployment status.
+
+        Args:
+            workflow_id: Get status for specific workflow. If None, returns global status.
+        """
+        if workflow_id:
+            # Status for specific workflow
+            state = self._deployments.get(workflow_id)
+            if not state or not state.is_running:
+                return {"deployed": False, "deployment_id": None, "active_runs": 0, "workflow_id": workflow_id}
+
+            workflow_runs = self._active_runs.get(workflow_id, {})
+            execution_runs = [k for k in workflow_runs if k.startswith("run_")]
+            return {
+                "deployed": True,
+                "deployment_id": state.deployment_id,
+                "workflow_id": workflow_id,
+                "active_runs": len(execution_runs),
+                "active_listeners": len(workflow_runs) - len(execution_runs),
+                "run_counter": self._run_counters.get(workflow_id, 0),
+                "deployed_at": state.deployed_at
+            }
+
+        # Global status (backward compatibility)
+        if not self.is_running:
+            return {"deployed": False, "deployment_id": None, "active_runs": 0}
+
+        # Aggregate across all workflows
+        total_runs = 0
+        total_listeners = 0
+        total_run_counter = 0
+        deployed_workflows = []
+
+        for wid, state in self._deployments.items():
+            if state.is_running:
+                deployed_workflows.append(wid)
+                workflow_runs = self._active_runs.get(wid, {})
+                execution_runs = [k for k in workflow_runs if k.startswith("run_")]
+                total_runs += len(execution_runs)
+                total_listeners += len(workflow_runs) - len(execution_runs)
+                total_run_counter += self._run_counters.get(wid, 0)
+
+        return {
+            "deployed": True,
+            "deployed_workflows": deployed_workflows,
+            "active_runs": total_runs,
+            "active_listeners": total_listeners,
+            "run_counter": total_run_counter
+        }
+
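For orientation, a minimal usage sketch of the lifecycle API above (deploy, get_status, cancel). This is not part of the package source: the stub database, executor, store, and broadcaster objects are hypothetical stand-ins for the real wiring, and whether a given node type actually fires depends on TriggerManager, which lives in a separate module.

    # Sketch only; assumes DeploymentManager is importable from its module in this package.
    import asyncio

    class StubDatabase:
        # Only the two methods this file calls are stubbed.
        async def get_deployment_settings(self):
            return {"max_concurrent_runs": 10}

        async def get_node_parameters(self, node_id):
            return {}

    class StubBroadcaster:
        async def update_node_status(self, node_id, status, data, workflow_id=None):
            print(f"[{workflow_id}] {node_id} -> {status}")

    async def run_workflow(**kwargs):
        # Stand-in for the real executor passed as execute_workflow_fn.
        return {"success": True, "execution_time": 0.01}

    async def store_output(session_id, node_id, port, data):
        pass

    async def main():
        manager = DeploymentManager(
            database=StubDatabase(),
            execute_workflow_fn=run_workflow,
            store_output_fn=store_output,
            broadcaster=StubBroadcaster(),
        )
        nodes = [{"id": "trigger-1", "type": "manualTrigger"}]  # hypothetical node type
        result = await manager.deploy(nodes, edges=[], workflow_id="wf-demo")
        print(result["success"], manager.get_status("wf-demo"))
        print(await manager.cancel("wf-demo"))

    asyncio.run(main())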
+    # =========================================================================
+    # TRIGGER SETUP
+    # =========================================================================
+
+    async def _setup_cron_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
+        """Setup cron trigger for a node."""
+        node_id = node['id']
+        params = await self.database.get_node_parameters(node_id) or {}
+
+        cron_expr = TriggerManager.build_cron_expression(params)
+        timezone = params.get('timezone', 'UTC')
+        frequency = params.get('frequency', 'minutes')
+
+        # Initialize iteration counter for this cron node
+        self._cron_iterations[node_id] = 0
+
+        # Build schedule description for output
+        schedule_desc = self._get_schedule_description(params)
+
+        def on_tick():
+            if self._main_loop and self._main_loop.is_running():
+                # Increment iteration counter
+                self._cron_iterations[node_id] = self._cron_iterations.get(node_id, 0) + 1
+                iteration = self._cron_iterations[node_id]
+
+                trigger_data = {
+                    'node_id': node_id,
+                    'timestamp': datetime.now().isoformat(),
+                    'trigger_type': 'cron',
+                    'event_data': {
+                        'timestamp': datetime.now().isoformat(),
+                        'iteration': iteration,
+                        'frequency': frequency,
+                        'timezone': timezone,
+                        'schedule': schedule_desc,
+                        'cron_expression': cron_expr
+                    }
+                }
+                asyncio.run_coroutine_threadsafe(
+                    self._spawn_run(node_id, trigger_data, workflow_id=workflow_id),
+                    self._main_loop
+                )
+
+        trigger_manager = self._trigger_managers.get(workflow_id)
+        if not trigger_manager:
+            raise RuntimeError(f"No trigger manager for workflow {workflow_id}")
+
+        job_id = trigger_manager.setup_cron(node_id, cron_expr, timezone, on_tick)
+
+        # Broadcast waiting status for cron trigger (like event triggers do)
+        await self._broadcaster.update_node_status(node_id, "waiting", {
+            "message": f"Waiting for schedule: {cron_expr}",
+            "cron_expression": cron_expr,
+            "timezone": timezone,
+            "job_id": job_id
+        }, workflow_id=workflow_id)
+
+        return TriggerInfo(node_id, "cron", job_id=job_id)
+
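The on_tick callback above wraps each scheduled firing in a trigger_data dict before scheduling _spawn_run on the main loop; the event_data portion is what later gets stored as the trigger node's output_0. Roughly, the payload looks like the following (all values illustrative; the concrete cron expression comes from TriggerManager.build_cron_expression):

    trigger_data = {
        "node_id": "cron-1",
        "timestamp": "2025-01-01T09:05:00",
        "trigger_type": "cron",
        "event_data": {
            "timestamp": "2025-01-01T09:05:00",
            "iteration": 3,              # incremented once per firing via _cron_iterations
            "frequency": "minutes",
            "timezone": "UTC",
            "schedule": "Every 5 minutes",
            "cron_expression": "*/5 * * * *",
        },
    }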
+    async def _fire_start_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
+        """Fire a start trigger immediately."""
+        node_id = node['id']
+        params = await self.database.get_node_parameters(node_id) or {}
+
+        initial_data_str = params.get('initialData', '{}')
+        try:
+            initial_data = json.loads(initial_data_str) if initial_data_str else {}
+        except json.JSONDecodeError:
+            initial_data = {}
+
+        trigger_data = {
+            'node_id': node_id,
+            'timestamp': datetime.now().isoformat(),
+            'trigger_type': 'start',
+            'event_data': initial_data
+        }
+
+        await self._spawn_run(node_id, trigger_data, workflow_id=workflow_id)
+        return TriggerInfo(node_id, "start", fired=True)
+
+    async def _setup_event_trigger(self, node: Dict, workflow_id: str) -> TriggerInfo:
+        """Setup event-based trigger."""
+        node_id = node['id']
+        node_type = node.get('type', '')
+        params = await self.database.get_node_parameters(node_id) or {}
+
+        async def on_event(event_data: Dict):
+            trigger_data = {
+                'node_id': node_id,
+                'timestamp': datetime.now().isoformat(),
+                'trigger_type': node_type,
+                'event_data': event_data
+            }
+            await self._spawn_run(node_id, trigger_data, wait=True, workflow_id=workflow_id)
+
+        trigger_manager = self._trigger_managers.get(workflow_id)
+        if not trigger_manager:
+            raise RuntimeError(f"No trigger manager for workflow {workflow_id}")
+
+        await trigger_manager.setup_event_trigger(
+            node_id, node_type, params, on_event, self._broadcaster,
+            workflow_id=workflow_id
+        )
+        return TriggerInfo(node_id, node_type)
+
+    # =========================================================================
+    # EXECUTION RUNS
+    # =========================================================================
+
+    async def _spawn_run(
+        self,
+        trigger_node_id: str,
+        trigger_data: Dict[str, Any],
+        wait: bool = False,
+        workflow_id: Optional[str] = None
+    ) -> Optional[asyncio.Task]:
+        """Spawn a new execution run for a specific workflow."""
+        if not workflow_id:
+            # Backward compatibility: find workflow for this trigger node
+            for wid, state in self._deployments.items():
+                if state.is_running and any(n['id'] == trigger_node_id for n in state.nodes):
+                    workflow_id = wid
+                    break
+
+        if not workflow_id or not self.is_workflow_deployed(workflow_id):
+            return None
+
+        state = self._deployments[workflow_id]
+
+        # Check concurrent limit for this workflow
+        workflow_runs = self._active_runs.get(workflow_id, {})
+        active_count = sum(1 for k in workflow_runs if k.startswith("run_"))
+        max_concurrent = self._settings.get("max_concurrent_runs", 100)
+        if active_count >= max_concurrent:
+            logger.warning("Max concurrent runs reached", workflow_id=workflow_id, active=active_count)
+            return None
+
+        # Generate run ID
+        self._run_counters[workflow_id] = self._run_counters.get(workflow_id, 0) + 1
+        run_id = f"run_{state.deployment_id}_{self._run_counters[workflow_id]}"
+
+        await self._notify("run_started", {
+            "run_id": run_id,
+            "workflow_id": workflow_id,
+            "trigger_node_id": trigger_node_id,
+            "active_runs": active_count + 1
+        }, workflow_id)
+
+        async def execute():
+            try:
+                result = await self._execute_from_trigger(
+                    run_id, trigger_node_id, trigger_data, workflow_id
+                )
+                await self._notify("run_completed", {
+                    "run_id": run_id,
+                    "workflow_id": workflow_id,
+                    "success": result.get("success", False),
+                    "execution_time": result.get("execution_time")
+                }, workflow_id)
+            except asyncio.CancelledError:
+                logger.debug("Run cancelled", run_id=run_id, workflow_id=workflow_id)
+            except Exception as e:
+                logger.error("Run failed", run_id=run_id, workflow_id=workflow_id, error=str(e))
+                await self._notify("run_failed", {"run_id": run_id, "error": str(e)}, workflow_id)
+            finally:
+                if workflow_id in self._active_runs:
+                    self._active_runs[workflow_id].pop(run_id, None)
+
+        task = asyncio.create_task(execute())
+        if workflow_id not in self._active_runs:
+            self._active_runs[workflow_id] = {}
+        self._active_runs[workflow_id][run_id] = task
+
+        if wait:
+            try:
+                await task
+            except (asyncio.CancelledError, Exception):
+                pass
+            return None
+
+        return task
+
+    async def _execute_from_trigger(
+        self,
+        run_id: str,
+        trigger_node_id: str,
+        trigger_data: Dict[str, Any],
+        workflow_id: str
+    ) -> Dict[str, Any]:
+        """Execute workflow from a trigger node."""
+        state = self._deployments.get(workflow_id)
+        if not state:
+            return {"success": False, "error": f"Workflow {workflow_id} not deployed"}
+
+        start_time = time.time()
+        run_session_id = f"{state.session_id}_{run_id}"
+
+        # Store trigger output
+        trigger_output = trigger_data.get('event_data', trigger_data)
+        await self._store_output(run_session_id, trigger_node_id, "output_0", trigger_output)
+
+        # Get downstream nodes
+        downstream = self._get_downstream_nodes(
+            trigger_node_id,
+            state.nodes,
+            state.edges
+        )
+
+        if not downstream:
+            return {
+                "success": True,
+                "run_id": run_id,
+                "workflow_id": workflow_id,
+                "nodes_executed": [trigger_node_id],
+                "execution_time": time.time() - start_time,
+                "message": "No downstream nodes"
+            }
+
+        # Build filtered graph
+        run_filter = {trigger_node_id} | {n['id'] for n in downstream}
+        logger.debug(f"[Run] run_filter has {len(run_filter)} nodes")
+
+        filtered_nodes = []
+        for node in state.nodes:
+            if node['id'] not in run_filter:
+                continue
+            node_copy = node.copy()
+            if node['id'] == trigger_node_id:
+                node_copy['_pre_executed'] = True
+                node_copy['_trigger_output'] = trigger_output
+            filtered_nodes.append(node_copy)
+
+        filtered_edges = [
+            e for e in state.edges
+            if e.get('source') in run_filter and e.get('target') in run_filter
+        ]
+        logger.debug(f"[Run] filtered_edges: {len(filtered_edges)} edges")
+
+        # Execute filtered graph with deployment's workflow_id for scoped status
+        # Use Temporal for proper parallel branch execution
+        status_callback = self._status_callbacks.get(workflow_id)
+        result = await self._execute_workflow(
+            nodes=filtered_nodes,
+            edges=filtered_edges,
+            session_id=run_session_id,
+            status_callback=status_callback,
+            skip_clear_outputs=True,
+            workflow_id=workflow_id,  # Pass deployment's workflow_id for status scoping
+            use_temporal=True,  # Force Temporal for parallel node execution
+        )
+
+        result["run_id"] = run_id
+        result["workflow_id"] = workflow_id
+        result["trigger_node_id"] = trigger_node_id
+        return result
+
+    def _get_downstream_nodes(
+        self,
+        node_id: str,
+        nodes: List[Dict],
+        edges: List[Dict]
+    ) -> List[Dict]:
+        """Get all downstream nodes from a trigger."""
+        downstream_ids = set()
+        node_types = {n['id']: n.get('type', '') for n in nodes}
+        nodes_with_inputs = {e.get('target') for e in edges if e.get('target')}
+
+        def collect(current_id: str):
+            for edge in edges:
+                if edge.get('source') != current_id:
+                    continue
+                target_id = edge.get('target')
+                if not target_id or target_id in downstream_ids:
+                    continue
+
+                target_type = node_types.get(target_id, '')
+                is_trigger = target_type in WORKFLOW_TRIGGER_TYPES
+                has_inputs = target_id in nodes_with_inputs
+
+                # Stop at independent triggers (no inputs)
+                if is_trigger and not has_inputs:
+                    continue
+
+                downstream_ids.add(target_id)
+                collect(target_id)
+
+        collect(node_id)
+
+        # Include config nodes connected to downstream nodes
+        for edge in edges:
+            target = edge.get('target')
+            source = edge.get('source')
+            handle = edge.get('targetHandle', '')
+
+            is_config = handle and handle.startswith('input-') and handle != 'input-main'
+            if is_config and target in downstream_ids and source not in downstream_ids:
+                downstream_ids.add(source)
+
+        # Include sub-nodes connected to toolkit nodes (n8n Sub-Node pattern)
+        # Android service nodes connect to androidTool's input-main, not config handles
+        # These need to be included so the toolkit can discover its connected services
+        toolkit_node_ids = {n['id'] for n in nodes if n.get('type') == 'androidTool' and n['id'] in downstream_ids}
+        for edge in edges:
+            target = edge.get('target')
+            source = edge.get('source')
+            # Include nodes that connect to toolkit nodes
+            if target in toolkit_node_ids and source not in downstream_ids:
+                downstream_ids.add(source)
+                logger.debug(f"[Deployment] Including sub-node {source} connected to toolkit {target}")
+
+        return [n for n in nodes if n['id'] in downstream_ids]
+
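A small worked example of the traversal above, with manager being a DeploymentManager instance and the node IDs and most node types hypothetical: plain edges are followed from the trigger, then the two follow-up passes pull in config-handle sources and androidTool sub-nodes attached to nodes already collected.

    nodes = [
        {"id": "cron-1", "type": "cronTrigger"},        # hypothetical trigger type
        {"id": "agent-1", "type": "aiAgent"},
        {"id": "model-1", "type": "openaiChatModel"},
        {"id": "toolkit-1", "type": "androidTool"},
        {"id": "sms-1", "type": "androidSms"},
    ]
    edges = [
        {"source": "cron-1", "target": "agent-1", "targetHandle": "input-main"},
        {"source": "model-1", "target": "agent-1", "targetHandle": "input-model"},  # config handle
        {"source": "agent-1", "target": "toolkit-1", "targetHandle": "input-main"},
        {"source": "sms-1", "target": "toolkit-1", "targetHandle": "input-main"},   # toolkit sub-node
    ]
    downstream = manager._get_downstream_nodes("cron-1", nodes, edges)
    # agent-1 and toolkit-1 come from the recursive walk, model-1 from the
    # config-handle pass, and sms-1 from the androidTool sub-node pass; the
    # trigger itself is added to the run separately by _execute_from_trigger.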
+    # =========================================================================
+    # HELPERS
+    # =========================================================================
+
+    async def _load_settings(self):
+        """Load deployment settings from database."""
+        try:
+            db_settings = await self.database.get_deployment_settings()
+            if db_settings:
+                self._settings.update({
+                    "stop_on_error": db_settings.get("stop_on_error", False),
+                    "max_concurrent_runs": db_settings.get("max_concurrent_runs", 100),
+                    "use_parallel_executor": db_settings.get("use_parallel_executor", True)
+                })
+        except Exception:
+            pass
+
+    async def _notify(self, event: str, data: Dict[str, Any], workflow_id: Optional[str] = None):
+        """Send status notification for a specific workflow."""
+        status_callback = None
+        if workflow_id:
+            status_callback = self._status_callbacks.get(workflow_id)
+        else:
+            # Backward compatibility: use first available callback
+            for cb in self._status_callbacks.values():
+                if cb:
+                    status_callback = cb
+                    break
+
+        if not status_callback:
+            return
+
+        try:
+            await status_callback("__deployment__", event, {
+                **data,
+                "workflow_id": workflow_id,
+                "timestamp": datetime.now().isoformat()
+            })
+        except Exception as e:
+            logger.warning("Status callback failed", workflow_id=workflow_id, error=str(e))
+
+    @staticmethod
+    def _get_schedule_description(params: Dict[str, Any]) -> str:
+        """Get human-readable schedule description from parameters."""
+        frequency = params.get('frequency', 'minutes')
+
+        match frequency:
+            case 'seconds':
+                interval = params.get('interval', 30)
+                return f"Every {interval} seconds"
+            case 'minutes':
+                interval = params.get('intervalMinutes', 5)
+                return f"Every {interval} minutes"
+            case 'hours':
+                interval = params.get('intervalHours', 1)
+                return f"Every {interval} hours"
+            case 'days':
+                time_str = params.get('dailyTime', '09:00')
+                return f"Daily at {time_str}"
+            case 'weeks':
+                weekday = params.get('weekday', '1')
+                time_str = params.get('weeklyTime', '09:00')
+                days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
+                day_name = days[int(weekday)] if str(weekday).isdigit() else weekday
+                return f"Weekly on {day_name} at {time_str}"
+            case 'months':
+                day = params.get('monthDay', '1')
+                time_str = params.get('monthlyTime', '09:00')
+                return f"Monthly on day {day} at {time_str}"
+            case 'once':
+                return "Once (no repeat)"
+            case _:
+                return "Unknown schedule"
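A few illustrative inputs and the strings the mapping above produces (it is a @staticmethod, so it can be called on the class; the match statement requires Python 3.10+):

    DeploymentManager._get_schedule_description({"frequency": "minutes", "intervalMinutes": 15})
    # -> "Every 15 minutes"
    DeploymentManager._get_schedule_description({"frequency": "days", "dailyTime": "07:30"})
    # -> "Daily at 07:30"
    DeploymentManager._get_schedule_description({"frequency": "weeks", "weekday": "5", "weeklyTime": "18:00"})
    # -> "Weekly on Friday at 18:00"
    DeploymentManager._get_schedule_description({})
    # -> "Every 5 minutes"  (defaults: frequency 'minutes', intervalMinutes 5)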