whale-code 6.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (319) hide show
  1. package/README.md +95 -0
  2. package/bin/swag-agent.js +9 -0
  3. package/bin/swagmanager-mcp.js +321 -0
  4. package/dist/cli/app.d.ts +26 -0
  5. package/dist/cli/app.js +64 -0
  6. package/dist/cli/chat/AgentSelector.d.ts +14 -0
  7. package/dist/cli/chat/AgentSelector.js +14 -0
  8. package/dist/cli/chat/ChatApp.d.ts +9 -0
  9. package/dist/cli/chat/ChatApp.js +267 -0
  10. package/dist/cli/chat/ChatInput.d.ts +39 -0
  11. package/dist/cli/chat/ChatInput.js +509 -0
  12. package/dist/cli/chat/MarkdownText.d.ts +10 -0
  13. package/dist/cli/chat/MarkdownText.js +20 -0
  14. package/dist/cli/chat/MessageList.d.ts +37 -0
  15. package/dist/cli/chat/MessageList.js +80 -0
  16. package/dist/cli/chat/ModelSelector.d.ts +20 -0
  17. package/dist/cli/chat/ModelSelector.js +73 -0
  18. package/dist/cli/chat/RewindViewer.d.ts +26 -0
  19. package/dist/cli/chat/RewindViewer.js +185 -0
  20. package/dist/cli/chat/StoreSelector.d.ts +14 -0
  21. package/dist/cli/chat/StoreSelector.js +24 -0
  22. package/dist/cli/chat/StreamingText.d.ts +12 -0
  23. package/dist/cli/chat/StreamingText.js +12 -0
  24. package/dist/cli/chat/SubagentPanel.d.ts +45 -0
  25. package/dist/cli/chat/SubagentPanel.js +110 -0
  26. package/dist/cli/chat/TeamPanel.d.ts +21 -0
  27. package/dist/cli/chat/TeamPanel.js +42 -0
  28. package/dist/cli/chat/ToolIndicator.d.ts +25 -0
  29. package/dist/cli/chat/ToolIndicator.js +436 -0
  30. package/dist/cli/chat/hooks/useAgentLoop.d.ts +39 -0
  31. package/dist/cli/chat/hooks/useAgentLoop.js +382 -0
  32. package/dist/cli/chat/hooks/useSlashCommands.d.ts +37 -0
  33. package/dist/cli/chat/hooks/useSlashCommands.js +387 -0
  34. package/dist/cli/commands/config-cmd.d.ts +10 -0
  35. package/dist/cli/commands/config-cmd.js +99 -0
  36. package/dist/cli/commands/doctor.d.ts +14 -0
  37. package/dist/cli/commands/doctor.js +172 -0
  38. package/dist/cli/commands/init.d.ts +16 -0
  39. package/dist/cli/commands/init.js +278 -0
  40. package/dist/cli/commands/mcp.d.ts +12 -0
  41. package/dist/cli/commands/mcp.js +162 -0
  42. package/dist/cli/login/LoginApp.d.ts +7 -0
  43. package/dist/cli/login/LoginApp.js +157 -0
  44. package/dist/cli/print-mode.d.ts +31 -0
  45. package/dist/cli/print-mode.js +202 -0
  46. package/dist/cli/serve-mode.d.ts +37 -0
  47. package/dist/cli/serve-mode.js +636 -0
  48. package/dist/cli/services/agent-definitions.d.ts +25 -0
  49. package/dist/cli/services/agent-definitions.js +91 -0
  50. package/dist/cli/services/agent-events.d.ts +178 -0
  51. package/dist/cli/services/agent-events.js +175 -0
  52. package/dist/cli/services/agent-loop.d.ts +90 -0
  53. package/dist/cli/services/agent-loop.js +762 -0
  54. package/dist/cli/services/agent-worker-base.d.ts +97 -0
  55. package/dist/cli/services/agent-worker-base.js +220 -0
  56. package/dist/cli/services/auth-service.d.ts +30 -0
  57. package/dist/cli/services/auth-service.js +160 -0
  58. package/dist/cli/services/background-processes.d.ts +126 -0
  59. package/dist/cli/services/background-processes.js +318 -0
  60. package/dist/cli/services/browser-auth.d.ts +24 -0
  61. package/dist/cli/services/browser-auth.js +180 -0
  62. package/dist/cli/services/claude-md-loader.d.ts +16 -0
  63. package/dist/cli/services/claude-md-loader.js +58 -0
  64. package/dist/cli/services/config-store.d.ts +47 -0
  65. package/dist/cli/services/config-store.js +79 -0
  66. package/dist/cli/services/debug-log.d.ts +10 -0
  67. package/dist/cli/services/debug-log.js +52 -0
  68. package/dist/cli/services/error-logger.d.ts +58 -0
  69. package/dist/cli/services/error-logger.js +269 -0
  70. package/dist/cli/services/file-history.d.ts +21 -0
  71. package/dist/cli/services/file-history.js +83 -0
  72. package/dist/cli/services/format-server-response.d.ts +16 -0
  73. package/dist/cli/services/format-server-response.js +440 -0
  74. package/dist/cli/services/git-context.d.ts +11 -0
  75. package/dist/cli/services/git-context.js +66 -0
  76. package/dist/cli/services/hooks.d.ts +85 -0
  77. package/dist/cli/services/hooks.js +258 -0
  78. package/dist/cli/services/interactive-tools.d.ts +125 -0
  79. package/dist/cli/services/interactive-tools.js +260 -0
  80. package/dist/cli/services/keybinding-manager.d.ts +52 -0
  81. package/dist/cli/services/keybinding-manager.js +115 -0
  82. package/dist/cli/services/local-tools.d.ts +22 -0
  83. package/dist/cli/services/local-tools.js +697 -0
  84. package/dist/cli/services/lsp-manager.d.ts +18 -0
  85. package/dist/cli/services/lsp-manager.js +717 -0
  86. package/dist/cli/services/mcp-client.d.ts +48 -0
  87. package/dist/cli/services/mcp-client.js +157 -0
  88. package/dist/cli/services/memory-manager.d.ts +16 -0
  89. package/dist/cli/services/memory-manager.js +57 -0
  90. package/dist/cli/services/model-manager.d.ts +18 -0
  91. package/dist/cli/services/model-manager.js +71 -0
  92. package/dist/cli/services/model-router.d.ts +26 -0
  93. package/dist/cli/services/model-router.js +149 -0
  94. package/dist/cli/services/permission-modes.d.ts +13 -0
  95. package/dist/cli/services/permission-modes.js +43 -0
  96. package/dist/cli/services/rewind.d.ts +84 -0
  97. package/dist/cli/services/rewind.js +194 -0
  98. package/dist/cli/services/ripgrep.d.ts +28 -0
  99. package/dist/cli/services/ripgrep.js +138 -0
  100. package/dist/cli/services/sandbox.d.ts +29 -0
  101. package/dist/cli/services/sandbox.js +97 -0
  102. package/dist/cli/services/server-tools.d.ts +61 -0
  103. package/dist/cli/services/server-tools.js +543 -0
  104. package/dist/cli/services/session-persistence.d.ts +23 -0
  105. package/dist/cli/services/session-persistence.js +99 -0
  106. package/dist/cli/services/subagent-worker.d.ts +19 -0
  107. package/dist/cli/services/subagent-worker.js +41 -0
  108. package/dist/cli/services/subagent.d.ts +47 -0
  109. package/dist/cli/services/subagent.js +647 -0
  110. package/dist/cli/services/system-prompt.d.ts +7 -0
  111. package/dist/cli/services/system-prompt.js +198 -0
  112. package/dist/cli/services/team-lead.d.ts +73 -0
  113. package/dist/cli/services/team-lead.js +512 -0
  114. package/dist/cli/services/team-state.d.ts +77 -0
  115. package/dist/cli/services/team-state.js +398 -0
  116. package/dist/cli/services/teammate.d.ts +31 -0
  117. package/dist/cli/services/teammate.js +689 -0
  118. package/dist/cli/services/telemetry.d.ts +61 -0
  119. package/dist/cli/services/telemetry.js +209 -0
  120. package/dist/cli/services/tools/agent-tools.d.ts +14 -0
  121. package/dist/cli/services/tools/agent-tools.js +347 -0
  122. package/dist/cli/services/tools/file-ops.d.ts +15 -0
  123. package/dist/cli/services/tools/file-ops.js +487 -0
  124. package/dist/cli/services/tools/search-tools.d.ts +8 -0
  125. package/dist/cli/services/tools/search-tools.js +186 -0
  126. package/dist/cli/services/tools/shell-exec.d.ts +10 -0
  127. package/dist/cli/services/tools/shell-exec.js +168 -0
  128. package/dist/cli/services/tools/task-manager.d.ts +28 -0
  129. package/dist/cli/services/tools/task-manager.js +209 -0
  130. package/dist/cli/services/tools/web-tools.d.ts +11 -0
  131. package/dist/cli/services/tools/web-tools.js +395 -0
  132. package/dist/cli/setup/SetupApp.d.ts +9 -0
  133. package/dist/cli/setup/SetupApp.js +191 -0
  134. package/dist/cli/shared/MatrixIntro.d.ts +4 -0
  135. package/dist/cli/shared/MatrixIntro.js +83 -0
  136. package/dist/cli/shared/Theme.d.ts +74 -0
  137. package/dist/cli/shared/Theme.js +127 -0
  138. package/dist/cli/shared/WhaleBanner.d.ts +10 -0
  139. package/dist/cli/shared/WhaleBanner.js +12 -0
  140. package/dist/cli/shared/markdown.d.ts +21 -0
  141. package/dist/cli/shared/markdown.js +756 -0
  142. package/dist/cli/status/StatusApp.d.ts +4 -0
  143. package/dist/cli/status/StatusApp.js +105 -0
  144. package/dist/cli/stores/StoreApp.d.ts +7 -0
  145. package/dist/cli/stores/StoreApp.js +81 -0
  146. package/dist/index.d.ts +15 -0
  147. package/dist/index.js +538 -0
  148. package/dist/local-agent/connection.d.ts +48 -0
  149. package/dist/local-agent/connection.js +332 -0
  150. package/dist/local-agent/discovery.d.ts +18 -0
  151. package/dist/local-agent/discovery.js +146 -0
  152. package/dist/local-agent/executor.d.ts +34 -0
  153. package/dist/local-agent/executor.js +241 -0
  154. package/dist/local-agent/index.d.ts +14 -0
  155. package/dist/local-agent/index.js +198 -0
  156. package/dist/node/adapters/base.d.ts +35 -0
  157. package/dist/node/adapters/base.js +10 -0
  158. package/dist/node/adapters/discord.d.ts +29 -0
  159. package/dist/node/adapters/discord.js +299 -0
  160. package/dist/node/adapters/email.d.ts +23 -0
  161. package/dist/node/adapters/email.js +218 -0
  162. package/dist/node/adapters/imessage.d.ts +17 -0
  163. package/dist/node/adapters/imessage.js +118 -0
  164. package/dist/node/adapters/slack.d.ts +26 -0
  165. package/dist/node/adapters/slack.js +259 -0
  166. package/dist/node/adapters/sms.d.ts +23 -0
  167. package/dist/node/adapters/sms.js +161 -0
  168. package/dist/node/adapters/telegram.d.ts +17 -0
  169. package/dist/node/adapters/telegram.js +101 -0
  170. package/dist/node/adapters/webchat.d.ts +27 -0
  171. package/dist/node/adapters/webchat.js +160 -0
  172. package/dist/node/adapters/whatsapp.d.ts +28 -0
  173. package/dist/node/adapters/whatsapp.js +230 -0
  174. package/dist/node/cli.d.ts +2 -0
  175. package/dist/node/cli.js +325 -0
  176. package/dist/node/config.d.ts +17 -0
  177. package/dist/node/config.js +31 -0
  178. package/dist/node/runtime.d.ts +50 -0
  179. package/dist/node/runtime.js +351 -0
  180. package/dist/server/handlers/__test-utils__/mock-supabase.d.ts +11 -0
  181. package/dist/server/handlers/__test-utils__/mock-supabase.js +393 -0
  182. package/dist/server/handlers/analytics.d.ts +17 -0
  183. package/dist/server/handlers/analytics.js +266 -0
  184. package/dist/server/handlers/api-keys.d.ts +6 -0
  185. package/dist/server/handlers/api-keys.js +221 -0
  186. package/dist/server/handlers/billing.d.ts +33 -0
  187. package/dist/server/handlers/billing.js +272 -0
  188. package/dist/server/handlers/browser.d.ts +10 -0
  189. package/dist/server/handlers/browser.js +517 -0
  190. package/dist/server/handlers/catalog.d.ts +99 -0
  191. package/dist/server/handlers/catalog.js +976 -0
  192. package/dist/server/handlers/comms.d.ts +254 -0
  193. package/dist/server/handlers/comms.js +588 -0
  194. package/dist/server/handlers/creations.d.ts +6 -0
  195. package/dist/server/handlers/creations.js +479 -0
  196. package/dist/server/handlers/crm.d.ts +89 -0
  197. package/dist/server/handlers/crm.js +538 -0
  198. package/dist/server/handlers/discovery.d.ts +6 -0
  199. package/dist/server/handlers/discovery.js +288 -0
  200. package/dist/server/handlers/embeddings.d.ts +92 -0
  201. package/dist/server/handlers/embeddings.js +197 -0
  202. package/dist/server/handlers/enrichment.d.ts +8 -0
  203. package/dist/server/handlers/enrichment.js +768 -0
  204. package/dist/server/handlers/image-gen.d.ts +6 -0
  205. package/dist/server/handlers/image-gen.js +409 -0
  206. package/dist/server/handlers/inventory.d.ts +319 -0
  207. package/dist/server/handlers/inventory.js +447 -0
  208. package/dist/server/handlers/kali.d.ts +10 -0
  209. package/dist/server/handlers/kali.js +210 -0
  210. package/dist/server/handlers/llm-providers.d.ts +6 -0
  211. package/dist/server/handlers/llm-providers.js +673 -0
  212. package/dist/server/handlers/local-agent.d.ts +6 -0
  213. package/dist/server/handlers/local-agent.js +118 -0
  214. package/dist/server/handlers/meta-ads.d.ts +111 -0
  215. package/dist/server/handlers/meta-ads.js +2279 -0
  216. package/dist/server/handlers/nodes.d.ts +33 -0
  217. package/dist/server/handlers/nodes.js +699 -0
  218. package/dist/server/handlers/operations.d.ts +138 -0
  219. package/dist/server/handlers/operations.js +131 -0
  220. package/dist/server/handlers/platform.d.ts +23 -0
  221. package/dist/server/handlers/platform.js +227 -0
  222. package/dist/server/handlers/supply-chain.d.ts +19 -0
  223. package/dist/server/handlers/supply-chain.js +327 -0
  224. package/dist/server/handlers/transcription.d.ts +17 -0
  225. package/dist/server/handlers/transcription.js +121 -0
  226. package/dist/server/handlers/video-gen.d.ts +6 -0
  227. package/dist/server/handlers/video-gen.js +466 -0
  228. package/dist/server/handlers/voice.d.ts +8 -0
  229. package/dist/server/handlers/voice.js +1146 -0
  230. package/dist/server/handlers/workflow-steps.d.ts +86 -0
  231. package/dist/server/handlers/workflow-steps.js +2349 -0
  232. package/dist/server/handlers/workflows.d.ts +7 -0
  233. package/dist/server/handlers/workflows.js +989 -0
  234. package/dist/server/index.d.ts +1 -0
  235. package/dist/server/index.js +2427 -0
  236. package/dist/server/lib/batch-client.d.ts +80 -0
  237. package/dist/server/lib/batch-client.js +467 -0
  238. package/dist/server/lib/code-worker-pool.d.ts +31 -0
  239. package/dist/server/lib/code-worker-pool.js +224 -0
  240. package/dist/server/lib/code-worker.d.ts +1 -0
  241. package/dist/server/lib/code-worker.js +188 -0
  242. package/dist/server/lib/compaction-service.d.ts +32 -0
  243. package/dist/server/lib/compaction-service.js +162 -0
  244. package/dist/server/lib/logger.d.ts +19 -0
  245. package/dist/server/lib/logger.js +46 -0
  246. package/dist/server/lib/otel.d.ts +38 -0
  247. package/dist/server/lib/otel.js +126 -0
  248. package/dist/server/lib/pg-rate-limiter.d.ts +21 -0
  249. package/dist/server/lib/pg-rate-limiter.js +86 -0
  250. package/dist/server/lib/prompt-sanitizer.d.ts +37 -0
  251. package/dist/server/lib/prompt-sanitizer.js +177 -0
  252. package/dist/server/lib/provider-capabilities.d.ts +85 -0
  253. package/dist/server/lib/provider-capabilities.js +190 -0
  254. package/dist/server/lib/provider-failover.d.ts +74 -0
  255. package/dist/server/lib/provider-failover.js +210 -0
  256. package/dist/server/lib/rate-limiter.d.ts +39 -0
  257. package/dist/server/lib/rate-limiter.js +147 -0
  258. package/dist/server/lib/server-agent-loop.d.ts +107 -0
  259. package/dist/server/lib/server-agent-loop.js +667 -0
  260. package/dist/server/lib/server-subagent.d.ts +78 -0
  261. package/dist/server/lib/server-subagent.js +203 -0
  262. package/dist/server/lib/session-checkpoint.d.ts +51 -0
  263. package/dist/server/lib/session-checkpoint.js +145 -0
  264. package/dist/server/lib/ssrf-guard.d.ts +13 -0
  265. package/dist/server/lib/ssrf-guard.js +240 -0
  266. package/dist/server/lib/supabase-client.d.ts +7 -0
  267. package/dist/server/lib/supabase-client.js +78 -0
  268. package/dist/server/lib/template-resolver.d.ts +31 -0
  269. package/dist/server/lib/template-resolver.js +215 -0
  270. package/dist/server/lib/utils.d.ts +16 -0
  271. package/dist/server/lib/utils.js +147 -0
  272. package/dist/server/local-agent-gateway.d.ts +82 -0
  273. package/dist/server/local-agent-gateway.js +426 -0
  274. package/dist/server/providers/anthropic.d.ts +20 -0
  275. package/dist/server/providers/anthropic.js +199 -0
  276. package/dist/server/providers/bedrock.d.ts +20 -0
  277. package/dist/server/providers/bedrock.js +194 -0
  278. package/dist/server/providers/gemini.d.ts +24 -0
  279. package/dist/server/providers/gemini.js +486 -0
  280. package/dist/server/providers/openai.d.ts +24 -0
  281. package/dist/server/providers/openai.js +522 -0
  282. package/dist/server/providers/registry.d.ts +32 -0
  283. package/dist/server/providers/registry.js +58 -0
  284. package/dist/server/providers/shared.d.ts +32 -0
  285. package/dist/server/providers/shared.js +124 -0
  286. package/dist/server/providers/types.d.ts +92 -0
  287. package/dist/server/providers/types.js +12 -0
  288. package/dist/server/proxy-handlers.d.ts +6 -0
  289. package/dist/server/proxy-handlers.js +89 -0
  290. package/dist/server/tool-router.d.ts +149 -0
  291. package/dist/server/tool-router.js +803 -0
  292. package/dist/server/validation.d.ts +24 -0
  293. package/dist/server/validation.js +301 -0
  294. package/dist/server/worker.d.ts +19 -0
  295. package/dist/server/worker.js +201 -0
  296. package/dist/setup.d.ts +8 -0
  297. package/dist/setup.js +181 -0
  298. package/dist/shared/agent-core.d.ts +157 -0
  299. package/dist/shared/agent-core.js +534 -0
  300. package/dist/shared/anthropic-types.d.ts +105 -0
  301. package/dist/shared/anthropic-types.js +7 -0
  302. package/dist/shared/api-client.d.ts +90 -0
  303. package/dist/shared/api-client.js +379 -0
  304. package/dist/shared/constants.d.ts +33 -0
  305. package/dist/shared/constants.js +80 -0
  306. package/dist/shared/sse-parser.d.ts +26 -0
  307. package/dist/shared/sse-parser.js +259 -0
  308. package/dist/shared/tool-dispatch.d.ts +52 -0
  309. package/dist/shared/tool-dispatch.js +191 -0
  310. package/dist/shared/types.d.ts +72 -0
  311. package/dist/shared/types.js +7 -0
  312. package/dist/updater.d.ts +25 -0
  313. package/dist/updater.js +140 -0
  314. package/dist/webchat/widget.d.ts +0 -0
  315. package/dist/webchat/widget.js +397 -0
  316. package/package.json +95 -0
  317. package/src/cli/services/builtin-skills/commit.md +19 -0
  318. package/src/cli/services/builtin-skills/review-pr.md +21 -0
  319. package/src/cli/services/builtin-skills/review.md +18 -0
@@ -0,0 +1,989 @@
1
+ // server/handlers/workflows.ts — Workflow CRUD & management
2
+ //
3
+ // This file is the entry point for the workflow engine. It re-exports the step
4
+ // execution engine from ./workflow-steps.ts and contains the handleWorkflows()
5
+ // CRUD handler for the MCP tool interface.
6
+ //
7
+ // Step execution, inline chains, cron/schedule processing, event triggers,
8
+ // webhook ingestion, circuit breakers, and all step-type executors live in
9
+ // ./workflow-steps.ts.
10
+ import { randomUUID } from "node:crypto";
11
+ // Re-export everything from workflow-steps so existing imports from workflows.ts still work
12
+ export {
13
+ // Injected executor setters
14
+ setToolExecutor, setAgentExecutor, setTokenBroadcaster, setStepErrorBroadcaster,
15
+ // Core engine
16
+ processWorkflowSteps, processWaitingSteps, executeAndAdvance, executeInlineChain,
17
+ // Guest approval
18
+ generateGuestApprovalUrl, verifyGuestApprovalSignature,
19
+ // Schedule / timeout / events
20
+ processScheduleTriggers, enforceWorkflowTimeouts, processEventTriggers,
21
+ // Resilience
22
+ cleanupOrphanedSteps, processDlqRetries,
23
+ // Webhook ingestion
24
+ handleWebhookIngestion,
25
+ // Worker pool management
26
+ initWorkerPool, getPoolStats, shutdownPool,
27
+ // Cron parser (used by CRUD)
28
+ getNextCronTime,
29
+ // Event journal (used by CRUD replay)
30
+ logWorkflowEvent,
31
+ // Run completion (used by CRUD start/cancel)
32
+ completeWorkflowRun, } from "./workflow-steps.js";
33
+ import { executeInlineChain, getNextCronTime, completeWorkflowRun, logWorkflowEvent, } from "./workflow-steps.js";
34
+ // ============================================================================
35
+ // CRUD HANDLER — MCP tool interface
36
+ // ============================================================================
37
+ export async function handleWorkflows(supabase, args, storeId) {
38
+ const action = args.action;
39
+ const sid = storeId;
40
+ switch (action) {
41
+ case "list": {
42
+ let q = supabase.from("workflows")
43
+ .select("id, name, description, icon, status, is_active, trigger_type, max_concurrent_runs, version, last_run_at, created_at, circuit_breaker_state")
44
+ .eq("store_id", sid).order("created_at", { ascending: false });
45
+ if (args.status)
46
+ q = q.eq("status", args.status);
47
+ if (args.trigger_type)
48
+ q = q.eq("trigger_type", args.trigger_type);
49
+ const { data, error } = await q.limit(args.limit || 50);
50
+ return error ? { success: false, error: error.message } : { success: true, data };
51
+ }
52
+ case "get": {
53
+ const { data: wf, error } = await supabase.from("workflows")
54
+ .select("*, workflow_steps(*)").eq("id", args.workflow_id).eq("store_id", sid).single();
55
+ if (error)
56
+ return { success: false, error: error.message };
57
+ const { data: runs } = await supabase.from("workflow_runs")
58
+ .select("id, status, trigger_type, started_at, completed_at, duration_ms, error_message, error_step_key")
59
+ .eq("workflow_id", args.workflow_id).order("created_at", { ascending: false }).limit(10);
60
+ return { success: true, data: { ...wf, recent_runs: runs || [] } };
61
+ }
62
+ case "create": {
63
+ // Compute next_run_at from cron expression — check both top-level and trigger_config
64
+ const tc = args.trigger_config;
65
+ const cronExpr = args.cron_expression
66
+ || tc?.cron || tc?.cron_expression || null;
67
+ let nextRunAt = null;
68
+ if (cronExpr) {
69
+ const next = getNextCronTime(cronExpr);
70
+ if (!next)
71
+ return { success: false, error: `Invalid cron expression: ${cronExpr}` };
72
+ nextRunAt = next.toISOString();
73
+ }
74
+ // Auto-extract timezone from trigger_config if not top-level
75
+ const tz = args.timezone || tc?.timezone || "UTC";
76
+ const { data, error } = await supabase.from("workflows").insert({
77
+ store_id: sid,
78
+ name: args.name,
79
+ description: args.description || null,
80
+ icon: args.icon || null,
81
+ status: args.status || "draft",
82
+ is_active: args.status === "active",
83
+ trigger_type: args.trigger_type || (cronExpr ? "schedule" : "manual"),
84
+ trigger_config: args.trigger_config || {},
85
+ max_concurrent_runs: args.max_concurrent_runs || 1,
86
+ max_run_duration_seconds: args.max_run_duration_seconds || 3600,
87
+ max_steps_per_run: args.max_steps_per_run || 50,
88
+ max_retries_per_step: args.max_retries_per_step || 3,
89
+ on_error_webhook_url: args.on_error_webhook_url || null,
90
+ on_error_email: args.on_error_email || null,
91
+ cron_expression: cronExpr,
92
+ next_run_at: nextRunAt,
93
+ timezone: tz,
94
+ multitask_strategy: args.multitask_strategy || "allow",
95
+ }).select().single();
96
+ if (error)
97
+ return { success: false, error: error.message };
98
+ if (Array.isArray(args.steps)) {
99
+ const steps = args.steps.map((s, i) => ({
100
+ workflow_id: data.id,
101
+ step_key: s.step_key,
102
+ step_type: s.step_type,
103
+ is_entry_point: s.is_entry_point ?? (i === 0),
104
+ on_success: s.on_success || null,
105
+ on_failure: s.on_failure || null,
106
+ step_config: s.step_config || {},
107
+ max_retries: s.max_retries || 3,
108
+ retry_delay_seconds: s.retry_delay_seconds || 10,
109
+ timeout_seconds: s.timeout_seconds || 60,
110
+ input_schema: s.input_schema || null,
111
+ position_x: s.position_x || 0,
112
+ position_y: s.position_y || i * 100,
113
+ }));
114
+ const { error: stepsErr } = await supabase.from("workflow_steps").insert(steps);
115
+ if (stepsErr)
116
+ return { success: false, error: `Workflow created but steps failed: ${stepsErr.message}` };
117
+ }
118
+ return { success: true, data };
119
+ }
120
+ case "update": {
121
+ const updates = {};
122
+ const allowed = ["name", "description", "icon", "status", "trigger_type", "trigger_config",
123
+ "max_concurrent_runs", "max_run_duration_seconds", "max_steps_per_run", "max_retries_per_step",
124
+ "on_error_webhook_url", "on_error_email", "multitask_strategy", "timezone"];
125
+ for (const k of allowed) {
126
+ if (args[k] !== undefined)
127
+ updates[k] = args[k];
128
+ }
129
+ if (args.status !== undefined)
130
+ updates.is_active = args.status === "active";
131
+ // Handle cron_expression update — check both top-level and trigger_config
132
+ const utc = args.trigger_config;
133
+ const cronFromConfig = utc?.cron || utc?.cron_expression || null;
134
+ const cronExplicit = args.cron_expression !== undefined ? args.cron_expression : null;
135
+ const cronExpr = cronExplicit || cronFromConfig;
136
+ if (cronExpr !== null && (args.cron_expression !== undefined || cronFromConfig)) {
137
+ updates.cron_expression = cronExpr;
138
+ const next = getNextCronTime(cronExpr);
139
+ if (!next)
140
+ return { success: false, error: `Invalid cron expression: ${cronExpr}` };
141
+ updates.next_run_at = next.toISOString();
142
+ // Auto-set trigger_type to schedule if cron is provided
143
+ if (!updates.trigger_type)
144
+ updates.trigger_type = "schedule";
145
+ // Extract timezone from trigger_config if not top-level
146
+ if (!updates.timezone && utc?.timezone)
147
+ updates.timezone = utc.timezone;
148
+ }
149
+ else if (args.cron_expression === null) {
150
+ // Explicitly clearing cron
151
+ updates.cron_expression = null;
152
+ updates.next_run_at = null;
153
+ }
154
+ const { data, error } = await supabase.from("workflows")
155
+ .update(updates).eq("id", args.workflow_id).eq("store_id", sid).select().maybeSingle();
156
+ if (error)
157
+ return { success: false, error: error.message };
158
+ if (!data)
159
+ return { success: false, error: "Workflow not found or store mismatch" };
160
+ return { success: true, data };
161
+ }
162
+ case "delete": {
163
+ const { error } = await supabase.from("workflows").delete()
164
+ .eq("id", args.workflow_id).eq("store_id", sid);
165
+ return error ? { success: false, error: error.message } : { success: true, data: { deleted: true } };
166
+ }
167
+ case "add_step": {
168
+ // H9 FIX: Verify workflow belongs to this store before adding step
169
+ const { data: wfCheck } = await supabase.from("workflows")
170
+ .select("id").eq("id", args.workflow_id).eq("store_id", sid).single();
171
+ if (!wfCheck)
172
+ return { success: false, error: "Workflow not found in this store" };
173
+ const { data, error } = await supabase.from("workflow_steps").insert({
174
+ workflow_id: args.workflow_id,
175
+ step_key: args.step_key, step_type: args.step_type,
176
+ is_entry_point: args.is_entry_point ?? false,
177
+ on_success: args.on_success || null, on_failure: args.on_failure || null,
178
+ step_config: args.step_config || {},
179
+ max_retries: args.max_retries || 3,
180
+ timeout_seconds: args.timeout_seconds || 60,
181
+ input_schema: args.input_schema || null,
182
+ }).select().single();
183
+ return error ? { success: false, error: error.message } : { success: true, data };
184
+ }
185
+ case "update_step": {
186
+ // H9 FIX: Verify step belongs to a workflow owned by this store
187
+ const { data: stepCheck } = await supabase.from("workflow_steps")
188
+ .select("id, workflow_id, workflows!inner(store_id)")
189
+ .eq("id", args.step_id).single();
190
+ if (!stepCheck || stepCheck.workflows?.store_id !== sid) {
191
+ return { success: false, error: "Step not found in this store's workflows" };
192
+ }
193
+ const su = {};
194
+ for (const k of ["step_key", "step_type", "is_entry_point", "on_success", "on_failure",
195
+ "step_config", "max_retries", "retry_delay_seconds", "timeout_seconds", "input_schema",
196
+ "position_x", "position_y"]) {
197
+ if (args[k] !== undefined) {
198
+ // Treat empty string as null for nullable fields (on_success, on_failure)
199
+ su[k] = (args[k] === "" && (k === "on_success" || k === "on_failure")) ? null : args[k];
200
+ }
201
+ }
202
+ const { data, error } = await supabase.from("workflow_steps")
203
+ .update(su).eq("id", args.step_id).select().single();
204
+ return error ? { success: false, error: error.message } : { success: true, data };
205
+ }
206
+ case "delete_step": {
207
+ // H9 FIX: Verify step belongs to a workflow owned by this store
208
+ const { data: stepCheck } = await supabase.from("workflow_steps")
209
+ .select("id, workflow_id, workflows!inner(store_id)")
210
+ .eq("id", args.step_id).single();
211
+ if (!stepCheck || stepCheck.workflows?.store_id !== sid) {
212
+ return { success: false, error: "Step not found in this store's workflows" };
213
+ }
214
+ const { error } = await supabase.from("workflow_steps").delete().eq("id", args.step_id);
215
+ return error ? { success: false, error: error.message } : { success: true, data: { deleted: true } };
216
+ }
217
+ case "start": {
218
+ const wfId = args.workflow_id;
219
+ // FIX 5: Validate idempotency key format if provided
220
+ if (args.idempotency_key) {
221
+ const key = String(args.idempotency_key);
222
+ if (key.length > 255 || !/^[a-zA-Z0-9._:\/-]+$/.test(key)) {
223
+ return { success: false, error: "Invalid idempotency_key: must be 1-255 alphanumeric characters (with . _ : / -)" };
224
+ }
225
+ }
226
+ // FIX 6: Reject oversized trigger payloads before storing
227
+ if (args.trigger_payload) {
228
+ const payloadStr = JSON.stringify(args.trigger_payload);
229
+ if (payloadStr.length > 10_000_000) {
230
+ return { success: false, error: "Trigger payload too large (max 10MB)" };
231
+ }
232
+ }
233
+ // Auto-activate workflow if needed (start implies intent to run)
234
+ await supabase.from("workflows").update({ is_active: true, status: "active" })
235
+ .eq("id", wfId).eq("store_id", sid).in("status", ["draft", "paused"]);
236
+ // Load workflow config for strategy + concurrency + versioning
237
+ const { data: wfConfig } = await supabase.from("workflows")
238
+ .select("multitask_strategy, published_version_id, max_concurrent_runs").eq("id", wfId).eq("store_id", sid).single();
239
+ if (!wfConfig)
240
+ return { success: false, error: "Workflow not found or access denied" };
241
+ const strategy = wfConfig.multitask_strategy || "allow";
242
+ // Concurrency is enforced atomically inside start_workflow_run RPC (FOR UPDATE lock)
243
+ // No app-side check needed — the RPC handles the race-free count + insert
244
+ if (strategy !== "allow") {
245
+ // Check for in-flight runs
246
+ const { data: activeRuns } = await supabase.from("workflow_runs")
247
+ .select("id, status, created_at")
248
+ .eq("workflow_id", wfId).eq("store_id", sid)
249
+ .in("status", ["running", "pending"])
250
+ .order("created_at", { ascending: false })
251
+ .limit(10);
252
+ if (activeRuns?.length) {
253
+ switch (strategy) {
254
+ case "reject":
255
+ return { success: false, error: `Workflow already has ${activeRuns.length} active run(s). Strategy: reject concurrent runs.`, data: { strategy: "reject", active_runs: activeRuns.length } };
256
+ case "enqueue":
257
+ // Let it through — the run will be created and the worker picks it up in order
258
+ // Set priority lower so existing runs finish first
259
+ break;
260
+ case "interrupt":
261
+ // Cancel all active runs before starting new one
262
+ for (const run of activeRuns) {
263
+ await completeWorkflowRun(supabase, run.id, wfId, sid, "cancelled", "Interrupted by new run (multitask_strategy: interrupt)");
264
+ }
265
+ break;
266
+ case "replace":
267
+ // Cancel all active runs AND delete their step runs
268
+ for (const run of activeRuns) {
269
+ await completeWorkflowRun(supabase, run.id, wfId, sid, "cancelled", "Replaced by new run (multitask_strategy: replace)");
270
+ }
271
+ break;
272
+ }
273
+ }
274
+ }
275
+ const { data, error } = await supabase.rpc("start_workflow_run", {
276
+ p_workflow_id: wfId, p_store_id: sid,
277
+ p_trigger_type: args.trigger_type || "manual",
278
+ p_trigger_payload: args.trigger_payload || {},
279
+ p_idempotency_key: args.idempotency_key || null,
280
+ });
281
+ if (error)
282
+ return { success: false, error: error.message };
283
+ if (!data?.success)
284
+ return { success: false, error: data?.error || "Failed" };
285
+ // Generate trace_id for distributed tracing
286
+ const traceId = data.run_id ? randomUUID() : undefined;
287
+ // Set version_id, trace_id, priority
288
+ if (data.run_id && !data.deduplicated) {
289
+ const runUpdates = { trace_id: traceId };
290
+ if (wfConfig?.published_version_id)
291
+ runUpdates.version_id = wfConfig.published_version_id;
292
+ if (strategy === "enqueue")
293
+ runUpdates.priority = 3;
294
+ await supabase.from("workflow_runs").update(runUpdates).eq("id", data.run_id);
295
+ // Phase 1: Inline execution — execute first step immediately
296
+ try {
297
+ await executeInlineChain(supabase, data.run_id);
298
+ }
299
+ catch (err) {
300
+ console.error("[workflow-inline] Error in inline chain:", err.message);
301
+ // Non-fatal — worker will pick up remaining steps
302
+ }
303
+ }
304
+ return { success: true, data: { ...data, strategy, trace_id: traceId } };
305
+ }
306
+ // --- Run lifecycle: pause / resume / cancel / circuit-breaker reset ---
+ case "pause": {
307
+ // Move a running run to "paused"; the .eq("status", "running") guard makes
+ // this a no-op for runs in any other state.
+ const { error } = await supabase.from("workflow_runs").update({ status: "paused" })
308
+ .eq("id", args.run_id).eq("store_id", sid).eq("status", "running");
309
+ // NOTE(review): an update matching zero rows does not error, so this reports
+ // { paused: true } even when no running run was found — confirm callers
+ // tolerate that (same caveat for "resume" below).
+ return error ? { success: false, error: error.message } : { success: true, data: { paused: true } };
310
+ }
311
+ case "resume": {
312
+ // Inverse of "pause": only runs currently in "paused" are moved back to "running".
+ const { error } = await supabase.from("workflow_runs").update({ status: "running" })
313
+ .eq("id", args.run_id).eq("store_id", sid).eq("status", "paused");
314
+ return error ? { success: false, error: error.message } : { success: true, data: { resumed: true } };
315
+ }
316
+ case "cancel": {
317
+ // Store-scoped lookup doubles as the access check before cancelling.
+ const { data: run } = await supabase.from("workflow_runs")
318
+ .select("workflow_id, store_id").eq("id", args.run_id).eq("store_id", sid).single();
319
+ if (!run)
320
+ return { success: false, error: "Run not found or access denied" };
321
+ // Delegates status transition + bookkeeping to the shared completion helper.
+ await completeWorkflowRun(supabase, args.run_id, run.workflow_id, run.store_id, "cancelled", "Cancelled by user");
322
+ return { success: true, data: { cancelled: true } };
323
+ }
324
+ case "reset_circuit_breaker": {
+ // Re-close the breaker and zero the failure counter for one workflow.
325
+ const { data, error } = await supabase.from("workflows")
326
+ .update({ circuit_breaker_state: "closed", circuit_breaker_failures: 0 })
327
+ .eq("id", args.workflow_id).eq("store_id", sid).select().single();
328
+ if (error)
+ // .single() also errors when no row matched, so a missing workflow
+ // normally surfaces here rather than in the !data branch below.
329
+ return { success: false, error: error.message };
330
+ if (!data)
331
+ return { success: false, error: "Workflow not found" };
332
+ return { success: true, data: { reset: true, workflow_id: data.id } };
333
+ }
334
+ // --- Read-side queries: run list, per-run step detail, aggregate analytics ---
+ case "runs": {
335
+ // List recent runs for this store, optionally filtered by workflow and status.
+ let q = supabase.from("workflow_runs")
336
+ .select("id, workflow_id, status, trigger_type, trigger_payload, current_step_key, error_message, error_step_key, started_at, completed_at, duration_ms, created_at")
337
+ .eq("store_id", sid).order("created_at", { ascending: false });
338
+ if (args.workflow_id)
339
+ q = q.eq("workflow_id", args.workflow_id);
340
+ if (args.status)
341
+ q = q.eq("status", args.status);
342
+ // NOTE(review): `args.limit || 25` also coerces an explicit limit of 0 to 25
+ // (|| is truthiness, not nullish) — presumably acceptable; confirm.
+ const { data, error } = await q.limit(args.limit || 25);
343
+ return error ? { success: false, error: error.message } : { success: true, data };
344
+ }
345
+ case "step_runs": {
346
+ // IDOR FIX: Verify the run belongs to this store before querying step_runs
347
+ const { data: runCheck } = await supabase.from("workflow_runs")
348
+ .select("id").eq("id", args.run_id).eq("store_id", sid).single();
349
+ if (!runCheck)
350
+ return { success: false, error: "Run not found or access denied" };
351
+ // Steps returned in execution order (created_at ascending).
+ const { data, error } = await supabase.from("workflow_step_runs")
352
+ .select("id, step_key, step_type, status, input, output, error_message, attempt_count, started_at, completed_at, duration_ms, parent_step_run_id, child_run_id")
353
+ .eq("run_id", args.run_id).order("created_at", { ascending: true });
354
+ return error ? { success: false, error: error.message } : { success: true, data };
355
+ }
356
+ case "analytics": {
+ // Aggregation is pushed to the database via RPC; default window is 30 days.
357
+ const { data, error } = await supabase.rpc("get_workflow_analytics", {
358
+ p_store_id: sid, p_days: args.days || 30,
359
+ });
360
+ return error ? { success: false, error: error.message } : { success: true, data };
361
+ }
362
+ // --- Webhook endpoint management (create / list / delete) ---
+ case "create_webhook": {
363
+ // P1 FIX: Ensure slug is globally unique to prevent cross-store webhook interception.
364
+ // handleWebhookIngestion queries by slug without store_id, so duplicate slugs across
365
+ // stores would cause one store to intercept another's webhooks.
366
+ // NOTE(review): this check-then-insert is a TOCTOU race — two concurrent
+ // creates with the same slug can both pass the check. A unique index on
+ // webhook_endpoints.slug (for active rows) would close it; verify one exists.
+ const { data: existingSlug } = await supabase
367
+ .from("webhook_endpoints")
368
+ .select("id")
369
+ .eq("slug", args.slug)
370
+ .eq("is_active", true)
371
+ .limit(1);
372
+ if (existingSlug?.length) {
373
+ return { success: false, error: "Webhook slug already in use. Choose a different slug." };
374
+ }
375
+ // ?? preserves explicit false for the boolean flags; || applies defaults
+ // for the numeric fields (an explicit 0 would fall back — presumably fine).
+ const { data, error } = await supabase.from("webhook_endpoints").insert({
376
+ store_id: sid, name: args.name, description: args.description || null,
377
+ slug: args.slug, workflow_id: args.workflow_id,
378
+ verify_signature: args.verify_signature ?? true,
379
+ max_requests_per_minute: args.max_requests_per_minute || 60,
380
+ payload_transform: args.payload_transform || null,
381
+ sync_response: args.sync_response ?? false,
382
+ sync_timeout_seconds: args.sync_timeout_seconds || 30,
383
+ }).select().single();
384
+ if (error)
385
+ return { success: false, error: error.message };
386
+ // Public ingestion URL is derived from the slug; host is hard-coded here.
+ return { success: true, data: { ...data, webhook_url: `https://whale-agent.fly.dev/webhooks/${data.slug}` } };
387
+ }
388
+ case "list_webhooks": {
389
+ const { data, error } = await supabase.from("webhook_endpoints")
390
+ .select("id, name, description, slug, workflow_id, is_active, verify_signature, sync_response, last_received_at, total_received, created_at")
391
+ .eq("store_id", sid).order("created_at", { ascending: false });
392
+ return error ? { success: false, error: error.message } : { success: true, data };
393
+ }
394
+ case "delete_webhook": {
+ // Hard delete, scoped to the caller's store; zero matched rows still
+ // reports { deleted: true }.
395
+ const { error } = await supabase.from("webhook_endpoints").delete()
396
+ .eq("id", args.webhook_id).eq("store_id", sid);
397
+ return error ? { success: false, error: error.message } : { success: true, data: { deleted: true } };
398
+ }
399
+ // ================================================================
400
+ // PHASE 2: Approval actions
401
+ // ================================================================
402
+ case "list_approvals": {
+ // List approval requests for this store, newest first, with optional
+ // status / workflow / run filters.
403
+ let q = supabase.from("workflow_approval_requests")
404
+ .select("id, run_id, step_run_id, workflow_id, title, description, prompt, options, form_schema, assigned_to, assigned_role, status, response_data, responded_by, responded_at, expires_at, timeout_action, created_at")
405
+ .eq("store_id", sid).order("created_at", { ascending: false });
406
+ if (args.status)
407
+ q = q.eq("status", args.status);
408
+ if (args.workflow_id)
409
+ q = q.eq("workflow_id", args.workflow_id);
410
+ if (args.run_id)
411
+ q = q.eq("run_id", args.run_id);
412
+ const { data, error } = await q.limit(args.limit || 25);
413
+ return error ? { success: false, error: error.message } : { success: true, data };
414
+ }
415
+ case "respond_approval": {
416
+ if (!args.approval_id)
417
+ return { success: false, error: "approval_id required" };
418
+ if (!args.response_status)
419
+ return { success: false, error: "response_status required (approved/rejected)" };
420
+ // FIX 3: Check expiration before calling the RPC (defense in depth)
421
+ // Store-scoped fetch also serves as the access check.
+ const { data: approval, error: approvalErr } = await supabase.from("workflow_approval_requests")
422
+ .select("id, expires_at, status")
423
+ .eq("id", args.approval_id).eq("store_id", sid).single();
424
+ if (approvalErr || !approval)
425
+ return { success: false, error: "Approval not found" };
426
+ if (approval.status !== "pending")
427
+ return { success: false, error: `Approval already ${approval.status}` };
428
+ if (approval.expires_at && new Date(approval.expires_at) < new Date()) {
429
+ return { success: false, error: "Approval has expired" };
430
+ }
431
+ // The RPC performs the actual state transition; the checks above are
+ // advisory only (the race between check and RPC is accepted here).
+ const { data, error } = await supabase.rpc("respond_to_approval", {
432
+ p_approval_id: args.approval_id,
433
+ p_store_id: sid,
434
+ p_response: args.response_status,
435
+ p_response_data: args.response_data || {},
436
+ p_responded_by: args.responded_by || null,
437
+ });
438
+ if (error)
439
+ return { success: false, error: error.message };
440
+ return data?.success ? { success: true, data } : { success: false, error: data?.error || "Failed" };
441
+ }
442
+ // ================================================================
443
+ // PHASE 4: Versioning actions
444
+ // ================================================================
445
+ case "publish": {
+ // Snapshot the current workflow definition as a new version via RPC,
+ // then activate the workflow.
446
+ if (!args.workflow_id)
447
+ return { success: false, error: "workflow_id required" };
448
+ const { data, error } = await supabase.rpc("publish_workflow_version", {
449
+ p_workflow_id: args.workflow_id,
450
+ p_store_id: sid,
451
+ p_changelog: args.changelog || null,
452
+ p_published_by: args.published_by || null,
453
+ });
454
+ if (error)
455
+ return { success: false, error: error.message };
456
+ if (!data?.success)
457
+ return { success: false, error: data?.error || "Failed" };
458
+ // Auto-activate on publish — publishing implies ready to run
459
+ await supabase.from("workflows").update({ is_active: true, status: "active" })
460
+ .eq("id", args.workflow_id).eq("store_id", sid);
461
+ return { success: true, data };
462
+ }
463
+ case "versions": {
+ // List published versions (metadata only), newest version first.
464
+ if (!args.workflow_id)
465
+ return { success: false, error: "workflow_id required" };
466
+ // IDOR FIX: Verify the workflow belongs to this store
467
+ const { data: versionsWfCheck } = await supabase.from("workflows")
468
+ .select("id").eq("id", args.workflow_id).eq("store_id", sid).single();
469
+ if (!versionsWfCheck)
470
+ return { success: false, error: "Workflow not found or access denied" };
471
+ const { data, error } = await supabase.from("workflow_versions")
472
+ .select("id, version, name, description, trigger_type, published_by, published_at, changelog")
473
+ .eq("workflow_id", args.workflow_id)
474
+ .order("version", { ascending: false })
475
+ .limit(args.limit || 25);
476
+ return error ? { success: false, error: error.message } : { success: true, data };
477
+ }
478
+ case "version_detail": {
479
+ if (!args.version_id)
480
+ return { success: false, error: "version_id required" };
481
+ // Load full version row
482
+ const { data: ver, error: vErr } = await supabase.from("workflow_versions")
483
+ .select("*")
484
+ .eq("id", args.version_id)
485
+ .single();
486
+ if (vErr || !ver)
487
+ return { success: false, error: vErr?.message || "Version not found" };
488
+ // IDOR FIX: Verify the version's workflow belongs to this store
+ // (version row is fetched first but nothing is returned until this passes).
489
+ const { data: versionWfCheck } = await supabase.from("workflows")
490
+ .select("id").eq("id", ver.workflow_id).eq("store_id", sid).single();
491
+ if (!versionWfCheck)
492
+ return { success: false, error: "Version not found or access denied" };
493
+ // Also load the workflow steps that existed at publish time
494
+ // (stored as a snapshot in the version row, or we load current steps for the workflow)
495
+ let steps = [];
496
+ if (ver.steps_snapshot && Array.isArray(ver.steps_snapshot)) {
497
+ steps = ver.steps_snapshot;
498
+ }
499
+ else {
500
+ // Fallback: load current steps from workflow_steps table
+ // NOTE(review): current steps may have drifted from what was actually
+ // published — callers should treat this fallback as best-effort.
501
+ const { data: stepData } = await supabase.from("workflow_steps")
502
+ .select("id, step_key, name, type, config, position_x, position_y, depends_on")
503
+ .eq("workflow_id", ver.workflow_id)
504
+ .order("step_order", { ascending: true });
505
+ steps = stepData || [];
506
+ }
507
+ return { success: true, data: { ...ver, steps } };
508
+ }
509
+ case "rollback": {
510
+ if (!args.workflow_id || !args.version_id)
511
+ return { success: false, error: "workflow_id and version_id required" };
512
+ // Verify workflow belongs to this store
513
+ const { data: rbWfCheck } = await supabase.from("workflows")
514
+ .select("id").eq("id", args.workflow_id).eq("store_id", sid).single();
515
+ if (!rbWfCheck)
516
+ return { success: false, error: "Workflow not found or access denied" };
517
+ // Verify version belongs to this workflow
518
+ const { data: version, error: verErr } = await supabase.from("workflow_versions")
519
+ .select("id, version, steps_snapshot").eq("id", args.version_id)
520
+ .eq("workflow_id", args.workflow_id).single();
521
+ if (verErr || !version)
522
+ return { success: false, error: "Version not found for this workflow" };
523
+ // P1 FIX: Restore steps from snapshot — not just the version pointer
524
+ // FIXME(review): delete + upsert are two separate statements, not a
+ // transaction — a failure between them leaves the workflow with no steps.
+ // Consider moving this into an RPC so it runs atomically server-side.
+ if (version.steps_snapshot && Array.isArray(version.steps_snapshot)) {
525
+ // Delete current steps
526
+ await supabase.from("workflow_steps")
527
+ .delete().eq("workflow_id", args.workflow_id);
528
+ // Upsert from snapshot
529
+ const stepsToInsert = version.steps_snapshot.map((s) => ({
530
+ ...s,
531
+ workflow_id: args.workflow_id,
532
+ }));
533
+ if (stepsToInsert.length > 0) {
534
+ await supabase.from("workflow_steps").upsert(stepsToInsert, { onConflict: "id" });
535
+ }
536
+ }
537
+ const { error } = await supabase.from("workflows").update({
538
+ published_version_id: version.id,
539
+ }).eq("id", args.workflow_id).eq("store_id", sid);
540
+ return error ? { success: false, error: error.message } : { success: true, data: { rolled_back_to: version.version, version_id: version.id, steps_restored: !!(version.steps_snapshot) } };
541
+ }
542
+ // ================================================================
543
+ // PHASE 5: Template actions
544
+ // ================================================================
545
+ case "list_templates": {
+ // Templates are global (not store-scoped) — intentionally no store_id
+ // filter here; popularity (clone_count) drives the default ordering.
546
+ let q = supabase.from("workflows")
547
+ .select("id, name, description, icon, trigger_type, template_category, template_tags, clone_count, created_at")
548
+ .eq("is_template", true).order("clone_count", { ascending: false });
549
+ if (args.category)
550
+ q = q.eq("template_category", args.category);
551
+ const { data, error } = await q.limit(args.limit || 50);
552
+ return error ? { success: false, error: error.message } : { success: true, data };
553
+ }
554
+ case "clone_template": {
+ // Clone a template into the caller's store via RPC; the RPC owns
+ // ownership/visibility checks for the template.
555
+ if (!args.template_id)
556
+ return { success: false, error: "template_id required" };
557
+ const { data, error } = await supabase.rpc("clone_workflow_template", {
558
+ p_template_id: args.template_id,
559
+ p_store_id: sid,
560
+ p_name: args.name || null,
561
+ });
562
+ if (error)
563
+ return { success: false, error: error.message };
564
+ return data?.success ? { success: true, data } : { success: false, error: data?.error || "Failed" };
565
+ }
566
+ // ================================================================
567
+ // Checkpoint / replay — time-travel debugging
568
+ // ================================================================
569
+ case "checkpoints": {
+ // List checkpoints for a run in capture order.
570
+ if (!args.run_id)
571
+ return { success: false, error: "run_id required" };
572
+ // IDOR FIX: Verify the run belongs to this store
573
+ const { data: cpRunCheck } = await supabase.from("workflow_runs")
574
+ .select("id").eq("id", args.run_id).eq("store_id", sid).single();
575
+ if (!cpRunCheck)
576
+ return { success: false, error: "Run not found or access denied" };
577
+ const { data, error } = await supabase.from("workflow_checkpoints")
578
+ .select("id, step_key, step_run_id, sequence_number, created_at")
579
+ .eq("run_id", args.run_id)
580
+ .order("sequence_number", { ascending: true });
581
+ return error ? { success: false, error: error.message } : { success: true, data };
582
+ }
583
+ case "replay": {
+ // Start a fresh run seeded with a previous run's checkpoint state, then
+ // continue from the step AFTER the checkpointed step.
584
+ if (!args.run_id)
585
+ return { success: false, error: "run_id required" };
586
+ const fromStepKey = args.from_step;
587
+ // Load the checkpoint to replay from
588
+ let checkpointQuery = supabase.from("workflow_checkpoints")
589
+ .select("*").eq("run_id", args.run_id);
590
+ if (fromStepKey) {
591
+ // FIXME(review): .single() below errors if this step checkpointed more
+ // than once (e.g. retries) — this branch probably also needs
+ // .order("sequence_number", { ascending: false }).limit(1).
+ checkpointQuery = checkpointQuery.eq("step_key", fromStepKey);
592
+ }
593
+ else {
594
+ // Default: replay from last successful checkpoint
595
+ checkpointQuery = checkpointQuery.order("sequence_number", { ascending: false }).limit(1);
596
+ }
597
+ const { data: checkpoint } = await checkpointQuery.single();
598
+ if (!checkpoint)
599
+ return { success: false, error: "No checkpoint found" };
600
+ // Get original run's workflow (IDOR FIX: scope to store)
601
+ const { data: origRun } = await supabase.from("workflow_runs")
602
+ .select("workflow_id, trigger_type, trigger_payload, version_id")
603
+ .eq("id", args.run_id).eq("store_id", sid).single();
604
+ if (!origRun)
605
+ return { success: false, error: "Original run not found" };
606
+ // Start a new run with checkpoint state pre-loaded
607
+ const { data: newRun, error: startErr } = await supabase.rpc("start_workflow_run", {
608
+ p_workflow_id: origRun.workflow_id,
609
+ p_store_id: sid,
610
+ p_trigger_type: "replay",
611
+ p_trigger_payload: checkpoint.trigger_payload || origRun.trigger_payload || {},
612
+ p_idempotency_key: null,
613
+ });
614
+ if (startErr || !newRun?.success)
615
+ return { success: false, error: startErr?.message || "Failed to start replay run" };
616
+ // Pre-load step_outputs from checkpoint
617
+ await supabase.from("workflow_runs").update({
618
+ step_outputs: checkpoint.step_outputs,
619
+ version_id: origRun.version_id,
620
+ }).eq("id", newRun.run_id);
621
+ // Cancel the auto-created entry step runs (we'll create our own starting from the checkpoint step)
622
+ await supabase.from("workflow_step_runs").update({ status: "cancelled" })
623
+ .eq("run_id", newRun.run_id).eq("status", "pending");
624
+ // Find what step comes AFTER the checkpoint step
625
+ const { data: checkpointStepDef } = await supabase.from("workflow_steps")
626
+ .select("on_success").eq("workflow_id", origRun.workflow_id)
627
+ .eq("step_key", checkpoint.step_key).single();
628
+ // NOTE(review): if the checkpointed step has no on_success successor, the
+ // replay run is left with all step runs cancelled and nothing queued —
+ // confirm that terminal-step replays are intended to be inert.
+ if (checkpointStepDef?.on_success) {
629
+ // createNextStepRunByKey is internal to workflow-steps — use direct insert
630
+ const { data: nextStepDef } = await supabase.from("workflow_steps")
631
+ .select("id, step_key, step_type, max_retries")
632
+ .eq("workflow_id", origRun.workflow_id).eq("step_key", checkpointStepDef.on_success).single();
633
+ if (nextStepDef) {
634
+ await supabase.from("workflow_step_runs").insert({
635
+ run_id: newRun.run_id, step_id: nextStepDef.id, step_key: nextStepDef.step_key,
636
+ step_type: nextStepDef.step_type, status: "pending", max_attempts: nextStepDef.max_retries ?? 3,
637
+ });
638
+ }
639
+ try {
640
+ await executeInlineChain(supabase, newRun.run_id);
641
+ }
642
+ catch (err) {
643
+ console.error("[workflow] Inline chain failed for replay run", newRun.run_id, ":", err.message);
644
+ }
645
+ }
646
+ // NOTE(review): intentionally(?) not awaited — fire-and-forget audit log.
+ // Confirm logWorkflowEvent handles its own rejections, otherwise this can
+ // surface as an unhandled promise rejection.
+ logWorkflowEvent(supabase, newRun.run_id, "run_replayed", {
647
+ original_run_id: args.run_id, from_step: checkpoint.step_key, checkpoint_id: checkpoint.id,
648
+ });
649
+ return { success: true, data: { run_id: newRun.run_id, replayed_from: checkpoint.step_key, original_run_id: args.run_id } };
650
+ }
651
+ // ================================================================
652
+ // Event journal — time-travel debugging
653
+ // ================================================================
654
+ case "events": {
+ // Chronological event journal for one run (store-checked first).
655
+ if (!args.run_id)
656
+ return { success: false, error: "run_id required" };
657
+ // IDOR FIX: Verify the run belongs to this store
658
+ const { data: eventsRunCheck } = await supabase.from("workflow_runs")
659
+ .select("id").eq("id", args.run_id).eq("store_id", sid).single();
660
+ if (!eventsRunCheck)
661
+ return { success: false, error: "Run not found or access denied" };
662
+ const { data, error } = await supabase.from("workflow_events")
663
+ .select("id, event_type, step_run_id, payload, created_at")
664
+ .eq("run_id", args.run_id)
665
+ .order("created_at", { ascending: true })
666
+ .limit(args.limit || 200);
667
+ return error ? { success: false, error: error.message } : { success: true, data };
668
+ }
669
+ // ================================================================
670
+ // Waitpoint actions
671
+ // ================================================================
672
+ case "list_waitpoints": {
+ // List waitpoint tokens for the store, optionally filtered by run/status.
673
+ let q = supabase.from("waitpoint_tokens")
674
+ .select("id, token, run_id, step_run_id, label, status, expires_at, completed_at, created_at")
675
+ .eq("store_id", sid).order("created_at", { ascending: false });
676
+ if (args.run_id)
677
+ q = q.eq("run_id", args.run_id);
678
+ if (args.status)
679
+ q = q.eq("status", args.status);
680
+ const { data, error } = await q.limit(args.limit || 25);
681
+ return error ? { success: false, error: error.message } : { success: true, data };
682
+ }
683
+ case "complete_waitpoint": {
+ // Complete a human/external waitpoint: mark the token done, flip the
+ // waiting step back to pending, and resume execution inline.
684
+ if (!args.token)
685
+ return { success: false, error: "token required" };
686
+ // Find the waitpoint
687
+ const { data: wp, error: wpErr } = await supabase.from("waitpoint_tokens")
688
+ .select("id, run_id, step_run_id, store_id, expires_at, status")
689
+ .eq("token", args.token).eq("store_id", sid).single();
690
+ if (wpErr || !wp)
691
+ return { success: false, error: "Waitpoint token not found" };
692
+ // NOTE(review): only "completed" is rejected here — other terminal
+ // statuses (e.g. a cancelled token, if that state exists) would be
+ // re-completable; confirm the status domain for waitpoint_tokens.
+ if (wp.status === "completed")
693
+ return { success: false, error: "Waitpoint already completed" };
694
+ if (wp.expires_at && new Date(wp.expires_at) < new Date())
695
+ return { success: false, error: "Waitpoint expired" };
696
+ // Mark completed
697
+ await supabase.from("waitpoint_tokens").update({
698
+ status: "completed", completion_data: args.data || {}, completed_at: new Date().toISOString(),
699
+ }).eq("id", wp.id);
700
+ // Resume the waiting step
+ // (.eq("status","waiting") makes the resume a no-op if the step moved on)
701
+ await supabase.from("workflow_step_runs").update({
702
+ status: "pending", input: { waitpoint_completed: true, waitpoint_data: args.data || {} },
703
+ }).eq("id", wp.step_run_id).eq("status", "waiting");
704
+ // Inline resume
705
+ try {
706
+ await executeInlineChain(supabase, wp.run_id);
707
+ }
708
+ catch (err) {
709
+ // Non-fatal: the background worker can pick the run up later.
+ console.error("[workflow] Inline chain failed after waitpoint:", err.message);
710
+ }
711
+ return { success: true, data: { completed: true, run_id: wp.run_id } };
712
+ }
713
+ // ================================================================
714
+ // Dead Letter Queue actions
715
+ // ================================================================
716
+ case "dlq": {
+ // List DLQ entries for the store, optionally filtered by status/workflow.
717
+ let q = supabase.from("workflow_dlq")
718
+ .select("id, workflow_id, workflow_name, run_id, error_message, error_step_key, trigger_type, status, run_duration_ms, created_at")
719
+ .eq("store_id", sid).order("created_at", { ascending: false });
720
+ if (args.status)
721
+ q = q.eq("status", args.status);
722
+ if (args.workflow_id)
723
+ q = q.eq("workflow_id", args.workflow_id);
724
+ const { data, error } = await q.limit(args.limit || 25);
725
+ return error ? { success: false, error: error.message } : { success: true, data };
726
+ }
727
+ case "dlq_retry": {
+ // Retry a failed run from the DLQ: start a brand-new run with the
+ // original trigger payload, then mark the DLQ entry "retried".
728
+ if (!args.dlq_id)
729
+ return { success: false, error: "dlq_id required" };
730
+ const { data: dlqEntry } = await supabase.from("workflow_dlq")
731
+ .select("*").eq("id", args.dlq_id).eq("store_id", sid).single();
732
+ if (!dlqEntry)
733
+ return { success: false, error: "DLQ entry not found" };
734
+ if (dlqEntry.status !== "pending")
735
+ return { success: false, error: `DLQ entry already ${dlqEntry.status}` };
736
+ // FIX 1: Preserve version_id from the original failed run
737
+ const { data: originalRun } = await supabase.from("workflow_runs")
738
+ .select("version_id")
739
+ .eq("id", dlqEntry.run_id)
740
+ .single();
741
+ // Start a fresh run of the same workflow with the same trigger
742
+ const { data: result, error: startErr } = await supabase.rpc("start_workflow_run", {
743
+ p_workflow_id: dlqEntry.workflow_id,
744
+ p_store_id: sid,
745
+ p_trigger_type: dlqEntry.trigger_type || "dlq_retry",
746
+ p_trigger_payload: { ...(dlqEntry.trigger_payload || {}), dlq_retry: true, original_run_id: dlqEntry.run_id },
747
+ p_idempotency_key: null,
748
+ });
749
+ if (startErr || !result?.success)
750
+ return { success: false, error: startErr?.message || result?.error || "Retry failed" };
751
+ // Set version_id from the original failed run (same pattern as "start" action)
752
+ if (result.run_id && !result.deduplicated && originalRun?.version_id) {
753
+ await supabase.from("workflow_runs").update({ version_id: originalRun.version_id }).eq("id", result.run_id);
754
+ }
755
+ // Update DLQ entry
+ // NOTE(review): status is updated without a .eq("status","pending") guard,
+ // so two concurrent retries of the same entry can both launch runs —
+ // confirm whether a conditional update is wanted here.
756
+ await supabase.from("workflow_dlq").update({
757
+ status: "retried", retried_run_id: result.run_id,
758
+ attempt_count: (dlqEntry.attempt_count || 1) + 1,
759
+ }).eq("id", dlqEntry.id);
760
+ // Inline execution for the retry
761
+ try {
762
+ await executeInlineChain(supabase, result.run_id);
763
+ }
764
+ catch (err) {
765
+ console.error("[workflow] Inline chain failed for DLQ retry run", result.run_id, ":", err.message);
766
+ }
767
+ return { success: true, data: { retried: true, new_run_id: result.run_id, dlq_id: dlqEntry.id } };
768
+ }
769
+ case "dlq_dismiss": {
+ // Dismiss a pending DLQ entry; the .eq("status","pending") guard makes
+ // dismissal of an already-handled entry a silent no-op.
770
+ if (!args.dlq_id)
771
+ return { success: false, error: "dlq_id required" };
772
+ const { error } = await supabase.from("workflow_dlq").update({
773
+ status: "dismissed",
774
+ dismissed_by: args.dismissed_by || null,
775
+ dismissed_at: new Date().toISOString(),
776
+ notes: args.notes || null,
777
+ }).eq("id", args.dlq_id).eq("store_id", sid).eq("status", "pending");
778
+ return error ? { success: false, error: error.message } : { success: true, data: { dismissed: true } };
779
+ }
780
+ // ================================================================
781
+ // Enhanced metrics + DAG visualization
782
+ // ================================================================
783
+ case "metrics": {
+ // Aggregated metrics computed server-side via RPC; 30-day default window.
784
+ const { data, error } = await supabase.rpc("get_workflow_metrics", {
785
+ p_store_id: sid, p_days: args.days || 30,
786
+ });
787
+ return error ? { success: false, error: error.message } : { success: true, data };
788
+ }
789
+ case "graph": {
790
+ // Return DAG visualization data (nodes + edges) for a workflow
791
+ if (!args.workflow_id)
792
+ return { success: false, error: "workflow_id required" };
793
+ // P0 FIX: IDOR protection — verify workflow belongs to this store before querying steps
794
+ const { data: graphWfCheck } = await supabase.from("workflows")
795
+ .select("id").eq("id", args.workflow_id).eq("store_id", sid).single();
796
+ if (!graphWfCheck)
797
+ return { success: false, error: "Workflow not found or access denied" };
798
+ const { data: steps, error: stepsErr } = await supabase.from("workflow_steps")
799
+ .select("id, step_key, step_type, is_entry_point, on_success, on_failure, position_x, position_y, step_config, timeout_seconds, max_retries")
800
+ .eq("workflow_id", args.workflow_id)
801
+ .order("position_y", { ascending: true });
802
+ if (stepsErr)
803
+ return { success: false, error: stepsErr.message };
804
+ // One node per step; step_key is the graph-level id, the row UUID is
+ // carried separately so the UI can call update_step/delete_step.
+ const nodes = (steps || []).map(s => ({
805
+ id: s.step_key,
806
+ step_id: s.id, // UUID for update_step/delete_step
807
+ type: s.step_type,
808
+ label: s.step_key,
809
+ is_entry_point: s.is_entry_point,
810
+ on_success: s.on_success || null,
811
+ on_failure: s.on_failure || null,
812
+ max_retries: s.max_retries,
813
+ timeout_seconds: s.timeout_seconds,
814
+ step_config: s.step_config || {},
815
+ position: { x: s.position_x || 0, y: s.position_y || 0 },
816
+ config_summary: {
817
+ timeout: s.timeout_seconds,
818
+ max_retries: s.max_retries,
819
+ ...(s.step_type === "condition" ? { expression: s.step_config?.expression } : {}),
820
+ ...(s.step_type === "tool" ? { tool_name: s.step_config?.tool_name } : {}),
821
+ ...(s.step_type === "delay" ? { seconds: s.step_config?.seconds } : {}),
822
+ },
823
+ }));
824
+ // Edges come from three sources: linear on_success/on_failure pointers,
+ // condition true/false branches, and parallel fan-out children.
+ const edges = [];
825
+ for (const s of steps || []) {
826
+ if (s.on_success)
827
+ edges.push({ from: s.step_key, to: s.on_success, type: "success" });
828
+ if (s.on_failure)
829
+ edges.push({ from: s.step_key, to: s.on_failure, type: "failure" });
830
+ // Handle condition branches
831
+ const cfg = s.step_config;
832
+ if (s.step_type === "condition") {
833
+ if (cfg?.on_true)
834
+ edges.push({ from: s.step_key, to: cfg.on_true, type: "true" });
835
+ if (cfg?.on_false)
836
+ edges.push({ from: s.step_key, to: cfg.on_false, type: "false" });
837
+ }
838
+ // Handle parallel children
839
+ const parallelKeys = cfg?.step_keys || cfg?.child_steps;
840
+ if (s.step_type === "parallel" && Array.isArray(parallelKeys)) {
841
+ for (const childKey of parallelKeys) {
842
+ edges.push({ from: s.step_key, to: childKey, type: "parallel" });
843
+ }
844
+ }
845
+ }
846
+ // If run_id provided, overlay live status on nodes
+ // NOTE(review): unlike other run-scoped reads, this step-run overlay query
+ // is not store-checked — rows are keyed by run_id only; verify RLS covers it.
847
+ let nodeStatus = {};
848
+ if (args.run_id) {
849
+ const { data: stepRuns } = await supabase.from("workflow_step_runs")
850
+ .select("step_key, status, duration_ms, error_message")
851
+ .eq("run_id", args.run_id);
852
+ for (const sr of stepRuns || []) {
853
+ nodeStatus[sr.step_key] = { status: sr.status, duration_ms: sr.duration_ms, ...(sr.error_message ? { error: sr.error_message } : {}) };
854
+ }
855
+ }
856
+ return { success: true, data: { nodes, edges, node_status: Object.keys(nodeStatus).length ? nodeStatus : undefined } };
857
+ }
858
+ // ================================================================
859
+ // Schedule management
860
+ // ================================================================
861
+ case "set_schedule": {
+ // Three modes: run_at → one-time schedule; cron_expression → recurring;
+ // neither → clear any existing schedule. run_at wins when both are given.
862
+ if (!args.workflow_id)
863
+ return { success: false, error: "workflow_id required" };
864
+ const cronExpr = args.cron_expression;
865
+ const runAt = args.run_at;
866
+ if (runAt) {
867
+ // One-time schedule: run once at a specific datetime
868
+ const runAtDate = new Date(runAt);
869
+ if (isNaN(runAtDate.getTime()))
870
+ return { success: false, error: `Invalid run_at datetime: ${runAt}` };
871
+ if (runAtDate <= new Date())
872
+ return { success: false, error: "run_at must be in the future" };
873
+ const { error } = await supabase.from("workflows").update({
874
+ cron_expression: null,
875
+ next_run_at: runAtDate.toISOString(),
876
+ trigger_type: "schedule",
877
+ timezone: args.timezone || "UTC",
878
+ is_active: true,
879
+ status: "active",
880
+ }).eq("id", args.workflow_id).eq("store_id", sid);
881
+ return error ? { success: false, error: error.message } : { success: true, data: { one_time: true, run_at: runAtDate.toISOString() } };
882
+ }
883
+ else if (cronExpr) {
884
+ // Recurring cron schedule
885
+ const next = getNextCronTime(cronExpr);
886
+ if (!next)
887
+ return { success: false, error: `Invalid cron expression: ${cronExpr}` };
888
+ // NOTE(review): unlike the run_at branch above, this does NOT set
+ // is_active/status — an inactive workflow given a cron schedule may
+ // never fire; confirm whether that asymmetry is intentional.
+ const { error } = await supabase.from("workflows").update({
889
+ cron_expression: cronExpr,
890
+ next_run_at: next.toISOString(),
891
+ trigger_type: "schedule",
892
+ timezone: args.timezone || "UTC",
893
+ }).eq("id", args.workflow_id).eq("store_id", sid);
894
+ return error ? { success: false, error: error.message } : { success: true, data: { cron: cronExpr, next_run_at: next.toISOString() } };
895
+ }
896
+ else {
897
+ // Clear schedule
898
+ const { error } = await supabase.from("workflows").update({
899
+ cron_expression: null, next_run_at: null,
900
+ }).eq("id", args.workflow_id).eq("store_id", sid);
901
+ return error ? { success: false, error: error.message } : { success: true, data: { schedule_cleared: true } };
902
+ }
903
+ }
904
+ // ================================================================
905
+ // Event bus — fire events & manage subscriptions
906
+ // ================================================================
907
+ case "fire_event": {
+ // Publish an event onto the store's bus via RPC; subscription fan-out
+ // happens downstream, not here.
908
+ if (!args.event_type)
909
+ return { success: false, error: "event_type required" };
910
+ const { data: evtId, error: evtErr } = await supabase.rpc("fire_event", {
911
+ p_store_id: sid,
912
+ p_event_type: args.event_type,
913
+ p_event_payload: args.event_payload || {},
914
+ p_source: args.source || "workflow_tool",
915
+ });
916
+ return evtErr ? { success: false, error: evtErr.message } : { success: true, data: { event_id: evtId } };
917
+ }
918
+ case "list_subscriptions": {
+ // List event→workflow subscriptions for this store, newest first.
919
+ let q = supabase.from("workflow_event_subscriptions")
920
+ .select("id, store_id, workflow_id, event_type, filter_expression, is_active, created_at")
921
+ .eq("store_id", sid).order("created_at", { ascending: false });
922
+ if (args.event_type)
923
+ q = q.eq("event_type", args.event_type);
924
+ if (args.workflow_id)
925
+ q = q.eq("workflow_id", args.workflow_id);
926
+ const { data, error } = await q.limit(args.limit || 50);
927
+ return error ? { success: false, error: error.message } : { success: true, data };
928
+ }
929
+ case "create_subscription": {
+ // Subscribe a workflow to an event type; filter_expression is optional.
+ // NOTE(review): args.workflow_id is not verified to belong to this store
+ // here (unlike the IDOR-checked cases above) — confirm the table's
+ // constraints/RLS prevent subscribing another store's workflow.
930
+ if (!args.workflow_id)
931
+ return { success: false, error: "workflow_id required" };
932
+ if (!args.event_type)
933
+ return { success: false, error: "event_type required" };
934
+ const { data: sub, error: subErr } = await supabase.from("workflow_event_subscriptions")
935
+ .insert({
936
+ store_id: sid,
937
+ workflow_id: args.workflow_id,
938
+ event_type: args.event_type,
939
+ filter_expression: args.filter_expression || null,
940
+ is_active: args.is_active !== false,
941
+ })
942
+ .select().single();
943
+ return subErr ? { success: false, error: subErr.message } : { success: true, data: sub };
944
+ }
945
+ case "delete_subscription": {
+ // Store-scoped hard delete; zero matched rows still reports deleted: true.
946
+ if (!args.subscription_id)
947
+ return { success: false, error: "subscription_id required" };
948
+ const { error: delErr } = await supabase.from("workflow_event_subscriptions")
949
+ .delete().eq("id", args.subscription_id).eq("store_id", sid);
950
+ return delErr ? { success: false, error: delErr.message } : { success: true, data: { deleted: true } };
951
+ }
952
+ case "list_events": {
+ // Browse the raw event journal for this store with optional filters.
953
+ let q = supabase.from("automation_events")
954
+ .select("id, store_id, event_type, event_payload, source, status, processed_at, error_message, created_at")
955
+ .eq("store_id", sid).order("created_at", { ascending: false });
956
+ if (args.event_type)
957
+ q = q.eq("event_type", args.event_type);
958
+ if (args.status)
959
+ q = q.eq("status", args.status);
960
+ const { data, error } = await q.limit(args.limit || 50);
961
+ return error ? { success: false, error: error.message } : { success: true, data };
962
+ }
963
+ case "list_tools": {
964
+ const { data, error } = await supabase.from("ai_tool_registry")
965
+ .select("name, description, category, definition")
966
+ .eq("is_active", true)
967
+ .or("tool_mode.is.null,tool_mode.neq.code")
968
+ .order("category").order("name");
969
+ if (error)
970
+ return { success: false, error: error.message };
971
+ const tools = (data || []).map((row) => {
972
+ const def = row.definition;
973
+ const inputSchema = def?.input_schema;
974
+ const actionProp = inputSchema?.properties?.action;
975
+ const actions = actionProp?.enum || [];
976
+ return {
977
+ name: row.name,
978
+ description: row.description || def?.description || "",
979
+ category: row.category || "other",
980
+ actions,
981
+ input_schema: inputSchema || null,
982
+ };
983
+ });
984
+ return { success: true, data: { tools } };
985
+ }
986
+ default:
987
+ return { success: false, error: `Unknown workflow action: ${action}` };
988
+ }
989
+ }