@sudocode-ai/local-server 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/execution/executors/agent-executor-wrapper.d.ts.map +1 -1
- package/dist/execution/executors/agent-executor-wrapper.js +57 -2
- package/dist/execution/executors/agent-executor-wrapper.js.map +1 -1
- package/dist/execution/process/builders/claude.d.ts.map +1 -1
- package/dist/execution/process/builders/claude.js +32 -1
- package/dist/execution/process/builders/claude.js.map +1 -1
- package/dist/execution/worktree/config.js +1 -1
- package/dist/execution/worktree/config.js.map +1 -1
- package/dist/execution/worktree/git-cli.d.ts +48 -0
- package/dist/execution/worktree/git-cli.d.ts.map +1 -1
- package/dist/execution/worktree/git-cli.js +81 -0
- package/dist/execution/worktree/git-cli.js.map +1 -1
- package/dist/execution/worktree/types.d.ts.map +1 -1
- package/dist/execution/worktree/types.js.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +17 -4
- package/dist/index.js.map +1 -1
- package/dist/public/assets/index-Nz4IjDwB.css +1 -0
- package/dist/public/assets/index-Z8yftXvD.js +824 -0
- package/dist/public/assets/index-Z8yftXvD.js.map +1 -0
- package/dist/public/assets/{react-vendor-DiL5hC7l.js → react-vendor-5f1Wq1qs.js} +5 -5
- package/dist/public/assets/{react-vendor-DiL5hC7l.js.map → react-vendor-5f1Wq1qs.js.map} +1 -1
- package/dist/public/assets/{ui-vendor-B4WMPEfa.js → ui-vendor-BDDPoYki.js} +2 -2
- package/dist/public/assets/{ui-vendor-B4WMPEfa.js.map → ui-vendor-BDDPoYki.js.map} +1 -1
- package/dist/public/index.html +4 -4
- package/dist/routes/executions.d.ts.map +1 -1
- package/dist/routes/executions.js +3 -1
- package/dist/routes/executions.js.map +1 -1
- package/dist/routes/issues.d.ts.map +1 -1
- package/dist/routes/issues.js +13 -0
- package/dist/routes/issues.js.map +1 -1
- package/dist/routes/specs.d.ts.map +1 -1
- package/dist/routes/specs.js +14 -0
- package/dist/routes/specs.js.map +1 -1
- package/dist/routes/workflows.d.ts +8 -0
- package/dist/routes/workflows.d.ts.map +1 -0
- package/dist/routes/workflows.js +1729 -0
- package/dist/routes/workflows.js.map +1 -0
- package/dist/services/execution-event-callbacks.d.ts +73 -0
- package/dist/services/execution-event-callbacks.d.ts.map +1 -0
- package/dist/services/execution-event-callbacks.js +82 -0
- package/dist/services/execution-event-callbacks.js.map +1 -0
- package/dist/services/execution-lifecycle.d.ts +38 -2
- package/dist/services/execution-lifecycle.d.ts.map +1 -1
- package/dist/services/execution-lifecycle.js +94 -23
- package/dist/services/execution-lifecycle.js.map +1 -1
- package/dist/services/execution-service.d.ts +31 -3
- package/dist/services/execution-service.d.ts.map +1 -1
- package/dist/services/execution-service.js +161 -34
- package/dist/services/execution-service.js.map +1 -1
- package/dist/services/executions.d.ts +1 -0
- package/dist/services/executions.d.ts.map +1 -1
- package/dist/services/executions.js +4 -0
- package/dist/services/executions.js.map +1 -1
- package/dist/services/project-context.d.ts +25 -0
- package/dist/services/project-context.d.ts.map +1 -1
- package/dist/services/project-context.js +53 -3
- package/dist/services/project-context.js.map +1 -1
- package/dist/services/project-manager.d.ts +7 -0
- package/dist/services/project-manager.d.ts.map +1 -1
- package/dist/services/project-manager.js +108 -13
- package/dist/services/project-manager.js.map +1 -1
- package/dist/services/websocket.d.ts +10 -2
- package/dist/services/websocket.d.ts.map +1 -1
- package/dist/services/websocket.js +18 -0
- package/dist/services/websocket.js.map +1 -1
- package/dist/services/workflow-broadcast-service.d.ts +43 -0
- package/dist/services/workflow-broadcast-service.d.ts.map +1 -0
- package/dist/services/workflow-broadcast-service.js +145 -0
- package/dist/services/workflow-broadcast-service.js.map +1 -0
- package/dist/services/worktree-sync-service.d.ts +76 -4
- package/dist/services/worktree-sync-service.d.ts.map +1 -1
- package/dist/services/worktree-sync-service.js +264 -23
- package/dist/services/worktree-sync-service.js.map +1 -1
- package/dist/workflow/base-workflow-engine.d.ts +186 -0
- package/dist/workflow/base-workflow-engine.d.ts.map +1 -0
- package/dist/workflow/base-workflow-engine.js +549 -0
- package/dist/workflow/base-workflow-engine.js.map +1 -0
- package/dist/workflow/dependency-analyzer.d.ts +78 -0
- package/dist/workflow/dependency-analyzer.d.ts.map +1 -0
- package/dist/workflow/dependency-analyzer.js +264 -0
- package/dist/workflow/dependency-analyzer.js.map +1 -0
- package/dist/workflow/engines/orchestrator-engine.d.ts +237 -0
- package/dist/workflow/engines/orchestrator-engine.d.ts.map +1 -0
- package/dist/workflow/engines/orchestrator-engine.js +749 -0
- package/dist/workflow/engines/orchestrator-engine.js.map +1 -0
- package/dist/workflow/engines/sequential-engine.d.ts +276 -0
- package/dist/workflow/engines/sequential-engine.d.ts.map +1 -0
- package/dist/workflow/engines/sequential-engine.js +1110 -0
- package/dist/workflow/engines/sequential-engine.js.map +1 -0
- package/dist/workflow/index.d.ts +15 -0
- package/dist/workflow/index.d.ts.map +1 -0
- package/dist/workflow/index.js +22 -0
- package/dist/workflow/index.js.map +1 -0
- package/dist/workflow/mcp/api-client.d.ts +103 -0
- package/dist/workflow/mcp/api-client.d.ts.map +1 -0
- package/dist/workflow/mcp/api-client.js +193 -0
- package/dist/workflow/mcp/api-client.js.map +1 -0
- package/dist/workflow/mcp/index.d.ts +16 -0
- package/dist/workflow/mcp/index.d.ts.map +1 -0
- package/dist/workflow/mcp/index.js +114 -0
- package/dist/workflow/mcp/index.js.map +1 -0
- package/dist/workflow/mcp/server.d.ts +85 -0
- package/dist/workflow/mcp/server.d.ts.map +1 -0
- package/dist/workflow/mcp/server.js +520 -0
- package/dist/workflow/mcp/server.js.map +1 -0
- package/dist/workflow/mcp/tools/escalation.d.ts +36 -0
- package/dist/workflow/mcp/tools/escalation.d.ts.map +1 -0
- package/dist/workflow/mcp/tools/escalation.js +47 -0
- package/dist/workflow/mcp/tools/escalation.js.map +1 -0
- package/dist/workflow/mcp/tools/execution.d.ts +59 -0
- package/dist/workflow/mcp/tools/execution.d.ts.map +1 -0
- package/dist/workflow/mcp/tools/execution.js +67 -0
- package/dist/workflow/mcp/tools/execution.js.map +1 -0
- package/dist/workflow/mcp/tools/inspection.d.ts +82 -0
- package/dist/workflow/mcp/tools/inspection.d.ts.map +1 -0
- package/dist/workflow/mcp/tools/inspection.js +57 -0
- package/dist/workflow/mcp/tools/inspection.js.map +1 -0
- package/dist/workflow/mcp/tools/workflow.d.ts +59 -0
- package/dist/workflow/mcp/tools/workflow.d.ts.map +1 -0
- package/dist/workflow/mcp/tools/workflow.js +40 -0
- package/dist/workflow/mcp/tools/workflow.js.map +1 -0
- package/dist/workflow/mcp/types.d.ts +345 -0
- package/dist/workflow/mcp/types.d.ts.map +1 -0
- package/dist/workflow/mcp/types.js +7 -0
- package/dist/workflow/mcp/types.js.map +1 -0
- package/dist/workflow/services/prompt-builder.d.ts +36 -0
- package/dist/workflow/services/prompt-builder.d.ts.map +1 -0
- package/dist/workflow/services/prompt-builder.js +329 -0
- package/dist/workflow/services/prompt-builder.js.map +1 -0
- package/dist/workflow/services/wakeup-service.d.ts +262 -0
- package/dist/workflow/services/wakeup-service.d.ts.map +1 -0
- package/dist/workflow/services/wakeup-service.js +809 -0
- package/dist/workflow/services/wakeup-service.js.map +1 -0
- package/dist/workflow/workflow-engine.d.ts +221 -0
- package/dist/workflow/workflow-engine.d.ts.map +1 -0
- package/dist/workflow/workflow-engine.js +94 -0
- package/dist/workflow/workflow-engine.js.map +1 -0
- package/dist/workflow/workflow-event-emitter.d.ts +278 -0
- package/dist/workflow/workflow-event-emitter.d.ts.map +1 -0
- package/dist/workflow/workflow-event-emitter.js +259 -0
- package/dist/workflow/workflow-event-emitter.js.map +1 -0
- package/package.json +8 -6
- package/dist/public/assets/index-DV9Tbujb.css +0 -1
- package/dist/public/assets/index-DcDX9-Ad.js +0 -740
- package/dist/public/assets/index-DcDX9-Ad.js.map +0 -1
|
@@ -0,0 +1,1729 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflows API routes (mapped to /api/workflows)
|
|
3
|
+
*
|
|
4
|
+
* Note: All routes require X-Project-ID header via requireProject() middleware
|
|
5
|
+
*/
|
|
6
|
+
import { Router } from "express";
|
|
7
|
+
import { randomUUID } from "crypto";
|
|
8
|
+
import { execSync } from "child_process";
|
|
9
|
+
import * as fs from "fs";
|
|
10
|
+
import { WorkflowNotFoundError, WorkflowStepNotFoundError, WorkflowStateError, WorkflowCycleError, } from "../workflow/workflow-engine.js";
|
|
11
|
+
import { broadcastWorkflowUpdate, broadcastWorkflowStepUpdate, } from "../services/websocket.js";
|
|
12
|
+
/**
 * Resolve a workflow engine of the given type from the request's project.
 *
 * @param {object} req - Express request (expects an optional `project` with a
 *   `getWorkflowEngine(type)` method attached by requireProject() middleware).
 * @param {string} [engineType="sequential"] - Which engine flavor to fetch.
 * @returns {object|null} The engine instance, or null when no project is
 *   attached or the project has no engine of that type.
 */
function getEngine(req, engineType = "sequential") {
    const project = req.project;
    if (!project) {
        return null;
    }
    const engine = project.getWorkflowEngine(engineType);
    return engine ?? null;
}
|
|
18
|
+
/**
 * Resolve the engine for an existing workflow by reading the engine type
 * stored in its persisted config.
 *
 * Falls back to the sequential engine when the stored config cannot be
 * parsed; returns null when the project database is unavailable or the
 * workflow row does not exist.
 *
 * @param {object} req - Express request carrying `project.db`.
 * @param {string} workflowId - ID of the workflow to look up.
 * @returns {object|null} The matching engine, or null.
 */
function getEngineForWorkflow(req, workflowId) {
    const db = req.project?.db;
    if (!db) {
        return null;
    }
    const row = db
        .prepare("SELECT config FROM workflows WHERE id = ?")
        .get(workflowId);
    if (!row) {
        return null;
    }
    let engineType;
    try {
        const parsedConfig = JSON.parse(row.config);
        engineType = parsedConfig.engineType ?? "sequential";
    }
    catch {
        // Corrupt or unparseable config: fall back to the default engine.
        console.log(`[workflows] getEngineForWorkflow: workflow=${workflowId}, defaulting to sequential`);
        return getEngine(req, "sequential");
    }
    console.log(`[workflows] getEngineForWorkflow: workflow=${workflowId}, engineType=${engineType}`);
    return getEngine(req, engineType);
}
|
|
41
|
+
/**
 * Translate workflow-domain errors into HTTP responses.
 *
 * Mapping:
 * - WorkflowNotFoundError / WorkflowStepNotFoundError -> 404
 * - WorkflowStateError -> 400
 * - WorkflowCycleError -> 400 (response additionally carries `cycles`)
 * - anything else -> 500 with the error text in `error_data`
 *
 * @param {unknown} error - The caught error.
 * @param {object} res - Express response object.
 */
function handleWorkflowError(error, res) {
    // Both "workflow missing" and "step missing" map to an identical 404 body.
    if (error instanceof WorkflowNotFoundError ||
        error instanceof WorkflowStepNotFoundError) {
        res.status(404).json({
            success: false,
            data: null,
            message: error.message,
        });
        return;
    }
    if (error instanceof WorkflowStateError) {
        res.status(400).json({
            success: false,
            data: null,
            message: error.message,
        });
        return;
    }
    if (error instanceof WorkflowCycleError) {
        res.status(400).json({
            success: false,
            data: null,
            message: error.message,
            cycles: error.cycles,
        });
        return;
    }
    // Generic error
    console.error("Workflow error:", error);
    res.status(500).json({
        success: false,
        data: null,
        error_data: error instanceof Error ? error.message : String(error),
        message: "Internal server error",
    });
}
|
|
87
|
+
export function createWorkflowsRouter() {
|
|
88
|
+
const router = Router();
|
|
89
|
+
/**
|
|
90
|
+
* GET /api/workflows - List all workflows
|
|
91
|
+
*
|
|
92
|
+
* Query parameters:
|
|
93
|
+
* - limit: number (default: 50)
|
|
94
|
+
* - offset: number (default: 0)
|
|
95
|
+
* - status: WorkflowStatus | WorkflowStatus[] (filter by status)
|
|
96
|
+
* - sortBy: 'created_at' | 'updated_at' (default: 'created_at')
|
|
97
|
+
* - order: 'asc' | 'desc' (default: 'desc')
|
|
98
|
+
*/
|
|
99
|
+
router.get("/", async (req, res) => {
|
|
100
|
+
try {
|
|
101
|
+
// Use sequential engine for listing (just for the engine availability check)
|
|
102
|
+
const engine = getEngine(req, "sequential");
|
|
103
|
+
if (!engine) {
|
|
104
|
+
res.status(503).json({
|
|
105
|
+
success: false,
|
|
106
|
+
data: null,
|
|
107
|
+
message: "Workflow engine not available",
|
|
108
|
+
});
|
|
109
|
+
return;
|
|
110
|
+
}
|
|
111
|
+
// Parse query parameters
|
|
112
|
+
const limit = req.query.limit
|
|
113
|
+
? parseInt(req.query.limit, 10)
|
|
114
|
+
: 50;
|
|
115
|
+
const offset = req.query.offset
|
|
116
|
+
? parseInt(req.query.offset, 10)
|
|
117
|
+
: 0;
|
|
118
|
+
const sortBy = req.query.sortBy || "created_at";
|
|
119
|
+
const order = req.query.order || "desc";
|
|
120
|
+
// Parse status filter (can be single value or array)
|
|
121
|
+
let statusFilter;
|
|
122
|
+
if (req.query.status) {
|
|
123
|
+
const statusParam = req.query.status;
|
|
124
|
+
if (Array.isArray(statusParam)) {
|
|
125
|
+
statusFilter = statusParam;
|
|
126
|
+
}
|
|
127
|
+
else {
|
|
128
|
+
statusFilter = [statusParam];
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
// Query workflows directly from database
|
|
132
|
+
const db = req.project.db;
|
|
133
|
+
let query = `
|
|
134
|
+
SELECT * FROM workflows
|
|
135
|
+
WHERE 1=1
|
|
136
|
+
`;
|
|
137
|
+
const params = [];
|
|
138
|
+
if (statusFilter && statusFilter.length > 0) {
|
|
139
|
+
const placeholders = statusFilter.map(() => "?").join(", ");
|
|
140
|
+
query += ` AND status IN (${placeholders})`;
|
|
141
|
+
params.push(...statusFilter);
|
|
142
|
+
}
|
|
143
|
+
// Validate sortBy to prevent SQL injection
|
|
144
|
+
const validSortColumns = ["created_at", "updated_at"];
|
|
145
|
+
const sortColumn = validSortColumns.includes(sortBy)
|
|
146
|
+
? sortBy
|
|
147
|
+
: "created_at";
|
|
148
|
+
const sortOrder = order === "asc" ? "ASC" : "DESC";
|
|
149
|
+
query += ` ORDER BY ${sortColumn} ${sortOrder}`;
|
|
150
|
+
query += ` LIMIT ? OFFSET ?`;
|
|
151
|
+
params.push(limit, offset);
|
|
152
|
+
const rows = db.prepare(query).all(...params);
|
|
153
|
+
// Parse JSON fields
|
|
154
|
+
const workflows = rows.map((row) => ({
|
|
155
|
+
id: row.id,
|
|
156
|
+
title: row.title,
|
|
157
|
+
source: JSON.parse(row.source),
|
|
158
|
+
status: row.status,
|
|
159
|
+
steps: JSON.parse(row.steps || "[]"),
|
|
160
|
+
worktreePath: row.worktree_path,
|
|
161
|
+
branchName: row.branch_name,
|
|
162
|
+
baseBranch: row.base_branch,
|
|
163
|
+
currentStepIndex: row.current_step_index,
|
|
164
|
+
orchestratorExecutionId: row.orchestrator_execution_id,
|
|
165
|
+
orchestratorSessionId: row.orchestrator_session_id,
|
|
166
|
+
config: JSON.parse(row.config),
|
|
167
|
+
createdAt: row.created_at,
|
|
168
|
+
updatedAt: row.updated_at,
|
|
169
|
+
startedAt: row.started_at,
|
|
170
|
+
completedAt: row.completed_at,
|
|
171
|
+
}));
|
|
172
|
+
res.json({
|
|
173
|
+
success: true,
|
|
174
|
+
data: workflows,
|
|
175
|
+
});
|
|
176
|
+
}
|
|
177
|
+
catch (error) {
|
|
178
|
+
handleWorkflowError(error, res);
|
|
179
|
+
}
|
|
180
|
+
});
|
|
181
|
+
    /**
     * POST /api/workflows - Create a new workflow
     *
     * Request body:
     * - source: WorkflowSource (required)
     * - config: Partial<WorkflowConfig>
     *
     * Responds 201 with the created workflow, 400 on validation failure,
     * 503 when no engine of the requested type is available.
     */
    router.post("/", async (req, res) => {
        try {
            const { source, config } = req.body;
            // Determine engine type from config (default to sequential)
            const engineType = config?.engineType ?? "sequential";
            // Validate: "goal" source requires orchestrator engine
            if (source?.type === "goal" && engineType !== "orchestrator") {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "Goal-based workflows require the orchestrator engine. Set config.engineType to 'orchestrator'.",
                });
                return;
            }
            const engine = getEngine(req, engineType);
            if (!engine) {
                res.status(503).json({
                    success: false,
                    data: null,
                    message: `Workflow engine not available for type: ${engineType}`,
                });
                return;
            }
            // Validate source presence before anything type-specific.
            if (!source || !source.type) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "source is required and must have a type",
                });
                return;
            }
            // Validate source type against the supported discriminants.
            const validSourceTypes = ["spec", "issues", "root_issue", "goal"];
            if (!validSourceTypes.includes(source.type)) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: `Invalid source type: ${source.type}. Must be one of: ${validSourceTypes.join(", ")}`,
                });
                return;
            }
            // Validate source-specific fields.
            // NOTE(review): these checks use the `in` operator, so a key that is
            // present but empty/null still passes — presumably validated further
            // by the engine; confirm if strict validation is expected here.
            if (source.type === "spec" && !("specId" in source)) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "source.specId is required for spec source type",
                });
                return;
            }
            if (source.type === "issues" && !("issueIds" in source)) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "source.issueIds is required for issues source type",
                });
                return;
            }
            if (source.type === "root_issue" && !("issueId" in source)) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "source.issueId is required for root_issue source type",
                });
                return;
            }
            if (source.type === "goal" && !("goal" in source)) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: "source.goal is required for goal source type",
                });
                return;
            }
            // Create workflow via the selected engine.
            const workflow = await engine.createWorkflow(source, config);
            // Broadcast creation to connected websocket clients.
            broadcastWorkflowUpdate(req.project.id, workflow.id, "created", workflow);
            res.status(201).json({
                success: true,
                data: workflow,
            });
        }
        catch (error) {
            handleWorkflowError(error, res);
        }
    });
|
|
276
|
+
/**
|
|
277
|
+
* GET /api/workflows/:id - Get a specific workflow
|
|
278
|
+
*/
|
|
279
|
+
router.get("/:id", async (req, res) => {
|
|
280
|
+
try {
|
|
281
|
+
const { id } = req.params;
|
|
282
|
+
const engine = getEngineForWorkflow(req, id);
|
|
283
|
+
if (!engine) {
|
|
284
|
+
// Try sequential engine as fallback for listing
|
|
285
|
+
const fallbackEngine = getEngine(req, "sequential");
|
|
286
|
+
if (!fallbackEngine) {
|
|
287
|
+
res.status(503).json({
|
|
288
|
+
success: false,
|
|
289
|
+
data: null,
|
|
290
|
+
message: "Workflow engine not available",
|
|
291
|
+
});
|
|
292
|
+
return;
|
|
293
|
+
}
|
|
294
|
+
const workflow = await fallbackEngine.getWorkflow(id);
|
|
295
|
+
if (!workflow) {
|
|
296
|
+
res.status(404).json({
|
|
297
|
+
success: false,
|
|
298
|
+
data: null,
|
|
299
|
+
message: `Workflow not found: ${id}`,
|
|
300
|
+
});
|
|
301
|
+
return;
|
|
302
|
+
}
|
|
303
|
+
res.json({ success: true, data: workflow });
|
|
304
|
+
return;
|
|
305
|
+
}
|
|
306
|
+
const workflow = await engine.getWorkflow(id);
|
|
307
|
+
if (!workflow) {
|
|
308
|
+
res.status(404).json({
|
|
309
|
+
success: false,
|
|
310
|
+
data: null,
|
|
311
|
+
message: `Workflow not found: ${id}`,
|
|
312
|
+
});
|
|
313
|
+
return;
|
|
314
|
+
}
|
|
315
|
+
res.json({
|
|
316
|
+
success: true,
|
|
317
|
+
data: workflow,
|
|
318
|
+
});
|
|
319
|
+
}
|
|
320
|
+
catch (error) {
|
|
321
|
+
handleWorkflowError(error, res);
|
|
322
|
+
}
|
|
323
|
+
});
|
|
324
|
+
    /**
     * DELETE /api/workflows/:id - Delete a workflow
     *
     * Query parameters:
     * - deleteWorktree: if "true", also delete the workflow's worktree
     * - deleteBranch: if "true", also delete the workflow's branch
     *
     * Running/paused workflows are cancelled first. Worktree/branch cleanup is
     * best-effort: failures are collected into `cleanupErrors` in the response
     * rather than aborting the deletion.
     */
    router.delete("/:id", async (req, res) => {
        try {
            const { id } = req.params;
            const { deleteWorktree, deleteBranch } = req.query;
            const engine = getEngineForWorkflow(req, id);
            if (!engine) {
                res.status(503).json({
                    success: false,
                    data: null,
                    message: "Workflow engine not available",
                });
                return;
            }
            // Check if workflow exists
            const workflow = await engine.getWorkflow(id);
            if (!workflow) {
                res.status(404).json({
                    success: false,
                    data: null,
                    message: `Workflow not found: ${id}`,
                });
                return;
            }
            // Cancel if running before deleting
            if (workflow.status === "running" || workflow.status === "paused") {
                await engine.cancelWorkflow(id);
            }
            // Track cleanup results
            const cleanupResults = {};
            const cleanupErrors = [];
            const repoPath = req.project.path;
            // Delete worktree if requested
            if (deleteWorktree === "true" && workflow.worktreePath) {
                try {
                    // Remove git worktree registration.
                    // NOTE(review): the path is interpolated into a shell command; a
                    // worktree path containing `"` or `$(...)` would break or inject.
                    // Paths come from this server's own DB, but execFileSync with an
                    // argument array would be safer — confirm and consider changing.
                    execSync(`git worktree remove --force "${workflow.worktreePath}"`, {
                        cwd: repoPath,
                        stdio: "pipe",
                    });
                    cleanupResults.worktreeDeleted = true;
                    console.log(`[workflows/:id] Deleted worktree: ${workflow.worktreePath}`);
                }
                catch (worktreeError) {
                    // Worktree might already be removed, try to clean up the directory
                    try {
                        if (fs.existsSync(workflow.worktreePath)) {
                            fs.rmSync(workflow.worktreePath, { recursive: true, force: true });
                            cleanupResults.worktreeDeleted = true;
                            console.log(`[workflows/:id] Removed worktree directory: ${workflow.worktreePath}`);
                        }
                        // Prune stale worktree entries
                        execSync("git worktree prune", { cwd: repoPath, stdio: "pipe" });
                    }
                    catch (cleanupError) {
                        // Report the ORIGINAL worktree error, not the fallback's error.
                        const errorMsg = worktreeError instanceof Error
                            ? worktreeError.message
                            : String(worktreeError);
                        cleanupErrors.push(`Failed to delete worktree: ${errorMsg}`);
                        console.error(`[workflows/:id] Failed to delete worktree ${workflow.worktreePath}:`, worktreeError);
                    }
                }
            }
            // Delete branch if requested
            if (deleteBranch === "true" && workflow.branchName) {
                try {
                    // Force-delete the branch (it may be unmerged).
                    execSync(`git branch -D "${workflow.branchName}"`, {
                        cwd: repoPath,
                        stdio: "pipe",
                    });
                    cleanupResults.branchDeleted = true;
                    console.log(`[workflows/:id] Deleted branch: ${workflow.branchName}`);
                }
                catch (branchError) {
                    const errorMsg = branchError instanceof Error
                        ? branchError.message
                        : String(branchError);
                    cleanupErrors.push(`Failed to delete branch: ${errorMsg}`);
                    console.error(`[workflows/:id] Failed to delete branch ${workflow.branchName}:`, branchError);
                }
            }
            if (cleanupErrors.length > 0) {
                cleanupResults.cleanupErrors = cleanupErrors;
            }
            // Delete from database: events first (they reference the workflow).
            const db = req.project.db;
            db.prepare("DELETE FROM workflow_events WHERE workflow_id = ?").run(id);
            db.prepare("DELETE FROM workflows WHERE id = ?").run(id);
            // Broadcast deletion
            broadcastWorkflowUpdate(req.project.id, id, "deleted", { id });
            res.json({
                success: true,
                data: { id, deleted: true, ...cleanupResults },
            });
        }
        catch (error) {
            handleWorkflowError(error, res);
        }
    });
|
|
429
|
+
/**
|
|
430
|
+
* POST /api/workflows/:id/start - Start a pending workflow
|
|
431
|
+
*/
|
|
432
|
+
router.post("/:id/start", async (req, res) => {
|
|
433
|
+
try {
|
|
434
|
+
const { id } = req.params;
|
|
435
|
+
const engine = getEngineForWorkflow(req, id);
|
|
436
|
+
if (!engine) {
|
|
437
|
+
res.status(503).json({
|
|
438
|
+
success: false,
|
|
439
|
+
data: null,
|
|
440
|
+
message: "Workflow engine not available",
|
|
441
|
+
});
|
|
442
|
+
return;
|
|
443
|
+
}
|
|
444
|
+
await engine.startWorkflow(id);
|
|
445
|
+
const workflow = await engine.getWorkflow(id);
|
|
446
|
+
broadcastWorkflowUpdate(req.project.id, id, "started", workflow);
|
|
447
|
+
res.json({
|
|
448
|
+
success: true,
|
|
449
|
+
data: workflow,
|
|
450
|
+
});
|
|
451
|
+
}
|
|
452
|
+
catch (error) {
|
|
453
|
+
handleWorkflowError(error, res);
|
|
454
|
+
}
|
|
455
|
+
});
|
|
456
|
+
/**
|
|
457
|
+
* POST /api/workflows/:id/pause - Pause a running workflow
|
|
458
|
+
*/
|
|
459
|
+
router.post("/:id/pause", async (req, res) => {
|
|
460
|
+
try {
|
|
461
|
+
const { id } = req.params;
|
|
462
|
+
const engine = getEngineForWorkflow(req, id);
|
|
463
|
+
if (!engine) {
|
|
464
|
+
res.status(503).json({
|
|
465
|
+
success: false,
|
|
466
|
+
data: null,
|
|
467
|
+
message: "Workflow engine not available",
|
|
468
|
+
});
|
|
469
|
+
return;
|
|
470
|
+
}
|
|
471
|
+
await engine.pauseWorkflow(id);
|
|
472
|
+
const workflow = await engine.getWorkflow(id);
|
|
473
|
+
broadcastWorkflowUpdate(req.project.id, id, "paused", workflow);
|
|
474
|
+
res.json({
|
|
475
|
+
success: true,
|
|
476
|
+
data: workflow,
|
|
477
|
+
});
|
|
478
|
+
}
|
|
479
|
+
catch (error) {
|
|
480
|
+
handleWorkflowError(error, res);
|
|
481
|
+
}
|
|
482
|
+
});
|
|
483
|
+
/**
|
|
484
|
+
* POST /api/workflows/:id/resume - Resume a paused workflow
|
|
485
|
+
*
|
|
486
|
+
* Body:
|
|
487
|
+
* - message?: string - Optional message to send to the orchestrator on resume
|
|
488
|
+
*/
|
|
489
|
+
router.post("/:id/resume", async (req, res) => {
|
|
490
|
+
try {
|
|
491
|
+
const { id } = req.params;
|
|
492
|
+
const { message } = req.body;
|
|
493
|
+
const engine = getEngineForWorkflow(req, id);
|
|
494
|
+
if (!engine) {
|
|
495
|
+
res.status(503).json({
|
|
496
|
+
success: false,
|
|
497
|
+
data: null,
|
|
498
|
+
message: "Workflow engine not available",
|
|
499
|
+
});
|
|
500
|
+
return;
|
|
501
|
+
}
|
|
502
|
+
await engine.resumeWorkflow(id, message);
|
|
503
|
+
const workflow = await engine.getWorkflow(id);
|
|
504
|
+
broadcastWorkflowUpdate(req.project.id, id, "resumed", workflow);
|
|
505
|
+
res.json({
|
|
506
|
+
success: true,
|
|
507
|
+
data: workflow,
|
|
508
|
+
});
|
|
509
|
+
}
|
|
510
|
+
catch (error) {
|
|
511
|
+
handleWorkflowError(error, res);
|
|
512
|
+
}
|
|
513
|
+
});
|
|
514
|
+
/**
|
|
515
|
+
* POST /api/workflows/:id/cancel - Cancel a workflow
|
|
516
|
+
*/
|
|
517
|
+
router.post("/:id/cancel", async (req, res) => {
|
|
518
|
+
try {
|
|
519
|
+
const { id } = req.params;
|
|
520
|
+
const engine = getEngineForWorkflow(req, id);
|
|
521
|
+
if (!engine) {
|
|
522
|
+
res.status(503).json({
|
|
523
|
+
success: false,
|
|
524
|
+
data: null,
|
|
525
|
+
message: "Workflow engine not available",
|
|
526
|
+
});
|
|
527
|
+
return;
|
|
528
|
+
}
|
|
529
|
+
await engine.cancelWorkflow(id);
|
|
530
|
+
const workflow = await engine.getWorkflow(id);
|
|
531
|
+
broadcastWorkflowUpdate(req.project.id, id, "cancelled", workflow);
|
|
532
|
+
res.json({
|
|
533
|
+
success: true,
|
|
534
|
+
data: workflow,
|
|
535
|
+
});
|
|
536
|
+
}
|
|
537
|
+
catch (error) {
|
|
538
|
+
handleWorkflowError(error, res);
|
|
539
|
+
}
|
|
540
|
+
});
|
|
541
|
+
/**
|
|
542
|
+
* POST /api/workflows/:id/steps/:stepId/retry - Retry a failed step
|
|
543
|
+
*/
|
|
544
|
+
router.post("/:id/steps/:stepId/retry", async (req, res) => {
|
|
545
|
+
try {
|
|
546
|
+
const { id, stepId } = req.params;
|
|
547
|
+
const engine = getEngineForWorkflow(req, id);
|
|
548
|
+
if (!engine) {
|
|
549
|
+
res.status(503).json({
|
|
550
|
+
success: false,
|
|
551
|
+
data: null,
|
|
552
|
+
message: "Workflow engine not available",
|
|
553
|
+
});
|
|
554
|
+
return;
|
|
555
|
+
}
|
|
556
|
+
await engine.retryStep(id, stepId);
|
|
557
|
+
const workflow = await engine.getWorkflow(id);
|
|
558
|
+
const step = workflow?.steps.find((s) => s.id === stepId);
|
|
559
|
+
if (step) {
|
|
560
|
+
broadcastWorkflowStepUpdate(req.project.id, id, "started", {
|
|
561
|
+
workflow,
|
|
562
|
+
step,
|
|
563
|
+
});
|
|
564
|
+
}
|
|
565
|
+
res.json({
|
|
566
|
+
success: true,
|
|
567
|
+
data: workflow,
|
|
568
|
+
});
|
|
569
|
+
}
|
|
570
|
+
catch (error) {
|
|
571
|
+
handleWorkflowError(error, res);
|
|
572
|
+
}
|
|
573
|
+
});
|
|
574
|
+
/**
|
|
575
|
+
* POST /api/workflows/:id/steps/:stepId/skip - Skip a step
|
|
576
|
+
*
|
|
577
|
+
* Request body:
|
|
578
|
+
* - reason: string (optional)
|
|
579
|
+
*/
|
|
580
|
+
router.post("/:id/steps/:stepId/skip", async (req, res) => {
|
|
581
|
+
try {
|
|
582
|
+
const { id, stepId } = req.params;
|
|
583
|
+
const { reason } = req.body;
|
|
584
|
+
const engine = getEngineForWorkflow(req, id);
|
|
585
|
+
if (!engine) {
|
|
586
|
+
res.status(503).json({
|
|
587
|
+
success: false,
|
|
588
|
+
data: null,
|
|
589
|
+
message: "Workflow engine not available",
|
|
590
|
+
});
|
|
591
|
+
return;
|
|
592
|
+
}
|
|
593
|
+
await engine.skipStep(id, stepId, reason);
|
|
594
|
+
const workflow = await engine.getWorkflow(id);
|
|
595
|
+
const step = workflow?.steps.find((s) => s.id === stepId);
|
|
596
|
+
if (step) {
|
|
597
|
+
broadcastWorkflowStepUpdate(req.project.id, id, "skipped", {
|
|
598
|
+
workflow,
|
|
599
|
+
step,
|
|
600
|
+
reason,
|
|
601
|
+
});
|
|
602
|
+
}
|
|
603
|
+
res.json({
|
|
604
|
+
success: true,
|
|
605
|
+
data: workflow,
|
|
606
|
+
});
|
|
607
|
+
}
|
|
608
|
+
catch (error) {
|
|
609
|
+
handleWorkflowError(error, res);
|
|
610
|
+
}
|
|
611
|
+
});
|
|
612
|
+
/**
 * GET /api/workflows/:id/events - Get workflow event history
 *
 * Query parameters:
 * - limit: number (default: 100; malformed or negative values fall back to the default)
 * - offset: number (default: 0; malformed or negative values fall back to the default)
 *
 * Responds 503 when no workflow engine is available, 404 when the workflow
 * does not exist, otherwise the newest-first page of workflow_events rows
 * with their JSON payloads parsed.
 */
router.get("/:id/events", async (req, res) => {
    try {
        const { id } = req.params;
        const engine = getEngineForWorkflow(req, id);
        if (!engine) {
            res.status(503).json({
                success: false,
                data: null,
                message: "Workflow engine not available",
            });
            return;
        }
        // Check if workflow exists before querying its events
        const workflow = await engine.getWorkflow(id);
        if (!workflow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${id}`,
            });
            return;
        }
        // Parse pagination query parameters.
        // Fix: parseInt on a malformed value (e.g. ?limit=abc) yields NaN,
        // which was previously forwarded straight into the SQL LIMIT/OFFSET
        // clause. Guard with Number.isFinite and clamp to non-negative so bad
        // input degrades to the documented defaults instead of a query error.
        const parsedLimit = Number.parseInt(req.query.limit, 10);
        const limit = Number.isFinite(parsedLimit) && parsedLimit >= 0 ? parsedLimit : 100;
        const parsedOffset = Number.parseInt(req.query.offset, 10);
        const offset = Number.isFinite(parsedOffset) && parsedOffset >= 0 ? parsedOffset : 0;
        // Query events from database, newest first
        const db = req.project.db;
        const rows = db
            .prepare(`
      SELECT * FROM workflow_events
      WHERE workflow_id = ?
      ORDER BY created_at DESC
      LIMIT ? OFFSET ?
    `)
            .all(id, limit, offset);
        // Map snake_case DB columns to the camelCase wire shape and parse the
        // JSON payload column (empty payloads become {}).
        const events = rows.map((row) => ({
            id: row.id,
            workflowId: row.workflow_id,
            type: row.type,
            stepId: row.step_id,
            executionId: row.execution_id,
            payload: JSON.parse(row.payload || "{}"),
            createdAt: row.created_at,
            processedAt: row.processed_at,
        }));
        res.json({
            success: true,
            data: events,
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
678
|
+
/**
 * GET /api/workflows/:id/escalation - Get pending escalation for workflow
 *
 * A pending escalation is the newest 'escalation_requested' event whose
 * escalation_id has no matching 'escalation_resolved' event. Responds 404
 * when the workflow does not exist; otherwise always 200 with a
 * hasPendingEscalation flag and, when pending, the escalation details.
 */
router.get("/:id/escalation", async (req, res) => {
    try {
        const { id } = req.params;
        const db = req.project.db;
        // Verify the workflow exists before looking at its events.
        const exists = db
            .prepare("SELECT 1 FROM workflows WHERE id = ?")
            .get(id);
        if (!exists) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${id}`,
            });
            return;
        }
        // Newest requested escalation that has not yet been resolved
        // (same dedupe logic as the respond endpoint).
        const row = db
            .prepare(`
      SELECT payload FROM workflow_events
      WHERE workflow_id = ?
        AND type = 'escalation_requested'
        AND json_extract(payload, '$.escalation_id') NOT IN (
          SELECT json_extract(payload, '$.escalation_id')
          FROM workflow_events
          WHERE workflow_id = ?
            AND type = 'escalation_resolved'
        )
      ORDER BY created_at DESC
      LIMIT 1
    `)
            .get(id, id);
        if (!row) {
            res.json({
                success: true,
                data: { hasPendingEscalation: false },
            });
            return;
        }
        // Unpack the stored event payload into the response shape.
        const { escalation_id, message, options, context } = JSON.parse(row.payload);
        res.json({
            success: true,
            data: {
                hasPendingEscalation: true,
                escalation: {
                    requestId: escalation_id,
                    message,
                    options,
                    context,
                },
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
738
|
+
/**
 * POST /api/workflows/:id/escalation/respond - Respond to a pending escalation
 *
 * Resolves the newest unresolved escalation for the workflow by appending an
 * 'escalation_resolved' event, then notifies the engine (WebSocket broadcast
 * and, when supported, an orchestrator wakeup) and returns the workflow plus
 * the resolution details.
 *
 * Request body:
 * - action: 'approve' | 'reject' | 'custom' (required)
 * - message: string (optional)
 */
router.post("/:id/escalation/respond", async (req, res) => {
    try {
        const { id } = req.params;
        const { action, message } = req.body;
        // Validate action
        const validActions = ["approve", "reject", "custom"];
        if (!action || !validActions.includes(action)) {
            res.status(400).json({
                success: false,
                data: null,
                message: `action is required and must be one of: ${validActions.join(", ")}`,
            });
            return;
        }
        // Get workflow from database (full row is needed later for the response)
        const db = req.project.db;
        const workflowRow = db
            .prepare("SELECT * FROM workflows WHERE id = ?")
            .get(id);
        if (!workflowRow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${id}`,
            });
            return;
        }
        // Check for pending escalation by querying events: the newest
        // 'escalation_requested' whose escalation_id has no matching
        // 'escalation_resolved' event (same logic as the GET endpoint).
        const pendingEscalation = db
            .prepare(`
      SELECT payload FROM workflow_events
      WHERE workflow_id = ?
        AND type = 'escalation_requested'
        AND json_extract(payload, '$.escalation_id') NOT IN (
          SELECT json_extract(payload, '$.escalation_id')
          FROM workflow_events
          WHERE workflow_id = ?
            AND type = 'escalation_resolved'
        )
      ORDER BY created_at DESC
      LIMIT 1
    `)
            .get(id, id);
        if (!pendingEscalation) {
            res.status(400).json({
                success: false,
                data: null,
                message: `No pending escalation for workflow: ${id}`,
            });
            return;
        }
        // Parse escalation data from event
        const escalationPayload = JSON.parse(pendingEscalation.payload);
        const now = new Date().toISOString();
        // Record escalation_resolved event. This must happen before the engine
        // emit/wakeup below so the escalation is durably resolved even if
        // those notifications fail.
        const eventId = randomUUID();
        db.prepare(`
      INSERT INTO workflow_events (id, workflow_id, type, payload, created_at)
      VALUES (?, ?, ?, ?, ?)
    `).run(eventId, id, "escalation_resolved", JSON.stringify({
            escalation_id: escalationPayload.escalation_id,
            action,
            message,
            responded_at: now,
        }), now);
        // Emit escalation resolved event for WebSocket broadcast
        const engine = getEngineForWorkflow(req, id);
        if (engine) {
            engine.emitEscalationResolved(id, escalationPayload.escalation_id, action, message);
        }
        // Trigger orchestrator wakeup if available (only on orchestrator engine;
        // detected via duck-typing on the method name)
        if (engine && "triggerEscalationWakeup" in engine) {
            try {
                await engine.triggerEscalationWakeup(id);
            }
            catch (wakeupError) {
                console.error("Failed to trigger escalation wakeup:", wakeupError);
                // Don't fail the response - escalation is still resolved
            }
        }
        // Parse and return workflow: map snake_case DB columns to the
        // camelCase wire shape, deserializing the JSON columns.
        const workflow = {
            id: workflowRow.id,
            title: workflowRow.title,
            source: JSON.parse(workflowRow.source),
            status: workflowRow.status,
            steps: JSON.parse(workflowRow.steps || "[]"),
            worktreePath: workflowRow.worktree_path,
            branchName: workflowRow.branch_name,
            baseBranch: workflowRow.base_branch,
            currentStepIndex: workflowRow.current_step_index,
            orchestratorExecutionId: workflowRow.orchestrator_execution_id,
            orchestratorSessionId: workflowRow.orchestrator_session_id,
            config: JSON.parse(workflowRow.config),
            createdAt: workflowRow.created_at,
            updatedAt: workflowRow.updated_at,
            startedAt: workflowRow.started_at,
            completedAt: workflowRow.completed_at,
        };
        // Broadcast update
        broadcastWorkflowUpdate(req.project.id, id, "updated", workflow);
        res.json({
            success: true,
            data: {
                workflow,
                escalation: {
                    id: escalationPayload.escalation_id,
                    action,
                    message,
                    resolvedAt: now,
                },
            },
            message: `Escalation resolved with action: ${action}`,
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
864
|
+
/**
 * POST /api/workflows/:id/escalation/notify - Internal endpoint for MCP tool to notify of new escalation
 *
 * This endpoint is called by the workflow MCP server when an escalation is created.
 * It broadcasts the escalation_requested event via WebSocket.
 *
 * Request body:
 * - escalation_id: string (required)
 * - message: string (required)
 * - options?: string[]
 * - context?: Record<string, unknown>
 */
router.post("/:id/escalation/notify", async (req, res) => {
    try {
        const { id } = req.params;
        const { escalation_id, message, options, context } = req.body;
        // Both identifying fields are mandatory.
        const missingRequired = !escalation_id || !message;
        if (missingRequired) {
            res.status(400).json({
                success: false,
                data: null,
                message: "escalation_id and message are required",
            });
            return;
        }
        // Verify workflow exists
        const db = req.project.db;
        const found = db
            .prepare("SELECT 1 FROM workflows WHERE id = ?")
            .get(id);
        if (found === undefined) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${id}`,
            });
            return;
        }
        // Emit escalation requested event for WebSocket broadcast; a missing
        // engine is tolerated (the notification is simply not broadcast).
        getEngineForWorkflow(req, id)?.emitEscalationRequested(id, escalation_id, message, options, context);
        res.json({
            success: true,
            data: { notified: true },
            message: "Escalation notification broadcast",
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
916
|
+
// ===========================================================================
|
|
917
|
+
// MCP Server Endpoints
|
|
918
|
+
// These endpoints are called by the workflow MCP server instead of direct DB access
|
|
919
|
+
// ===========================================================================
|
|
920
|
+
/**
 * GET /api/workflows/:id/status - Get extended workflow status for orchestrator
 *
 * Returns workflow with steps, active executions, and ready steps.
 * Used by workflow_status MCP tool.
 */
router.get("/:id/status", async (req, res) => {
    try {
        const { id } = req.params;
        const engine = getEngineForWorkflow(req, id);
        if (!engine) {
            res.status(503).json({
                success: false,
                data: null,
                message: "Workflow engine not available",
            });
            return;
        }
        const workflow = await engine.getWorkflow(id);
        if (!workflow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${id}`,
            });
            return;
        }
        // Get ready steps (steps whose dependencies are satisfied)
        const readySteps = await engine.getReadySteps(id);
        const readyStepIds = readySteps.map((s) => s.id);
        // Get active executions from steps.
        // Fix: previously this issued one SELECT per running step (N+1);
        // batch-fetch with a single IN (...) query — the same pattern this
        // handler already uses for issue titles below — then iterate the id
        // list in its original order so the output ordering is unchanged.
        const db = req.project.db;
        const activeExecutionIds = workflow.steps
            .filter((s) => s.status === "running" && s.executionId)
            .map((s) => s.executionId);
        const activeExecutions = [];
        if (activeExecutionIds.length > 0) {
            const execPlaceholders = activeExecutionIds.map(() => "?").join(",");
            const execRows = db
                .prepare(`SELECT id, status, started_at FROM executions WHERE id IN (${execPlaceholders})`)
                .all(...activeExecutionIds);
            const execById = new Map(execRows.map((e) => [e.id, e]));
            for (const execId of activeExecutionIds) {
                const exec = execById.get(execId);
                // Skip ids with no matching executions row (stale reference)
                if (exec) {
                    const step = workflow.steps.find((s) => s.executionId === execId);
                    activeExecutions.push({
                        id: exec.id,
                        stepId: step?.id || "",
                        status: exec.status,
                        startedAt: exec.started_at,
                    });
                }
            }
        }
        // Get issue titles for steps (single batched lookup)
        const issueIds = workflow.steps.map((s) => s.issueId);
        const issueTitles = {};
        if (issueIds.length > 0) {
            const placeholders = issueIds.map(() => "?").join(",");
            const issues = db
                .prepare(`SELECT id, title FROM issues WHERE id IN (${placeholders})`)
                .all(...issueIds);
            for (const issue of issues) {
                issueTitles[issue.id] = issue.title;
            }
        }
        // Build response matching WorkflowStatusResult type
        const result = {
            workflow: {
                id: workflow.id,
                title: workflow.title,
                status: workflow.status,
                source: workflow.source,
                config: workflow.config,
                worktreePath: workflow.worktreePath,
            },
            steps: workflow.steps.map((s) => ({
                id: s.id,
                issueId: s.issueId,
                // Fall back to the raw issue id when the issue row is missing
                issueTitle: issueTitles[s.issueId] || s.issueId,
                status: s.status,
                executionId: s.executionId,
                dependsOn: s.dependencies || [],
            })),
            activeExecutions,
            readySteps: readyStepIds,
        };
        res.json({
            success: true,
            data: result,
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1012
|
+
/**
 * POST /api/workflows/:id/execute - Execute an issue within the workflow
 *
 * Used by execute_issue MCP tool. Validates the workflow/step state, resolves
 * the worktree to use, creates an execution via the project's execution
 * service, persists the updated step state, and emits a step-started event.
 *
 * Request body:
 * - issue_id: string (required)
 * - agent_type?: AgentType
 * - model?: string
 * - worktree_mode: 'create_root' | 'use_root' | 'create_branch' | 'use_branch'
 * - worktree_id?: string (for use_root/use_branch; an execution id whose
 *   worktree is reused)
 */
router.post("/:id/execute", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const engine = getEngineForWorkflow(req, workflowId);
        if (!engine) {
            res.status(503).json({
                success: false,
                data: null,
                message: "Workflow engine not available",
            });
            return;
        }
        const { issue_id, agent_type, model, worktree_mode, worktree_id } = req.body;
        // Validate required params
        if (!issue_id) {
            res.status(400).json({
                success: false,
                data: null,
                message: "issue_id is required",
            });
            return;
        }
        if (!worktree_mode) {
            res.status(400).json({
                success: false,
                data: null,
                message: "worktree_mode is required",
            });
            return;
        }
        // Get workflow
        const workflow = await engine.getWorkflow(workflowId);
        if (!workflow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        // Validate workflow is running — executions may only be spawned for
        // an in-flight workflow.
        if (workflow.status !== "running") {
            res.status(400).json({
                success: false,
                data: null,
                message: `Cannot execute issue: workflow is ${workflow.status}, expected running`,
            });
            return;
        }
        // Find step for this issue
        const step = workflow.steps.find((s) => s.issueId === issue_id);
        if (!step) {
            res.status(400).json({
                success: false,
                data: null,
                message: `Issue ${issue_id} is not part of workflow ${workflowId}`,
            });
            return;
        }
        // Validate step status — guards against double-starting a step.
        if (step.status !== "pending" && step.status !== "ready") {
            res.status(400).json({
                success: false,
                data: null,
                message: `Cannot execute step: status is ${step.status}, expected pending or ready`,
            });
            return;
        }
        // Get issue (content is used as the agent prompt below)
        const db = req.project.db;
        const issue = db
            .prepare("SELECT id, title, content FROM issues WHERE id = ?")
            .get(issue_id);
        if (!issue) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Issue not found: ${issue_id}`,
            });
            return;
        }
        // Determine worktree configuration
        let reuseWorktreePath;
        if (worktree_mode === "use_root" || worktree_mode === "use_branch") {
            if (!worktree_id) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: `worktree_id is required for ${worktree_mode} mode`,
                });
                return;
            }
            // Look up the execution to get the worktree path
            // (worktree_id is an execution id, not a path)
            const existingExecution = req
                .project.db.prepare("SELECT worktree_path FROM executions WHERE id = ?")
                .get(worktree_id);
            if (!existingExecution?.worktree_path) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: `Execution ${worktree_id} not found or has no worktree`,
                });
                return;
            }
            reuseWorktreePath = existingExecution.worktree_path;
        }
        else if (worktree_mode === "create_root" &&
            workflow.config.reuseWorktreePath) {
            // For the first execution, use workflow config's reuseWorktreePath if set
            // (e.g., when user selected an existing worktree when creating the workflow)
            reuseWorktreePath = workflow.config.reuseWorktreePath;
        }
        // Build execution config; request-level agent_type/model override the
        // workflow defaults.
        const agentTypeToUse = agent_type || workflow.config.defaultAgentType || "claude-code";
        const executionConfig = {
            mode: "worktree",
            model: model || workflow.config.orchestratorModel,
            baseBranch: workflow.baseBranch,
            reuseWorktreePath,
            // Workflow-spawned executions run autonomously without terminal
            dangerouslySkipPermissions: true,
        };
        // Create prompt from issue content (falls back to the title)
        const prompt = issue.content || `Implement issue: ${issue.title}`;
        // Create execution with workflow context
        const executionService = req.project.executionService;
        const execution = await executionService.createExecution(issue_id, executionConfig, prompt, agentTypeToUse, { workflowId, stepId: step.id });
        // Update step status and execution ID (persisted as a JSON column)
        const updatedSteps = workflow.steps.map((s) => s.id === step.id
            ? { ...s, status: "running", executionId: execution.id }
            : s);
        db.prepare("UPDATE workflows SET steps = ?, updated_at = ? WHERE id = ?").run(JSON.stringify(updatedSteps), new Date().toISOString(), workflowId);
        // Store worktree path on workflow for create_root mode
        if (worktree_mode === "create_root" && execution.worktree_path) {
            db.prepare("UPDATE workflows SET worktree_path = ?, branch_name = ?, updated_at = ? WHERE id = ?").run(execution.worktree_path, execution.branch_name, new Date().toISOString(), workflowId);
        }
        // Emit step started event (after the DB writes above)
        engine.emitStepStarted(workflowId, {
            ...step,
            status: "running",
            executionId: execution.id,
        });
        // Start execution timeout if configured (orchestrator workflows only;
        // detected via duck-typing on getWakeupService)
        if (workflow.config.executionTimeoutMs &&
            "getWakeupService" in engine &&
            typeof engine.getWakeupService === "function") {
            const wakeupService = engine.getWakeupService();
            wakeupService.startExecutionTimeout(execution.id, workflowId, step.id, workflow.config.executionTimeoutMs);
        }
        console.log(`[workflows/:id/execute] Started execution ${execution.id} for issue ${issue_id} in workflow ${workflowId}`);
        res.json({
            success: true,
            data: {
                execution_id: execution.id,
                worktree_path: execution.worktree_path || "",
                branch_name: execution.branch_name,
                status: execution.status,
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1188
|
+
/**
 * POST /api/workflows/:id/complete - Mark workflow as complete or failed
 *
 * Used by workflow_complete MCP tool. Persists the terminal status, emits the
 * matching engine event, and broadcasts the update over WebSocket.
 *
 * Request body:
 * - summary: string (required)
 * - status?: 'completed' | 'failed' (default: 'completed')
 */
router.post("/:id/complete", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const engine = getEngineForWorkflow(req, workflowId);
        if (!engine) {
            res.status(503).json({
                success: false,
                data: null,
                message: "Workflow engine not available",
            });
            return;
        }
        const { summary, status = "completed" } = req.body;
        if (!summary) {
            res.status(400).json({
                success: false,
                data: null,
                message: "summary is required",
            });
            return;
        }
        // Fix: validate status. Previously any caller-supplied string was
        // written verbatim into the workflows table; restrict to the two
        // documented terminal states (same validation style as the
        // escalation respond endpoint's validActions check).
        const validStatuses = ["completed", "failed"];
        if (!validStatuses.includes(status)) {
            res.status(400).json({
                success: false,
                data: null,
                message: `status must be one of: ${validStatuses.join(", ")}`,
            });
            return;
        }
        // Get workflow
        const workflow = await engine.getWorkflow(workflowId);
        if (!workflow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        // Update workflow status and stamp completion time
        const now = new Date().toISOString();
        const db = req.project.db;
        db.prepare(`
      UPDATE workflows
      SET status = ?, completed_at = ?, updated_at = ?
      WHERE id = ?
    `).run(status, now, now, workflowId);
        // Emit workflow completed/failed event (re-read so the event carries
        // the freshly persisted state)
        const updatedWorkflow = await engine.getWorkflow(workflowId);
        if (status === "completed") {
            engine.emitWorkflowCompleted(workflowId, updatedWorkflow);
        }
        else {
            engine.emitWorkflowFailed(workflowId, summary);
        }
        // Broadcast update
        broadcastWorkflowUpdate(req.project.id, workflowId, status, updatedWorkflow);
        res.json({
            success: true,
            data: {
                success: true,
                workflow_status: status,
                completed_at: now,
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1259
|
+
/**
 * POST /api/workflows/:id/escalate - Create an escalation request
 *
 * Used by escalate_to_user MCP tool.
 * Creates escalation and emits event for WebSocket broadcast. In full_auto
 * mode the request is auto-approved without recording any event; at most one
 * escalation may be pending per workflow at a time.
 *
 * Request body:
 * - message: string (required)
 * - options?: string[]
 * - context?: Record<string, unknown>
 */
router.post("/:id/escalate", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const { message, options, context: escalationContext, } = req.body;
        if (!message) {
            res.status(400).json({
                success: false,
                data: null,
                message: "message is required",
            });
            return;
        }
        const db = req.project.db;
        // Get workflow
        const workflowRow = db
            .prepare("SELECT * FROM workflows WHERE id = ?")
            .get(workflowId);
        if (!workflowRow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        // Parse config to check autonomy level (config is a JSON column)
        const config = JSON.parse(workflowRow.config);
        // If full_auto mode, bypass escalation: respond auto_approved without
        // recording an event, so nothing blocks the orchestrator.
        if (config.autonomyLevel === "full_auto") {
            console.log(`[workflows/:id/escalate] Workflow ${workflowId} is in full_auto mode, auto-approving`);
            res.json({
                success: true,
                data: {
                    status: "auto_approved",
                    message: "Escalation auto-approved (workflow is in full_auto mode). " +
                        "Proceed with your decision.",
                },
            });
            return;
        }
        // Check for existing pending escalation: newest 'escalation_requested'
        // whose escalation_id has no matching 'escalation_resolved' event
        // (same dedupe logic as the GET /:id/escalation endpoint).
        const pendingEscalation = db
            .prepare(`
      SELECT payload FROM workflow_events
      WHERE workflow_id = ?
        AND type = 'escalation_requested'
        AND json_extract(payload, '$.escalation_id') NOT IN (
          SELECT json_extract(payload, '$.escalation_id')
          FROM workflow_events
          WHERE workflow_id = ?
            AND type = 'escalation_resolved'
        )
      ORDER BY created_at DESC
      LIMIT 1
    `)
            .get(workflowId, workflowId);
        if (pendingEscalation) {
            const payload = JSON.parse(pendingEscalation.payload);
            res.status(400).json({
                success: false,
                data: null,
                message: `Workflow already has a pending escalation (ID: ${payload.escalation_id}). ` +
                    `Wait for user response or resolve the existing escalation first.`,
            });
            return;
        }
        // Generate unique escalation ID
        const escalationId = randomUUID();
        const now = new Date().toISOString();
        // Record escalation_requested event (persisted before the broadcast
        // below so the escalation survives even if no engine is attached)
        const eventId = randomUUID();
        db.prepare(`
      INSERT INTO workflow_events (id, workflow_id, type, payload, created_at)
      VALUES (?, ?, ?, ?, ?)
    `).run(eventId, workflowId, "escalation_requested", JSON.stringify({
            escalation_id: escalationId,
            message,
            options,
            context: escalationContext,
        }), now);
        // Emit escalation requested event for WebSocket broadcast
        const engine = getEngineForWorkflow(req, workflowId);
        if (engine) {
            engine.emitEscalationRequested(workflowId, escalationId, message, options, escalationContext);
        }
        console.log(`[workflows/:id/escalate] Escalation created for workflow ${workflowId}: ${escalationId}`);
        res.json({
            success: true,
            data: {
                status: "pending",
                escalation_id: escalationId,
                message: "Escalation request created. Your session will end here. " +
                    "When the user responds, you will receive a follow-up message with their response. " +
                    "The workflow will resume automatically.",
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1371
|
+
/**
 * POST /api/workflows/:id/notify - Send a non-blocking notification
 *
 * Used by notify_user MCP tool.
 * Broadcasts notification via WebSocket.
 *
 * Request body:
 * - message: string (required)
 * - level?: 'info' | 'warning' | 'error' (default: 'info')
 */
router.post("/:id/notify", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const { message, level = "info" } = req.body;
        if (!message) {
            res.status(400).json({
                success: false,
                data: null,
                message: "message is required",
            });
            return;
        }
        const db = req.project.db;
        // Verify workflow exists
        const found = db
            .prepare("SELECT 1 FROM workflows WHERE id = ?")
            .get(workflowId);
        if (!found) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        // Record notification event (for audit trail)
        const timestamp = new Date().toISOString();
        const payloadJson = JSON.stringify({ level, message });
        db.prepare(`
      INSERT INTO workflow_events (id, workflow_id, type, payload, created_at)
      VALUES (?, ?, ?, ?, ?)
    `).run(randomUUID(), workflowId, "user_notification", payloadJson, timestamp);
        // Broadcast notification via WebSocket
        broadcastWorkflowUpdate(req.project.id, workflowId, "notification", {
            level,
            message,
            timestamp,
        });
        console.log(`[workflows/:id/notify] [${level.toUpperCase()}] Workflow ${workflowId}: ${message}`);
        res.json({
            success: true,
            data: {
                success: true,
                delivered: true, // We assume WebSocket delivery
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1432
|
+
// Git refs safe to interpolate into a shell command line. Git itself allows a
// wider character set, but anything outside this set could carry shell
// metacharacters, so such refs are rejected with a 400 instead of quoted.
const SAFE_GIT_REF = /^[A-Za-z0-9][A-Za-z0-9._\/-]*$/;
// Escape text for use inside a double-quoted POSIX shell argument. The
// previous code escaped only `"`, which left `$`, backtick and `\` in the
// user-supplied commit message able to break out of the quoting.
const escapeShellArg = (s) => s.replace(/([\\"$`])/g, "\\$1");
// Return the commit hash currently at HEAD of the given worktree.
const revParseHead = (cwd) => execSync("git rev-parse HEAD", { cwd, encoding: "utf-8" }).trim();
/**
 * POST /api/workflows/:id/merge - Merge a branch into the workflow worktree
 *
 * Used by merge_branch MCP tool.
 * Merges a source branch into the workflow's worktree.
 *
 * Request body:
 * - source_branch: string (required)
 * - target_branch?: string (default: current workflow branch)
 * - strategy?: 'auto' | 'squash' (default: 'auto')
 * - message?: string (custom commit message)
 *
 * Responds 200 with data.success=false (plus conflicting_files when
 * applicable) on merge failure; 4xx only for bad input / missing workflow.
 */
router.post("/:id/merge", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const { source_branch, target_branch, strategy = "auto", message } = req.body;
        if (!source_branch) {
            res.status(400).json({
                success: false,
                data: null,
                message: "source_branch is required",
            });
            return;
        }
        // Security: branch names come from the request body and are spliced
        // into shell commands below — reject anything that could escape the
        // quoting before it ever reaches execSync.
        if (!SAFE_GIT_REF.test(source_branch) ||
            (target_branch && !SAFE_GIT_REF.test(target_branch))) {
            res.status(400).json({
                success: false,
                data: null,
                message: "Invalid branch name",
            });
            return;
        }
        const db = req.project.db;
        // Get workflow to find worktree path
        const workflowRow = db
            .prepare("SELECT worktree_path, branch_name FROM workflows WHERE id = ?")
            .get(workflowId);
        if (!workflowRow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        if (!workflowRow.worktree_path) {
            res.status(400).json({
                success: false,
                data: null,
                message: `Workflow ${workflowId} does not have a worktree`,
            });
            return;
        }
        const cwd = workflowRow.worktree_path;
        // If target_branch specified and different from current, checkout target first
        if (target_branch && target_branch !== workflowRow.branch_name) {
            try {
                execSync(`git checkout "${target_branch}"`, { cwd, stdio: "pipe" });
            }
            catch (checkoutError) {
                res.status(400).json({
                    success: false,
                    data: null,
                    message: `Failed to checkout target branch: ${target_branch}`,
                    error: checkoutError instanceof Error
                        ? checkoutError.message
                        : String(checkoutError),
                });
                return;
            }
        }
        // Perform merge using git commands
        try {
            let mergeCommit;
            let strategyUsed;
            if (strategy === "squash") {
                // Squash merge: stage the combined changes, then create a
                // single commit for them.
                execSync(`git merge --squash "${source_branch}"`, { cwd, stdio: "pipe" });
                // Commit with custom message or default
                const commitMessage = message || `Squash merge branch '${source_branch}'`;
                execSync(`git commit -m "${escapeShellArg(commitMessage)}"`, { cwd, stdio: "pipe" });
                mergeCommit = revParseHead(cwd);
                strategyUsed = "squash";
            }
            else {
                // Auto strategy: try fast-forward first
                try {
                    // Exits 0 only when HEAD is an ancestor of source_branch,
                    // i.e. a fast-forward is possible; throws otherwise.
                    execSync(`git merge-base --is-ancestor HEAD "${source_branch}"`, { cwd, stdio: "pipe" });
                    execSync(`git merge --ff-only "${source_branch}"`, { cwd, stdio: "pipe" });
                    mergeCommit = revParseHead(cwd);
                    strategyUsed = "fast-forward";
                }
                catch {
                    // Fast-forward not possible, do regular merge
                    const commitMessage = message || `Merge branch '${source_branch}'`;
                    execSync(`git merge --no-ff -m "${escapeShellArg(commitMessage)}" "${source_branch}"`, { cwd, stdio: "pipe" });
                    mergeCommit = revParseHead(cwd);
                    strategyUsed = "merge";
                }
            }
            console.log(`[workflows/:id/merge] Merged ${source_branch} into workflow ${workflowId} (${strategyUsed})`);
            res.json({
                success: true,
                data: {
                    success: true,
                    merge_commit: mergeCommit,
                    strategy_used: strategyUsed,
                },
            });
        }
        catch (mergeError) {
            // Check for merge conflicts: while a merge is stopped by
            // conflicts, the unmerged paths appear under --diff-filter=U.
            let conflictingFiles = [];
            try {
                const conflictOutput = execSync("git diff --name-only --diff-filter=U", { cwd, encoding: "utf-8" });
                conflictingFiles = conflictOutput
                    .split("\n")
                    .map((l) => l.trim())
                    .filter((l) => l.length > 0);
            }
            catch {
                // Not in merge state or other issue
            }
            if (conflictingFiles.length > 0) {
                // Abort the failed merge so the worktree is left clean
                try {
                    execSync("git merge --abort", { cwd, stdio: "pipe" });
                }
                catch {
                    // Ignore abort errors
                }
                res.json({
                    success: true,
                    data: {
                        success: false,
                        strategy_used: strategy === "squash" ? "squash" : "merge",
                        conflicting_files: conflictingFiles,
                        error: `Merge conflict in ${conflictingFiles.length} file(s)`,
                    },
                });
            }
            else {
                res.json({
                    success: true,
                    data: {
                        success: false,
                        strategy_used: strategy === "squash" ? "squash" : "merge",
                        error: mergeError instanceof Error
                            ? mergeError.message
                            : String(mergeError),
                    },
                });
            }
        }
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1616
|
+
/**
 * POST /api/workflows/:id/await-events - Register an await condition for the orchestrator
 *
 * Used by await_events MCP tool.
 * Stores condition in wakeup service (in-memory).
 * Returns immediately - orchestrator session should end after this call.
 *
 * Request body:
 * - event_types: AwaitableEventType[] (required)
 * - execution_ids?: string[]
 * - timeout_seconds?: number
 * - message?: string
 */
router.post("/:id/await-events", async (req, res) => {
    try {
        const { id: workflowId } = req.params;
        const { event_types, execution_ids, timeout_seconds, message } = req.body;
        // Validate required params. Array.isArray also rejects a bare string,
        // which previously slipped past the truthy/.length guard and was then
        // validated character-by-character below.
        if (!Array.isArray(event_types) || event_types.length === 0) {
            res.status(400).json({
                success: false,
                data: null,
                message: "event_types is required and must be non-empty",
            });
            return;
        }
        // Validate event types
        const validEventTypes = [
            "step_completed",
            "step_failed",
            "user_response",
            "escalation_resolved",
            "timeout",
        ];
        // Set membership is O(1); findIndex (rather than find) so that even a
        // literal `undefined` entry is reported as invalid.
        const allowedTypes = new Set(validEventTypes);
        const badIndex = event_types.findIndex((t) => !allowedTypes.has(t));
        if (badIndex !== -1) {
            const eventType = event_types[badIndex];
            res.status(400).json({
                success: false,
                data: null,
                message: `Invalid event type: ${eventType}. Must be one of: ${validEventTypes.join(", ")}`,
            });
            return;
        }
        const engine = getEngineForWorkflow(req, workflowId);
        if (!engine) {
            res.status(503).json({
                success: false,
                data: null,
                message: "Workflow engine not available",
            });
            return;
        }
        // Get workflow and validate status
        const workflow = await engine.getWorkflow(workflowId);
        if (!workflow) {
            res.status(404).json({
                success: false,
                data: null,
                message: `Workflow not found: ${workflowId}`,
            });
            return;
        }
        if (workflow.status !== "running") {
            res.status(400).json({
                success: false,
                data: null,
                message: `Cannot await events: workflow is ${workflow.status}, expected running`,
            });
            return;
        }
        // Register await condition in wakeup service (in-memory)
        // Note: getWakeupService is available on orchestrator engines
        if (!("getWakeupService" in engine)) {
            res.status(400).json({
                success: false,
                data: null,
                message: "Await events is only supported for orchestrator workflows",
            });
            return;
        }
        const wakeupService = engine.getWakeupService();
        const awaitResult = wakeupService.registerAwait({
            workflowId,
            eventTypes: event_types,
            executionIds: execution_ids,
            timeoutSeconds: timeout_seconds,
            message,
        });
        // Broadcast status update (for UI)
        broadcastWorkflowUpdate(req.project.id, workflowId, "awaiting", {
            await_id: awaitResult.id,
            event_types,
            message,
        });
        console.log(`[workflows/:id/await-events] Registered await ${awaitResult.id} for workflow ${workflowId}`, { eventTypes: event_types, executionIds: execution_ids });
        res.json({
            success: true,
            data: {
                status: "waiting",
                await_id: awaitResult.id,
                message: "Session will end. You'll be woken up when events occur.",
                will_wake_on: event_types,
                timeout_at: awaitResult.timeoutAt,
            },
        });
    }
    catch (error) {
        handleWorkflowError(error, res);
    }
});
|
|
1727
|
+
return router;
|
|
1728
|
+
}
|
|
1729
|
+
//# sourceMappingURL=workflows.js.map
|