@cascade-flow/runner 0.2.4 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +25 -78
- package/dist/checkpoint-ipc.d.ts +82 -0
- package/dist/checkpoint-ipc.d.ts.map +1 -0
- package/dist/index.d.ts +16 -10
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +456 -503
- package/dist/index.js.map +8 -7
- package/dist/step-executor.d.ts +9 -12
- package/dist/step-executor.d.ts.map +1 -1
- package/dist/subprocess-executor.d.ts +6 -1
- package/dist/subprocess-executor.d.ts.map +1 -1
- package/dist/validation.d.ts +27 -0
- package/dist/validation.d.ts.map +1 -1
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -9,8 +9,403 @@ var __export = (target, all) => {
     });
 };
 
-// src/
-import {
+// src/subprocess-executor.ts
+import { spawn } from "node:child_process";
+import { resolve, dirname, join as join2 } from "node:path";
+import { fileURLToPath } from "node:url";
+import { mkdir, readFile, unlink, access, writeFile, readdir, rm } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { getMicrosecondTimestamp, ensureErrorMessage } from "@cascade-flow/backend-interface";
+
+// src/checkpoint-ipc.ts
+import { join } from "node:path";
+function getResponsePath(dir, requestId) {
+  return join(dir, `res-${requestId}.json`);
+}
+function getResultPath(dir, requestId) {
+  return join(dir, `result-${requestId}.json`);
+}
+function getFailurePath(dir, requestId) {
+  return join(dir, `failure-${requestId}.json`);
+}
+
+// src/subprocess-executor.ts
+var EMBEDDED_STEP_EXECUTOR = `
+import { pathToFileURL } from "node:url";
+import { join } from "node:path";
+
+function serializeError(err) {
+  if (err instanceof Error) {
+    return { message: err.message, stack: err.stack, name: err.name };
+  }
+  return { message: String(err), name: "Error" };
+}
+
+function createCheckpointFunction(checkpointDir) {
+  const checkpointSequences = new Map();
+
+  return async function checkpoint(name, fn) {
+    if (!checkpointDir) return fn();
+
+    const seq = checkpointSequences.get(name) ?? 0;
+    checkpointSequences.set(name, seq + 1);
+    const requestId = Date.now() + "-" + Math.random().toString(36).slice(2);
+
+    // Write request
+    await Bun.write(
+      join(checkpointDir, "req-" + requestId + ".json"),
+      JSON.stringify({ requestId, name, sequenceNumber: seq })
+    );
+
+    // Poll for response
+    let response = null;
+    const responsePath = join(checkpointDir, "res-" + requestId + ".json");
+    while (!response) {
+      try {
+        response = JSON.parse(await Bun.file(responsePath).text());
+      } catch {
+        await Bun.sleep(5);
+      }
+    }
+
+    if (response.hit && response.data !== undefined) {
+      return JSON.parse(response.data);
+    }
+
+    // Cache miss: execute function
+    try {
+      const result = await fn();
+      await Bun.write(
+        join(checkpointDir, "result-" + requestId + ".json"),
+        JSON.stringify({ requestId, data: JSON.stringify(result) })
+      );
+      return result;
+    } catch (err) {
+      // Write failure file for parent to consume before re-throwing
+      await Bun.write(
+        join(checkpointDir, "failure-" + requestId + ".json"),
+        JSON.stringify({
+          requestId,
+          name,
+          sequenceNumber: seq,
+          error: JSON.stringify(serializeError(err)),
+        })
+      );
+      throw err;
+    }
+  };
+}
+
+async function main() {
+  try {
+    const inputFile = process.env.CF_STEP_INPUT_FILE;
+    const outputFile = process.env.STEP_OUTPUT_FILE;
+    const checkpointDir = process.env.CF_CHECKPOINT_DIR;
+
+    if (!inputFile) {
+      throw new Error("CF_STEP_INPUT_FILE environment variable is required");
+    }
+    if (!outputFile) {
+      throw new Error("STEP_OUTPUT_FILE environment variable is required");
+    }
+
+    const input = await Bun.file(inputFile).text();
+    const { stepPath, dependencies, ctx } = JSON.parse(input);
+
+    const checkpoint = createCheckpointFunction(checkpointDir);
+    const reconstructedCtx = { ...ctx, log: console.log, checkpoint };
+
+    // Load and execute the step
+    const mod = await import(pathToFileURL(stepPath).toString());
+    const stepDef = mod.step;
+
+    if (!stepDef || typeof stepDef.fn !== "function") {
+      throw new Error("Invalid step module at " + stepPath);
+    }
+
+    const result = await stepDef.fn({ dependencies, ctx: reconstructedCtx });
+    await Bun.write(outputFile, JSON.stringify(result, null, 2));
+    process.exit(0);
+  } catch (error) {
+    const errObj = error instanceof Error
+      ? { message: error.message, stack: error.stack, name: error.name }
+      : { message: String(error), name: "Error" };
+    process.stderr.write(JSON.stringify(errObj));
+    process.exit(1);
+  }
+}
+
+main();
+`;
+function createStreamHandler(streamType, attemptNumber, emitLog) {
+  let buffer = "";
+  const handler = (chunk) => {
+    buffer += chunk.toString();
+    const lines = buffer.split(`
+`);
+    buffer = lines.pop() || "";
+    for (const line of lines) {
+      if (!line.trim())
+        continue;
+      const timestamp = getMicrosecondTimestamp();
+      emitLog({
+        timestamp,
+        stream: streamType,
+        message: line,
+        attemptNumber
+      });
+    }
+  };
+  const getBuffer = () => buffer;
+  const flushBuffer = () => {
+    if (buffer.trim()) {
+      emitLog({
+        timestamp: getMicrosecondTimestamp(),
+        stream: streamType,
+        message: buffer,
+        attemptNumber
+      });
+      buffer = "";
+    }
+  };
+  return { handler, getBuffer, flushBuffer };
+}
+function startCheckpointWatcher(dir, existing, onCheckpoint, onCheckpointFailed) {
+  const processed = new Set;
+  let running = true;
+  const poll = async () => {
+    while (running) {
+      try {
+        const files = await readdir(dir);
+        for (const file of files) {
+          if (file.startsWith("failure-") && !processed.has(file)) {
+            processed.add(file);
+            try {
+              const content = await readFile(join2(dir, file), "utf-8");
+              const failure = JSON.parse(content);
+              if (onCheckpointFailed) {
+                await onCheckpointFailed({
+                  name: failure.name,
+                  sequenceNumber: failure.sequenceNumber,
+                  error: failure.error
+                });
+              }
+            } catch {}
+            continue;
+          }
+          if (!file.startsWith("req-") || processed.has(file))
+            continue;
+          processed.add(file);
+          try {
+            const content = await readFile(join2(dir, file), "utf-8");
+            const request = JSON.parse(content);
+            const cached = existing?.get(request.name)?.[request.sequenceNumber];
+            if (cached !== undefined) {
+              const response = {
+                requestId: request.requestId,
+                hit: true,
+                data: cached
+              };
+              await writeFile(getResponsePath(dir, request.requestId), JSON.stringify(response));
+            } else {
+              const response = {
+                requestId: request.requestId,
+                hit: false
+              };
+              await writeFile(getResponsePath(dir, request.requestId), JSON.stringify(response));
+              let result = null;
+              let failed = false;
+              const resultPath = getResultPath(dir, request.requestId);
+              const failurePath = getFailurePath(dir, request.requestId);
+              while (running && !result && !failed) {
+                try {
+                  const resultContent = await readFile(resultPath, "utf-8");
+                  result = JSON.parse(resultContent);
+                } catch {
+                  try {
+                    await readFile(failurePath, "utf-8");
+                    failed = true;
+                  } catch {
+                    await new Promise((resolve2) => setTimeout(resolve2, 5));
+                  }
+                }
+              }
+              if (result && onCheckpoint) {
+                if (!existing.has(request.name)) {
+                  existing.set(request.name, []);
+                }
+                const arr = existing.get(request.name);
+                arr[request.sequenceNumber] = result.data;
+                await onCheckpoint({
+                  name: request.name,
+                  sequenceNumber: request.sequenceNumber,
+                  data: result.data
+                });
+              }
+            }
+          } catch (e) {}
+        }
+      } catch {}
+      await new Promise((resolve2) => setTimeout(resolve2, 10));
+    }
+  };
+  poll().catch(() => {});
+  return {
+    stop: () => {
+      running = false;
+    }
+  };
+}
+async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options) {
+  await mkdir(dirname(outputPath), { recursive: true });
+  const executorPath = resolve(dirname(fileURLToPath(import.meta.url)), "step-executor.ts");
+  let spawnArgs;
+  let tempExecutorPath = null;
+  const stepInput = JSON.stringify({ stepPath: stepFile, dependencies, ctx });
+  const inputFilePath = join2(tmpdir(), `cf-step-input-${Date.now()}-${Math.random().toString(36).slice(2)}.json`);
+  await writeFile(inputFilePath, stepInput);
+  try {
+    await access(executorPath);
+    spawnArgs = ["bun", [executorPath]];
+  } catch {
+    tempExecutorPath = join2(tmpdir(), `cf-step-executor-${Date.now()}-${Math.random().toString(36).slice(2)}.ts`);
+    await writeFile(tempExecutorPath, EMBEDDED_STEP_EXECUTOR);
+    spawnArgs = ["bun", [tempExecutorPath]];
+  }
+  let checkpointDir;
+  let checkpointWatcher;
+  const checkpointCache = existingCheckpoints ?? new Map;
+  if (onCheckpoint || onCheckpointFailed || existingCheckpoints) {
+    checkpointDir = join2(tmpdir(), `cf-checkpoint-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+    await mkdir(checkpointDir, { recursive: true });
+    checkpointWatcher = startCheckpointWatcher(checkpointDir, checkpointCache, onCheckpoint, onCheckpointFailed);
+  }
+  return new Promise((resolve2, reject) => {
+    const child = spawn(spawnArgs[0], spawnArgs[1], {
+      stdio: ["pipe", "pipe", "pipe"],
+      env: {
+        ...process.env,
+        STEP_OUTPUT_FILE: outputPath,
+        CF_STEP_INPUT_FILE: inputFilePath,
+        ...checkpointDir ? { CF_CHECKPOINT_DIR: checkpointDir } : {}
+      }
+    });
+    const logs = [];
+    const logWritePromises = [];
+    let logError = null;
+    const emitLog = (entry) => {
+      logs.push(entry);
+      if (!onLog)
+        return;
+      const trackedPromise = Promise.resolve(onLog(entry)).catch((err) => {
+        if (!logError) {
+          logError = err instanceof Error ? err : new Error(String(err));
+        }
+      });
+      logWritePromises.push(trackedPromise);
+    };
+    const signal = options?.signal;
+    let aborted = false;
+    let abortReason;
+    const abortHandler = signal ? () => {
+      if (aborted)
+        return;
+      aborted = true;
+      abortReason = signal?.reason ?? new Error("Step execution aborted");
+      try {
+        child.kill("SIGKILL");
+      } catch {}
+    } : null;
+    const cleanup = () => {
+      if (signal && abortHandler) {
+        signal.removeEventListener("abort", abortHandler);
+      }
+    };
+    if (signal) {
+      if (signal.aborted) {
+        abortHandler?.();
+      } else {
+        signal.addEventListener("abort", abortHandler);
+      }
+    }
+    const stdoutHandler = createStreamHandler("stdout", attemptNumber, emitLog);
+    const stderrHandler = createStreamHandler("stderr", attemptNumber, emitLog);
+    child.stdout.on("data", stdoutHandler.handler);
+    child.stderr.on("data", stderrHandler.handler);
+    child.on("error", (err) => {
+      cleanup();
+      reject(err);
+    });
+    child.on("close", async (code, signal2) => {
+      cleanup();
+      if (checkpointWatcher) {
+        checkpointWatcher.stop();
+      }
+      if (tempExecutorPath) {
+        try {
+          await unlink(tempExecutorPath);
+        } catch {}
+      }
+      try {
+        await unlink(inputFilePath);
+      } catch {}
+      if (checkpointDir) {
+        try {
+          await rm(checkpointDir, { recursive: true, force: true });
+        } catch {}
+      }
+      stdoutHandler.flushBuffer();
+      stderrHandler.flushBuffer();
+      try {
+        await Promise.all(logWritePromises);
+      } catch {}
+      if (logError) {
+        reject(logError);
+        return;
+      }
+      if (aborted) {
+        const reason = abortReason instanceof Error ? abortReason : new Error(String(abortReason ?? "Step execution aborted"));
+        reject(reason);
+        return;
+      }
+      if (code === 0) {
+        try {
+          const outputContent = await readFile(outputPath, "utf-8");
+          const result = JSON.parse(outputContent);
+          try {
+            await unlink(outputPath);
+          } catch {}
+          resolve2({ result, logs });
+        } catch (err) {
+          reject(new Error(`Failed to read/parse output file ${outputPath}: ${err}`));
+        }
+      } else if (signal2) {
+        const error = new Error(`Step process killed by signal: ${signal2}`);
+        error.isCrash = true;
+        reject(error);
+      } else {
+        const lastStderrLog = logs.filter((l) => l.stream === "stderr").pop();
+        if (lastStderrLog) {
+          try {
+            const errorObj = JSON.parse(lastStderrLog.message);
+            const errorMessage = ensureErrorMessage(errorObj.message);
+            const error = new Error(errorMessage);
+            error.stack = errorObj.stack;
+            error.name = errorObj.name || "Error";
+            reject(error);
+          } catch {
+            const errorMessage = logs.filter((l) => l.stream === "stderr").map((l) => l.message).join(`
+`);
+            reject(new Error(errorMessage || `Step process exited with code ${code}`));
+          }
+        } else {
+          reject(new Error(`Step process exited with code ${code}`));
+        }
+      }
+    });
+    child.stdin.end();
+  });
+}
 
 // src/discovery.ts
 import fs from "node:fs/promises";
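
Note on the hunk above: the new executor coordinates checkpoints with the parent through files in CF_CHECKPOINT_DIR (req-<id>.json from the child, res-<id>.json from the parent's watcher, then result-<id>.json or failure-<id>.json from the child). A minimal step-side sketch, assuming the module shape the embedded executor loads (export const step = { fn }); the step name and URL below are illustrative, not part of the package:

// step.ts (hypothetical): a step using the injected ctx.checkpoint
export const step = {
  fn: async ({ dependencies, ctx }) => {
    // On a cache hit the parent replays the recorded JSON value; on a miss
    // the callback runs and its result is persisted for later resumes.
    // Repeated uses of the same name are told apart by a sequence number.
    const user = await ctx.checkpoint("fetch-user", async () => {
      const res = await fetch("https://example.com/users/1"); // illustrative URL
      return res.json();
    });
    return { userId: user.id };
  },
};
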
@@ -12626,202 +13021,24 @@ async function discoverSteps(root = path.resolve("steps")) {
   }
   return loaded;
 }
-
-// src/validation.ts
-function detectCycles(steps) {
-  const visiting = new Set;
-  const visited = new Set;
-  function dfs(s) {
-    if (visited.has(s.id))
-      return;
-    if (visiting.has(s.id)) {
-      throw new Error(`Cycle detected involving step "${s.name}" (id: ${s.id})`);
-    }
-    visiting.add(s.id);
-    for (const dep of Object.values(s.dependencies))
-      dfs(dep);
-    visiting.delete(s.id);
-    visited.add(s.id);
-  }
-  for (const s of steps)
-    dfs(s);
-}
-
-// src/subprocess-executor.ts
-import { spawn } from "node:child_process";
-import { resolve, dirname } from "node:path";
-import { fileURLToPath } from "node:url";
-import { mkdir, readFile, unlink } from "node:fs/promises";
-import { getMicrosecondTimestamp, ensureErrorMessage } from "@cascade-flow/backend-interface";
-function createStreamHandler(streamType, attemptNumber, emitLog) {
-  let buffer = "";
-  const handler = (chunk) => {
-    buffer += chunk.toString();
-    const lines = buffer.split(`
-`);
-    buffer = lines.pop() || "";
-    for (const line of lines) {
-      if (!line.trim())
-        continue;
-      const timestamp = getMicrosecondTimestamp();
-      emitLog({
-        timestamp,
-        stream: streamType,
-        message: line,
-        attemptNumber
-      });
-    }
-  };
-  const getBuffer = () => buffer;
-  const flushBuffer = () => {
-    if (buffer.trim()) {
-      emitLog({
-        timestamp: getMicrosecondTimestamp(),
-        stream: streamType,
-        message: buffer,
-        attemptNumber
-      });
-      buffer = "";
-    }
-  };
-  return { handler, getBuffer, flushBuffer };
-}
-async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, options) {
-  const executorPath = resolve(dirname(fileURLToPath(import.meta.url)), "step-executor");
-  await mkdir(dirname(outputPath), { recursive: true });
-  return new Promise((resolve2, reject) => {
-    const child = spawn("bun", [executorPath], {
-      stdio: ["pipe", "pipe", "pipe"],
-      env: {
-        ...process.env,
-        STEP_OUTPUT_FILE: outputPath
-      }
-    });
-    const logs = [];
-    const logWritePromises = [];
-    let logError = null;
-    const emitLog = (entry) => {
-      logs.push(entry);
-      if (!onLog)
-        return;
-      const trackedPromise = Promise.resolve(onLog(entry)).catch((err) => {
-        if (!logError) {
-          logError = err instanceof Error ? err : new Error(String(err));
-        }
-      });
-      logWritePromises.push(trackedPromise);
-    };
-    const signal = options?.signal;
-    let aborted2 = false;
-    let abortReason;
-    const abortHandler = signal ? () => {
-      if (aborted2)
-        return;
-      aborted2 = true;
-      abortReason = signal?.reason ?? new Error("Step execution aborted");
-      try {
-        child.kill("SIGKILL");
-      } catch {}
-    } : null;
-    const cleanup = () => {
-      if (signal && abortHandler) {
-        signal.removeEventListener("abort", abortHandler);
-      }
-    };
-    if (signal) {
-      if (signal.aborted) {
-        abortHandler?.();
-      } else {
-        signal.addEventListener("abort", abortHandler);
-      }
-    }
-    const stdoutHandler = createStreamHandler("stdout", attemptNumber, emitLog);
-    const stderrHandler = createStreamHandler("stderr", attemptNumber, emitLog);
-    child.stdout.on("data", stdoutHandler.handler);
-    child.stderr.on("data", stderrHandler.handler);
-    child.on("error", (err) => {
-      cleanup();
-      reject(err);
-    });
-    child.on("close", async (code, signal2) => {
-      cleanup();
-      stdoutHandler.flushBuffer();
-      stderrHandler.flushBuffer();
-      try {
-        await Promise.all(logWritePromises);
-      } catch {}
-      if (logError) {
-        reject(logError);
-        return;
-      }
-      if (aborted2) {
-        const reason = abortReason instanceof Error ? abortReason : new Error(String(abortReason ?? "Step execution aborted"));
-        reject(reason);
-        return;
-      }
-      if (code === 0) {
-        try {
-          const outputContent = await readFile(outputPath, "utf-8");
-          const result = JSON.parse(outputContent);
-          try {
-            await unlink(outputPath);
-          } catch {}
-          resolve2({ result, logs });
-        } catch (err) {
-          reject(new Error(`Failed to read/parse output file ${outputPath}: ${err}`));
-        }
-      } else if (signal2) {
-        const error46 = new Error(`Step process killed by signal: ${signal2}`);
-        error46.isCrash = true;
-        reject(error46);
-      } else {
-        const lastStderrLog = logs.filter((l) => l.stream === "stderr").pop();
-        if (lastStderrLog) {
-          try {
-            const errorObj = JSON.parse(lastStderrLog.message);
-            const errorMessage = ensureErrorMessage(errorObj.message);
-            const error46 = new Error(errorMessage);
-            error46.stack = errorObj.stack;
-            error46.name = errorObj.name || "Error";
-            reject(error46);
-          } catch {
-            const errorMessage = logs.filter((l) => l.stream === "stderr").map((l) => l.message).join(`
-`);
-            reject(new Error(errorMessage || `Step process exited with code ${code}`));
-          }
-        } else {
-          reject(new Error(`Step process exited with code ${code}`));
-        }
-      }
-    });
-    const input = JSON.stringify({ stepPath: stepFile, dependencies, ctx });
-    child.stdin.write(input);
-    child.stdin.end();
-  });
-}
-
-// src/index.ts
-import { getMicrosecondTimestamp as getMicrosecondTimestamp2 } from "@cascade-flow/backend-interface";
-import { Skip, isOptional as isOptional2 } from "@cascade-flow/workflow";
-
 // src/versioning.ts
 import { createHash } from "node:crypto";
-import { readFile as readFile2, readdir } from "node:fs/promises";
-import { join } from "node:path";
+import { readFile as readFile2, readdir as readdir2 } from "node:fs/promises";
+import { join as join3 } from "node:path";
 import { exec } from "node:child_process";
 import { promisify } from "node:util";
 var execAsync = promisify(exec);
 async function calculateWorkflowHash(workflow) {
   const hash2 = createHash("sha256");
   try {
-    const workflowJsonPath =
+    const workflowJsonPath = join3(workflow.dir, "workflow.json");
     const workflowJsonContent = await readFile2(workflowJsonPath, "utf-8");
     hash2.update(`workflow.json:${workflowJsonContent}`);
   } catch (error46) {
     throw new Error(`Failed to read workflow.json for ${workflow.slug}: ${error46 instanceof Error ? error46.message : "Unknown error"}`);
   }
   try {
-    const inputSchemaPath =
+    const inputSchemaPath = join3(workflow.dir, "input-schema.ts");
     const inputSchemaContent = await readFile2(inputSchemaPath, "utf-8");
     hash2.update(`input-schema.ts:${inputSchemaContent}`);
   } catch (error46) {
@@ -12846,9 +13063,9 @@ async function calculateWorkflowHash(workflow) {
 async function collectStepFiles(stepsDir) {
   const files = [];
   async function scan(dir) {
-    const entries = await
+    const entries = await readdir2(dir, { withFileTypes: true });
     for (const entry of entries) {
-      const fullPath =
+      const fullPath = join3(dir, entry.name);
       if (entry.isDirectory()) {
         await scan(fullPath);
       } else if (entry.isFile() && entry.name === "step.ts") {
@@ -12882,332 +13099,68 @@ async function getGitInfo(workflowDir) {
     return;
   }
 }
-
-
-
-
-
-
-
-  const workflows = await discoverWorkflows();
-  if (workflows.length === 0) {
-    throw new Error('No workflows found. Please create a "workflows" directory with at least one workflow.');
-  }
-  const workflow = workflows.find((w) => w.slug === options.workflow);
-  if (!workflow) {
-    const available = workflows.map((w) => w.slug).join(", ");
-    throw new Error(`Workflow "${options.workflow}" not found. Available workflows: ${available}`);
-  }
-  const steps = await discoverSteps(workflow.stepsDir);
-  detectCycles(steps);
-  const byId = new Map(steps.map((s) => [s.id, s]));
-  const selected = options?.only?.length ? options.only.map((id) => {
-    const s = byId.get(id);
-    if (!s)
-      throw new Error(`Unknown step "${id}"`);
-    return s;
-  }) : steps;
-  const backend = options.backend;
-  const workflowSlug = workflow.slug;
-  const cache = new Map;
-  const skippedSteps = new Set;
-  const runId = options?.runId ?? `${getMicrosecondTimestamp2()}`;
-  const defaultCtx = {
-    runId,
-    workflow: {
-      slug: workflow.slug,
-      name: workflow.name
-    },
-    input: undefined,
-    log: (...args) => console.log("[runner]", ...args),
-    ...options?.ctx
-  };
-  const workflowStartTime = getMicrosecondTimestamp2();
-  await backend.initializeRun(workflowSlug, runId);
-  const versionId = await calculateWorkflowHash(workflow);
-  const git = await getGitInfo(workflow.dir);
-  const stepManifest = steps.map((s) => s.id);
-  await backend.createWorkflowVersion({
-    workflowSlug,
-    versionId,
-    createdAt: getMicrosecondTimestamp2(),
-    stepManifest,
-    totalSteps: steps.length,
-    git
-  });
-  const hasInputSchema = workflow.inputSchema !== undefined;
-  const hasInput = options.input !== undefined;
-  await backend.saveWorkflowStart(workflowSlug, runId, {
-    versionId,
-    workflowAttemptNumber: 1,
-    hasInputSchema,
-    hasInput
-  });
-  let validatedInput = undefined;
-  if (workflow.inputSchema) {
-    const parseResult = workflow.inputSchema.safeParse(options.input ?? {});
-    if (!parseResult.success) {
-      const validationErrors = parseResult.error.issues.map((e) => ({
-        path: e.path.join("."),
-        message: e.message
-      }));
-      const errorMessage = validationErrors.map((e) => `  ${e.path}: ${e.message}`).join(`
-`);
-      const error46 = {
-        name: "ValidationError",
-        message: `Invalid workflow input:
-${errorMessage}`
-      };
-      await backend.saveWorkflowInputValidation(workflowSlug, runId, {
-        workflowAttemptNumber: 1,
-        hasSchema: true,
-        success: false,
-        error: error46,
-        validationErrors
-      });
-      const duration3 = getMicrosecondTimestamp2() - workflowStartTime;
-      await backend.saveWorkflowFailed(workflowSlug, runId, error46, {
-        workflowAttemptNumber: 1,
-        duration: duration3,
-        completedSteps: 0
-      }, "step-failed");
-      throw new Error(error46.message);
-    }
-    validatedInput = parseResult.data;
-  } else if (options.input !== undefined) {
-    validatedInput = options.input;
-  }
-  defaultCtx.input = validatedInput;
-  if (hasInputSchema || hasInput) {
-    await backend.saveWorkflowInputValidation(workflowSlug, runId, {
-      workflowAttemptNumber: 1,
-      hasSchema: hasInputSchema,
-      success: true
-    });
-  }
-  if (options?.resume) {
-    defaultCtx.log(`Resuming run ${defaultCtx.runId}...`);
-    const workflowEvents = await backend.loadEvents(workflowSlug, defaultCtx.runId, { category: "workflow" });
-    const workflowStartedEvent = workflowEvents.find((e) => e.type === "WorkflowStarted");
-    if (workflowStartedEvent && workflowStartedEvent.type === "WorkflowStarted") {
-      const previousVersionId = workflowStartedEvent.versionId;
-      if (previousVersionId !== versionId) {
-        const previousVersion = await backend.getWorkflowVersion(workflowSlug, previousVersionId);
-        const currentVersion = await backend.getWorkflowVersion(workflowSlug, versionId);
-        defaultCtx.log(`⚠️ Workflow definition changed since original run`);
-        defaultCtx.log(`  Original: ${previousVersionId}`);
-        defaultCtx.log(`  Current: ${versionId}`);
-        if (previousVersion?.git && currentVersion?.git) {
-          defaultCtx.log(`  Git: ${previousVersion.git.commit} → ${currentVersion.git.commit}`);
-        }
-      }
-    }
-    const existingRecords = await backend.loadRun(workflowSlug, defaultCtx.runId);
-    let resumedSteps = 0;
-    for (const record2 of existingRecords) {
-      if (record2.status === "completed" && record2.output !== undefined && record2.output !== null) {
-        try {
-          const output = JSON.parse(record2.output);
-          cache.set(record2.stepId, Promise.resolve(output));
-          const step = steps.find((s) => s.id === record2.stepId);
-          const displayName = step?.name ?? record2.stepId;
-          defaultCtx.log(`✓ ${displayName} (resumed from cache)`);
-          resumedSteps++;
-        } catch (err) {
-          const step = steps.find((s) => s.id === record2.stepId);
-          const displayName = step?.name ?? record2.stepId;
-          defaultCtx.log(`⚠ Failed to deserialize ${displayName}, will re-run`);
-        }
-      }
-    }
-    const pendingSteps = steps.length - resumedSteps;
-    await backend.saveWorkflowResumed(workflowSlug, defaultCtx.runId, {
-      originalRunId: defaultCtx.runId,
-      resumedSteps,
-      pendingSteps
-    });
-  }
-  async function execute(step, stack = []) {
-    const existing = cache.get(step.id);
-    if (existing)
-      return existing;
-    const p = (async () => {
-      const startTime = getMicrosecondTimestamp2();
-      await backend.saveStepStart(workflowSlug, defaultCtx.runId, step.id, "local", {
-        dependencies: Object.keys(step.dependencies),
-        timestamp: startTime,
-        attemptNumber: 1
-      });
-      defaultCtx.log(`→ ${step.name} (waiting deps: ${Object.keys(step.dependencies).join(", ") || "none"})`);
-      try {
-        const depEntries = Object.entries(step.dependencies);
-        const depOutputsPairs = await Promise.all(depEntries.map(async ([alias, dep]) => {
-          const output = await execute(dep, [...stack, step.id]);
-          const isSkipped = skippedSteps.has(dep.id);
-          const isOptionalDep = isOptional2(step.dependencies[alias]);
-          return [alias, isSkipped && isOptionalDep ? undefined : output];
-        }));
-        const depOutputs = Object.fromEntries(depOutputsPairs);
-        const skippedRequiredDeps = depEntries.filter(([alias, dep]) => {
-          const isSkipped = skippedSteps.has(dep.id);
-          const isOptionalDep = isOptional2(step.dependencies[alias]);
-          return isSkipped && !isOptionalDep;
-        });
-        if (skippedRequiredDeps.length > 0) {
-          const cascadedFromStep = skippedRequiredDeps[0][1];
-          const endTime2 = getMicrosecondTimestamp2();
-          await backend.saveStepSkipped(workflowSlug, defaultCtx.runId, step.id, {
-            skipType: "cascade",
-            reason: `Dependency '${cascadedFromStep.name}' was skipped`,
-            duration: endTime2 - startTime,
-            attemptNumber: 1,
-            cascadedFrom: cascadedFromStep.id
-          });
-          skippedSteps.add(step.id);
-          defaultCtx.log(`⊘ ${step.name} (skipped: dependency '${cascadedFromStep.name}' was skipped)`);
-          return {};
-        }
-        const stepFile = join2(step.dir, "step.ts");
-        const maxRetries = step.maxRetries ?? 0;
-        let lastError = null;
-        for (let attemptNumber = 1;attemptNumber <= maxRetries + 1; attemptNumber++) {
-          try {
-            const { result, logs } = await executeStepInProcess(stepFile, step.id, depOutputs, defaultCtx, attemptNumber, backend);
-            const endTime2 = getMicrosecondTimestamp2();
-            await backend.saveStepComplete(workflowSlug, defaultCtx.runId, step.id, result, {
-              timestamp: endTime2,
-              duration: endTime2 - startTime,
-              logs: logs.length > 0 ? logs : undefined,
-              attemptNumber,
-              output: result
-            }, step.exportOutput ?? false);
-            defaultCtx.log(`✓ ${step.name}`);
-            return result;
-          } catch (err) {
-            lastError = err instanceof Error ? err : new Error(String(err));
-            if (lastError.name === "Skip" || lastError instanceof Skip) {
-              const endTime2 = getMicrosecondTimestamp2();
-              const skipError = lastError;
-              await backend.saveStepSkipped(workflowSlug, defaultCtx.runId, step.id, {
-                skipType: "primary",
-                reason: skipError.reason || skipError.message.replace("Step skipped: ", ""),
-                metadata: skipError.metadata,
-                duration: endTime2 - startTime,
-                attemptNumber
-              });
-              skippedSteps.add(step.id);
-              defaultCtx.log(`⊘ ${step.name} (skipped: ${skipError.reason || skipError.message})`);
-              return {};
-            }
-            if (attemptNumber <= maxRetries) {
-              const error47 = {
-                message: lastError.message,
-                stack: lastError.stack,
-                name: lastError.name
-              };
-              const retryEvent = {
-                category: "step",
-                eventId: "",
-                timestampUs: getMicrosecondTimestamp2(),
-                workflowSlug,
-                runId: defaultCtx.runId,
-                stepId: step.id,
-                type: "StepRetrying",
-                attemptNumber,
-                nextAttempt: attemptNumber + 1,
-                error: error47,
-                maxRetries
-              };
-              await backend.appendEvent(workflowSlug, defaultCtx.runId, retryEvent);
-              defaultCtx.log(`⟳ ${step.name} retry ${attemptNumber + 1}/${maxRetries + 1} after error: ${error47.message}`);
-            }
-          }
-        }
-        const error46 = {
-          message: lastError.message,
-          stack: lastError.stack,
-          name: lastError.name
-        };
-        const failureReason = lastError.isCrash ? "worker-crash" : "exhausted-retries";
-        const endTime = getMicrosecondTimestamp2();
-        await backend.saveStepFailed(workflowSlug, defaultCtx.runId, step.id, error46, {
-          duration: endTime - startTime,
-          attemptNumber: maxRetries + 1,
-          terminal: true,
-          failureReason
-        });
-        defaultCtx.log(`✗ ${step.name} failed after ${maxRetries + 1} attempts: ${error46.message}`);
-        throw lastError;
-      } catch (err) {
-        throw err;
-      }
-    })();
-    cache.set(step.id, p);
-    return p;
-  }
-  try {
-    await Promise.all(selected.map((s) => execute(s)));
-    const results = {};
-    for (const s of steps) {
-      if (s.exportOutput && !skippedSteps.has(s.id)) {
-        const val = cache.get(s.id);
-        if (val)
-          results[s.id] = await val;
-      }
-    }
-    const workflowEndTime = getMicrosecondTimestamp2();
-    const workflowDuration = workflowEndTime - workflowStartTime;
-    await backend.saveWorkflowComplete(workflowSlug, defaultCtx.runId, results, {
-      workflowAttemptNumber: 1,
-      timestamp: workflowEndTime,
-      duration: workflowDuration,
-      totalSteps: steps.length
-    });
-    return results;
-  } catch (err) {
-    const workflowEndTime = getMicrosecondTimestamp2();
-    const workflowDuration = workflowEndTime - workflowStartTime;
-    let completedSteps = 0;
-    for (const s of steps) {
-      const val = cache.get(s.id);
-      if (val) {
-        try {
-          await val;
-          completedSteps++;
-        } catch {}
-      }
-    }
-    let failedStep;
-    if (err instanceof Error && err.message) {
-      for (const s of steps) {
-        if (err.message.includes(s.name) || err.message.includes(s.id)) {
-          failedStep = s.id;
-          break;
-        }
+// src/validation.ts
+function getAllDependents(targetStepId, allSteps) {
+  const dependentsMap = new Map;
+  for (const step of allSteps) {
+    for (const depStep of Object.values(step.dependencies)) {
+      if (!dependentsMap.has(depStep.id)) {
+        dependentsMap.set(depStep.id, []);
       }
+      dependentsMap.get(depStep.id).push(step.id);
     }
-    const error46 = {
-      message: err instanceof Error ? err.message : String(err),
-      stack: err instanceof Error ? err.stack : undefined,
-      name: err instanceof Error ? err.name : undefined
-    };
-    const failureReason = err.isCrash ? "worker-crash" : "step-failed";
-    await backend.saveWorkflowFailed(workflowSlug, defaultCtx.runId, error46, {
-      workflowAttemptNumber: 1,
-      duration: workflowDuration,
-      completedSteps,
-      failedStep
-    }, failureReason);
-    throw err;
   }
+  const allDependents = new Set;
+  const visited = new Set;
+  function dfs(stepId) {
+    if (visited.has(stepId))
+      return;
+    visited.add(stepId);
+    const dependents = dependentsMap.get(stepId) || [];
+    for (const dependent of dependents) {
+      allDependents.add(dependent);
+      dfs(dependent);
+    }
+  }
+  dfs(targetStepId);
+  return allDependents;
+}
+async function validateWorkflowVersion(workflowSlug, parentRunId, currentVersionId, backend, log) {
+  const workflowEvents = await backend.loadEvents(workflowSlug, parentRunId, { category: "workflow" });
+  const workflowStartedEvent = workflowEvents.find((e) => e.type === "WorkflowStarted");
+  if (!workflowStartedEvent || workflowStartedEvent.type !== "WorkflowStarted") {
+    throw new Error(`Parent run ${parentRunId} has no WorkflowStarted event`);
+  }
+  const parentVersionId = workflowStartedEvent.versionId;
+  if (parentVersionId !== currentVersionId && log) {
+    const previousVersion = await backend.getWorkflowVersion(workflowSlug, parentVersionId);
+    const currentVersion = await backend.getWorkflowVersion(workflowSlug, currentVersionId);
+    const message = [
+      `ℹ️ Workflow definition changed since parent run`,
+      `  Parent: ${parentVersionId}`,
+      `  Current: ${currentVersionId}`
+    ];
+    if (previousVersion?.git && currentVersion?.git) {
+      message.push(`  Git: ${previousVersion.git.commit} → ${currentVersion.git.commit}`);
+    }
+    message.forEach((line) => log(line));
+  }
+  return parentVersionId;
+}
+
+// src/index.ts
+async function executeStepInProcess(stepFile, stepId, dependencies, ctx, attemptNumber, backend, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options) {
+  const outputPath = backend.getStepOutputPath(ctx.workflow.slug, ctx.runId, stepId, attemptNumber);
+  return executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options);
 }
 export {
-
+  validateWorkflowVersion,
   getGitInfo,
+  getAllDependents,
   executeStepInProcess,
   discoverWorkflows,
   discoverSteps,
   calculateWorkflowHash
 };
 
-//# debugId=
+//# debugId=922C5B57E7B7118364756E2164756E21