@cascade-flow/runner 0.2.5 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/checkpoint-ipc.d.ts ADDED
@@ -0,0 +1,82 @@
+ /**
+ * Checkpoint IPC Types and Utilities
+ *
+ * Handles communication between the subprocess (step execution) and parent process
+ * (worker) for checkpoint operations. Uses file-based IPC for simplicity.
+ *
+ * Flow:
+ * 1. Subprocess writes request to req-{id}.json
+ * 2. Parent reads request, checks cache
+ * 3. Parent writes response to res-{id}.json (hit/miss)
+ * 4. If miss: subprocess executes fn, writes result to result-{id}.json
+ * 5. Parent persists checkpoint to backend
+ */
+ /**
+ * Request from subprocess to parent for checkpoint lookup/save
+ */
+ export type CheckpointRequest = {
+ requestId: string;
+ name: string;
+ sequenceNumber: number;
+ };
+ /**
+ * Response from parent to subprocess
+ */
+ export type CheckpointResponse = {
+ requestId: string;
+ hit: boolean;
+ data?: string;
+ };
+ /**
+ * Result from subprocess after executing checkpoint function (on cache miss)
+ */
+ export type CheckpointResult = {
+ requestId: string;
+ data: string;
+ };
+ /**
+ * Data passed to onCheckpoint callback
+ */
+ export type CheckpointData = {
+ name: string;
+ sequenceNumber: number;
+ data: string;
+ };
+ /**
+ * Failure from subprocess when checkpoint function throws (on cache miss)
+ */
+ export type CheckpointFailure = {
+ requestId: string;
+ name: string;
+ sequenceNumber: number;
+ error: string;
+ };
+ /**
+ * Data passed to onCheckpointFailed callback
+ */
+ export type CheckpointFailedData = {
+ name: string;
+ sequenceNumber: number;
+ error: string;
+ };
+ /**
+ * Get path for checkpoint request file
+ */
+ export declare function getRequestPath(dir: string, requestId: string): string;
+ /**
+ * Get path for checkpoint response file
+ */
+ export declare function getResponsePath(dir: string, requestId: string): string;
+ /**
+ * Get path for checkpoint result file
+ */
+ export declare function getResultPath(dir: string, requestId: string): string;
+ /**
+ * Get path for checkpoint failure file
+ */
+ export declare function getFailurePath(dir: string, requestId: string): string;
+ /**
+ * Generate unique request ID for checkpoint IPC
+ */
+ export declare function generateRequestId(): string;
+ //# sourceMappingURL=checkpoint-ipc.d.ts.map
package/dist/checkpoint-ipc.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"checkpoint-ipc.d.ts","sourceRoot":"","sources":["../src/checkpoint-ipc.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAIH;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;CACxB,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,kBAAkB,GAAG;IAC/B,SAAS,EAAE,MAAM,CAAC;IAClB,GAAG,EAAE,OAAO,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;CACf,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,gBAAgB,GAAG;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;CACf,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;CACf,CAAC;AAEF;;GAEG;AACH,wBAAgB,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAErE;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAEtE;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAEpE;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAErE;AAED;;GAEG;AACH,wBAAgB,iBAAiB,IAAI,MAAM,CAE1C"}
package/dist/index.d.ts CHANGED
@@ -1,13 +1,27 @@
  import type { Backend, LogEntry } from "@cascade-flow/backend-interface";
  import type { StepOutput, RunnerContext } from "@cascade-flow/workflow";
  import type { LoadedStep } from "./types";
+ import type { CheckpointData, CheckpointFailedData } from "./checkpoint-ipc";
  export type { LoadedStep };
+ export type { CheckpointData, CheckpointFailedData } from "./checkpoint-ipc";
  /**
  * Execute a step in an isolated child process
  *
  * Wrapper around executeStepInSubprocess that generates the output path.
+ *
+ * @param stepFile - Absolute path to the step.ts file
+ * @param stepId - Unique identifier of the step
+ * @param dependencies - Resolved dependency outputs
+ * @param ctx - Runner context passed to step
+ * @param attemptNumber - Current attempt number (for retries)
+ * @param backend - Backend for generating output path
+ * @param onLog - Optional callback for real-time log emission
+ * @param onCheckpoint - Optional callback for persisting checkpoints
+ * @param onCheckpointFailed - Optional callback for persisting checkpoint failures
+ * @param existingCheckpoints - Existing checkpoints to replay (name -> data[] by sequence)
+ * @param options - Additional options (signal for abort)
  */
- export declare function executeStepInProcess(stepFile: string, stepId: string, dependencies: Record<string, unknown>, ctx: RunnerContext, attemptNumber: number, backend: Backend, onLog?: (log: LogEntry) => void | Promise<void>, options?: {
+ export declare function executeStepInProcess(stepFile: string, stepId: string, dependencies: Record<string, unknown>, ctx: RunnerContext, attemptNumber: number, backend: Backend, onLog?: (log: LogEntry) => void | Promise<void>, onCheckpoint?: (checkpoint: CheckpointData) => Promise<void>, onCheckpointFailed?: (checkpoint: CheckpointFailedData) => Promise<void>, existingCheckpoints?: Map<string, string[]>, options?: {
  signal?: AbortSignal;
  }): Promise<{
  result: StepOutput;
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,iCAAiC,CAAC;AAEzE,OAAO,KAAK,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AACxE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAG1C,YAAY,EAAE,UAAU,EAAE,CAAC;AAE3B;;;;GAIG;AACH,wBAAsB,oBAAoB,CACxC,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,GAAG,EAAE,aAAa,EAClB,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,OAAO,EAChB,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,QAAQ,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,EAC/C,OAAO,CAAC,EAAE;IAAE,MAAM,CAAC,EAAE,WAAW,CAAA;CAAE,GACjC,OAAO,CAAC;IAAE,MAAM,EAAE,UAAU,CAAC;IAAC,IAAI,EAAE,QAAQ,EAAE,CAAA;CAAE,CAAC,CAoBnD;AAGD,OAAO,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAG/D,OAAO,EAAE,qBAAqB,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAGjE,OAAO,EAAE,gBAAgB,EAAE,uBAAuB,EAAE,MAAM,cAAc,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,iCAAiC,CAAC;AAEzE,OAAO,KAAK,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AACxE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAC1C,OAAO,KAAK,EAAE,cAAc,EAAE,oBAAoB,EAAE,MAAM,kBAAkB,CAAC;AAG7E,YAAY,EAAE,UAAU,EAAE,CAAC;AAG3B,YAAY,EAAE,cAAc,EAAE,oBAAoB,EAAE,MAAM,kBAAkB,CAAC;AAE7E;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAsB,oBAAoB,CACxC,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACrC,GAAG,EAAE,aAAa,EAClB,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,OAAO,EAChB,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,QAAQ,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,EAC/C,YAAY,CAAC,EAAE,CAAC,UAAU,EAAE,cAAc,KAAK,OAAO,CAAC,IAAI,CAAC,EAC5D,kBAAkB,CAAC,EAAE,CAAC,UAAU,EAAE,oBAAoB,KAAK,OAAO,CAAC,IAAI,CAAC,EACxE,mBAAmB,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,EAC3C,OAAO,CAAC,EAAE;IAAE,MAAM,CAAC,EAAE,WAAW,CAAA;CAAE,GACjC,OAAO,CAAC;IAAE,MAAM,EAAE,UAAU,CAAC;IAAC,IAAI,EAAE,QAAQ,EAAE,CAAA;CAAE,CAAC,CAuBnD;AAGD,OAAO,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAG/D,OAAO,EAAE,qBAAqB,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAGjE,OAAO,EAAE,gBAAgB,EAAE,uBAAuB,EAAE,MAAM,cAAc,CAAC"}
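
The widened executeStepInProcess signature threads the new checkpoint callbacks and a replay map through to the subprocess executor. A hedged call-site sketch follows; the backend, ctx, step path, and persistence helpers are placeholders declared purely for illustration, and only the result field of the return value (visible in the diff) is used.

```ts
// Sketch: calling the extended executeStepInProcess. Everything marked
// "placeholder" is assumed for illustration, not documented package API.
import {
  executeStepInProcess,
  type CheckpointData,
  type CheckpointFailedData,
} from "@cascade-flow/runner";
import type { Backend } from "@cascade-flow/backend-interface";
import type { RunnerContext } from "@cascade-flow/workflow";

declare const backend: Backend;       // placeholder
declare const ctx: RunnerContext;     // placeholder
declare function persistCheckpoint(c: CheckpointData): Promise<void>;    // placeholder
declare function persistFailure(f: CheckpointFailedData): Promise<void>; // placeholder

// Replay data from an earlier attempt: checkpoint name -> serialized results by sequence.
const existingCheckpoints = new Map<string, string[]>([
  ["fetch-user", ['{"id":1}']],
]);

const output = await executeStepInProcess(
  "/abs/path/steps/fetch/step.ts", // stepFile (placeholder path)
  "fetch",                         // stepId
  {},                              // dependencies
  ctx,
  1,                               // attemptNumber
  backend,
  (log) => console.log(log),       // onLog
  persistCheckpoint,               // onCheckpoint
  persistFailure,                  // onCheckpointFailed
  existingCheckpoints,
);
console.log(output.result);
```
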
package/dist/index.js CHANGED
@@ -11,10 +11,151 @@ var __export = (target, all) => {

  // src/subprocess-executor.ts
  import { spawn } from "node:child_process";
- import { resolve, dirname } from "node:path";
+ import { resolve, dirname, join as join2 } from "node:path";
  import { fileURLToPath } from "node:url";
- import { mkdir, readFile, unlink } from "node:fs/promises";
+ import { mkdir, readFile, unlink, access, writeFile, readdir, rm } from "node:fs/promises";
+ import { tmpdir } from "node:os";
  import { getMicrosecondTimestamp, ensureErrorMessage } from "@cascade-flow/backend-interface";
+
+ // src/checkpoint-ipc.ts
+ import { join } from "node:path";
+ function getResponsePath(dir, requestId) {
+ return join(dir, `res-${requestId}.json`);
+ }
+ function getResultPath(dir, requestId) {
+ return join(dir, `result-${requestId}.json`);
+ }
+ function getFailurePath(dir, requestId) {
+ return join(dir, `failure-${requestId}.json`);
+ }
+
+ // src/subprocess-executor.ts
+ var EMBEDDED_STEP_EXECUTOR = `
+ import { pathToFileURL } from "node:url";
+ import { join } from "node:path";
+ import { AsyncLocalStorage } from "async_hooks";
+
+ function serializeError(err) {
+ if (err instanceof Error) {
+ return { message: err.message, stack: err.stack, name: err.name };
+ }
+ return { message: String(err), name: "Error" };
+ }
+
+ // AsyncLocalStorage for checkpoint context
+ const checkpointStorage = new AsyncLocalStorage();
+
+ function createCheckpointFunction(checkpointDir) {
+ const checkpointSequences = new Map();
+
+ return async function checkpoint(name, fn) {
+ if (!checkpointDir) return fn();
+
+ const seq = checkpointSequences.get(name) ?? 0;
+ checkpointSequences.set(name, seq + 1);
+ const requestId = Date.now() + "-" + Math.random().toString(36).slice(2);
+
+ // Write request
+ await Bun.write(
+ join(checkpointDir, "req-" + requestId + ".json"),
+ JSON.stringify({ requestId, name, sequenceNumber: seq })
+ );
+
+ // Poll for response
+ let response = null;
+ const responsePath = join(checkpointDir, "res-" + requestId + ".json");
+ while (!response) {
+ try {
+ response = JSON.parse(await Bun.file(responsePath).text());
+ } catch {
+ await Bun.sleep(5);
+ }
+ }
+
+ if (response.hit && response.data !== undefined) {
+ return JSON.parse(response.data);
+ }
+
+ // Cache miss: execute function with parent injection
+ try {
+ const ctx = checkpointStorage.getStore();
+ let result = await fn();
+
+ // Inject parent checkpoint info if in a scope
+ if (ctx?.parent && typeof result === "object" && result !== null) {
+ result = { _parentCheckpoint: ctx.parent, ...result };
+ }
+
+ await Bun.write(
+ join(checkpointDir, "result-" + requestId + ".json"),
+ JSON.stringify({ requestId, data: JSON.stringify(result) })
+ );
+ return result;
+ } catch (err) {
+ // Write failure file for parent to consume before re-throwing
+ await Bun.write(
+ join(checkpointDir, "failure-" + requestId + ".json"),
+ JSON.stringify({
+ requestId,
+ name,
+ sequenceNumber: seq,
+ error: JSON.stringify(serializeError(err)),
+ })
+ );
+ throw err;
+ }
+ };
+ }
+
+ // Run with checkpoint context
+ function runWithCheckpoint(checkpointFn, fn) {
+ return checkpointStorage.run({ checkpoint: checkpointFn }, fn);
+ }
+
+ async function main() {
+ try {
+ const inputFile = process.env.CF_STEP_INPUT_FILE;
+ const outputFile = process.env.STEP_OUTPUT_FILE;
+ const checkpointDir = process.env.CF_CHECKPOINT_DIR;
+
+ if (!inputFile) {
+ throw new Error("CF_STEP_INPUT_FILE environment variable is required");
+ }
+ if (!outputFile) {
+ throw new Error("STEP_OUTPUT_FILE environment variable is required");
+ }
+
+ const input = await Bun.file(inputFile).text();
+ const { stepPath, dependencies, ctx } = JSON.parse(input);
+
+ const checkpoint = createCheckpointFunction(checkpointDir);
+ const reconstructedCtx = { ...ctx, log: console.log, checkpoint };
+
+ // Load and execute the step
+ const mod = await import(pathToFileURL(stepPath).toString());
+ const stepDef = mod.step;
+
+ if (!stepDef || typeof stepDef.fn !== "function") {
+ throw new Error("Invalid step module at " + stepPath);
+ }
+
+ // Execute step with checkpoint context
+ const result = await runWithCheckpoint(checkpoint, () =>
+ stepDef.fn({ dependencies, ctx: reconstructedCtx })
+ );
+ await Bun.write(outputFile, JSON.stringify(result, null, 2));
+ process.exit(0);
+ } catch (error) {
+ const errObj = error instanceof Error
+ ? { message: error.message, stack: error.stack, name: error.name }
+ : { message: String(error), name: "Error" };
+ process.stderr.write(JSON.stringify(errObj));
+ process.exit(1);
+ }
+ }
+
+ main();
+ `;
  function createStreamHandler(streamType, attemptNumber, emitLog) {
  let buffer = "";
  const handler = (chunk) => {
@@ -48,15 +189,124 @@ function createStreamHandler(streamType, attemptNumber, emitLog) {
  };
  return { handler, getBuffer, flushBuffer };
  }
- async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, options) {
- const executorPath = resolve(dirname(fileURLToPath(import.meta.url)), "step-executor");
+ function startCheckpointWatcher(dir, existing, onCheckpoint, onCheckpointFailed) {
+ const processed = new Set;
+ let running = true;
+ const poll = async () => {
+ while (running) {
+ try {
+ const files = await readdir(dir);
+ for (const file of files) {
+ if (file.startsWith("failure-") && !processed.has(file)) {
+ processed.add(file);
+ try {
+ const content = await readFile(join2(dir, file), "utf-8");
+ const failure = JSON.parse(content);
+ if (onCheckpointFailed) {
+ await onCheckpointFailed({
+ name: failure.name,
+ sequenceNumber: failure.sequenceNumber,
+ error: failure.error
+ });
+ }
+ } catch {}
+ continue;
+ }
+ if (!file.startsWith("req-") || processed.has(file))
+ continue;
+ processed.add(file);
+ try {
+ const content = await readFile(join2(dir, file), "utf-8");
+ const request = JSON.parse(content);
+ const cached = existing?.get(request.name)?.[request.sequenceNumber];
+ if (cached !== undefined) {
+ const response = {
+ requestId: request.requestId,
+ hit: true,
+ data: cached
+ };
+ await writeFile(getResponsePath(dir, request.requestId), JSON.stringify(response));
+ } else {
+ const response = {
+ requestId: request.requestId,
+ hit: false
+ };
+ await writeFile(getResponsePath(dir, request.requestId), JSON.stringify(response));
+ let result = null;
+ let failed = false;
+ const resultPath = getResultPath(dir, request.requestId);
+ const failurePath = getFailurePath(dir, request.requestId);
+ while (running && !result && !failed) {
+ try {
+ const resultContent = await readFile(resultPath, "utf-8");
+ result = JSON.parse(resultContent);
+ } catch {
+ try {
+ await readFile(failurePath, "utf-8");
+ failed = true;
+ } catch {
+ await new Promise((resolve2) => setTimeout(resolve2, 5));
+ }
+ }
+ }
+ if (result && onCheckpoint) {
+ if (!existing.has(request.name)) {
+ existing.set(request.name, []);
+ }
+ const arr = existing.get(request.name);
+ arr[request.sequenceNumber] = result.data;
+ await onCheckpoint({
+ name: request.name,
+ sequenceNumber: request.sequenceNumber,
+ data: result.data
+ });
+ }
+ }
+ } catch (e) {}
+ }
+ } catch {}
+ await new Promise((resolve2) => setTimeout(resolve2, 10));
+ }
+ };
+ poll().catch(() => {});
+ return {
+ stop: () => {
+ running = false;
+ }
+ };
+ }
+ async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options) {
  await mkdir(dirname(outputPath), { recursive: true });
+ const executorPath = resolve(dirname(fileURLToPath(import.meta.url)), "step-executor.ts");
+ let spawnArgs;
+ let tempExecutorPath = null;
+ const stepInput = JSON.stringify({ stepPath: stepFile, dependencies, ctx });
+ const inputFilePath = join2(tmpdir(), `cf-step-input-${Date.now()}-${Math.random().toString(36).slice(2)}.json`);
+ await writeFile(inputFilePath, stepInput);
+ try {
+ await access(executorPath);
+ spawnArgs = ["bun", [executorPath]];
+ } catch {
+ tempExecutorPath = join2(tmpdir(), `cf-step-executor-${Date.now()}-${Math.random().toString(36).slice(2)}.ts`);
+ await writeFile(tempExecutorPath, EMBEDDED_STEP_EXECUTOR);
+ spawnArgs = ["bun", [tempExecutorPath]];
+ }
+ let checkpointDir;
+ let checkpointWatcher;
+ const checkpointCache = existingCheckpoints ?? new Map;
+ if (onCheckpoint || onCheckpointFailed || existingCheckpoints) {
+ checkpointDir = join2(tmpdir(), `cf-checkpoint-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ await mkdir(checkpointDir, { recursive: true });
+ checkpointWatcher = startCheckpointWatcher(checkpointDir, checkpointCache, onCheckpoint, onCheckpointFailed);
+ }
  return new Promise((resolve2, reject) => {
- const child = spawn("bun", [executorPath], {
+ const child = spawn(spawnArgs[0], spawnArgs[1], {
  stdio: ["pipe", "pipe", "pipe"],
  env: {
  ...process.env,
- STEP_OUTPUT_FILE: outputPath
+ STEP_OUTPUT_FILE: outputPath,
+ CF_STEP_INPUT_FILE: inputFilePath,
+ ...checkpointDir ? { CF_CHECKPOINT_DIR: checkpointDir } : {}
  }
  });
  const logs = [];
@@ -107,6 +357,22 @@ async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, atte
  });
  child.on("close", async (code, signal2) => {
  cleanup();
+ if (checkpointWatcher) {
+ checkpointWatcher.stop();
+ }
+ if (tempExecutorPath) {
+ try {
+ await unlink(tempExecutorPath);
+ } catch {}
+ }
+ try {
+ await unlink(inputFilePath);
+ } catch {}
+ if (checkpointDir) {
+ try {
+ await rm(checkpointDir, { recursive: true, force: true });
+ } catch {}
+ }
  stdoutHandler.flushBuffer();
  stderrHandler.flushBuffer();
  try {
@@ -156,8 +422,6 @@ async function executeStepInSubprocess(stepFile, stepId, dependencies, ctx, atte
  }
  }
  });
- const input = JSON.stringify({ stepPath: stepFile, dependencies, ctx });
- child.stdin.write(input);
  child.stdin.end();
  });
  }
@@ -12778,22 +13042,22 @@ async function discoverSteps(root = path.resolve("steps")) {
  }
  // src/versioning.ts
  import { createHash } from "node:crypto";
- import { readFile as readFile2, readdir } from "node:fs/promises";
- import { join } from "node:path";
+ import { readFile as readFile2, readdir as readdir2 } from "node:fs/promises";
+ import { join as join3 } from "node:path";
  import { exec } from "node:child_process";
  import { promisify } from "node:util";
  var execAsync = promisify(exec);
  async function calculateWorkflowHash(workflow) {
  const hash2 = createHash("sha256");
  try {
- const workflowJsonPath = join(workflow.dir, "workflow.json");
+ const workflowJsonPath = join3(workflow.dir, "workflow.json");
  const workflowJsonContent = await readFile2(workflowJsonPath, "utf-8");
  hash2.update(`workflow.json:${workflowJsonContent}`);
  } catch (error46) {
  throw new Error(`Failed to read workflow.json for ${workflow.slug}: ${error46 instanceof Error ? error46.message : "Unknown error"}`);
  }
  try {
- const inputSchemaPath = join(workflow.dir, "input-schema.ts");
+ const inputSchemaPath = join3(workflow.dir, "input-schema.ts");
  const inputSchemaContent = await readFile2(inputSchemaPath, "utf-8");
  hash2.update(`input-schema.ts:${inputSchemaContent}`);
  } catch (error46) {
@@ -12818,9 +13082,9 @@ async function calculateWorkflowHash(workflow) {
  async function collectStepFiles(stepsDir) {
  const files = [];
  async function scan(dir) {
- const entries = await readdir(dir, { withFileTypes: true });
+ const entries = await readdir2(dir, { withFileTypes: true });
  for (const entry of entries) {
- const fullPath = join(dir, entry.name);
+ const fullPath = join3(dir, entry.name);
  if (entry.isDirectory()) {
  await scan(fullPath);
  } else if (entry.isFile() && entry.name === "step.ts") {
@@ -12904,9 +13168,9 @@ async function validateWorkflowVersion(workflowSlug, parentRunId, currentVersion
  }

  // src/index.ts
- async function executeStepInProcess(stepFile, stepId, dependencies, ctx, attemptNumber, backend, onLog, options) {
+ async function executeStepInProcess(stepFile, stepId, dependencies, ctx, attemptNumber, backend, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options) {
  const outputPath = backend.getStepOutputPath(ctx.workflow.slug, ctx.runId, stepId, attemptNumber);
- return executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, options);
+ return executeStepInSubprocess(stepFile, stepId, dependencies, ctx, attemptNumber, outputPath, onLog, onCheckpoint, onCheckpointFailed, existingCheckpoints, options);
  }
  export {
  validateWorkflowVersion,
@@ -12918,4 +13182,4 @@ export {
  calculateWorkflowHash
  };

- //# debugId=6283553AC8E9AA7C64756E2164756E21
+ //# debugId=1B7890ED6FF9B69B64756E2164756E21
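
For orientation, the embedded executor above rebuilds the step's ctx with a checkpoint(name, fn) function, so a step can wrap expensive work and have its serialized result replayed on a retry instead of re-executed. The sketch below assumes a plain step module of the shape the bundled executor checks for (mod.step with an fn property); real steps are presumably defined via @cascade-flow/workflow helpers, so the ctx typing here is an assumption.

```ts
// Sketch: a step using ctx.checkpoint for replay. The plain-object `step` shape
// mirrors what the bundled executor loads (mod.step with an fn function); the
// ctx typing and the endpoint URL are assumptions for illustration only.
type CheckpointFn = <T>(name: string, fn: () => Promise<T>) => Promise<T>;

export const step = {
  fn: async ({
    dependencies,
    ctx,
  }: {
    dependencies: Record<string, unknown>;
    ctx: { checkpoint: CheckpointFn; log: (...args: unknown[]) => void };
  }) => {
    // On a cache hit the parent answers with the stored JSON and fetchUser work is skipped.
    const user = await ctx.checkpoint("fetch-user", async () => {
      const res = await fetch("https://api.example.com/users/1"); // hypothetical endpoint
      return res.json();
    });
    ctx.log("loaded user", user);
    return { user, dependencies };
  },
};
```
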