@ryanfw/prompt-orchestration-pipeline 0.5.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/README.md +1 -2
  2. package/package.json +1 -2
  3. package/src/api/validators/json.js +39 -0
  4. package/src/components/DAGGrid.jsx +392 -303
  5. package/src/components/JobCard.jsx +14 -12
  6. package/src/components/JobDetail.jsx +54 -51
  7. package/src/components/JobTable.jsx +72 -23
  8. package/src/components/Layout.jsx +145 -42
  9. package/src/components/LiveText.jsx +47 -0
  10. package/src/components/PageSubheader.jsx +75 -0
  11. package/src/components/TaskDetailSidebar.jsx +216 -0
  12. package/src/components/TimerText.jsx +82 -0
  13. package/src/components/UploadSeed.jsx +0 -70
  14. package/src/components/ui/Logo.jsx +16 -0
  15. package/src/components/ui/RestartJobModal.jsx +140 -0
  16. package/src/components/ui/toast.jsx +138 -0
  17. package/src/config/models.js +322 -0
  18. package/src/config/statuses.js +119 -0
  19. package/src/core/config.js +4 -34
  20. package/src/core/file-io.js +13 -28
  21. package/src/core/module-loader.js +54 -40
  22. package/src/core/pipeline-runner.js +65 -26
  23. package/src/core/status-writer.js +213 -58
  24. package/src/core/symlink-bridge.js +57 -0
  25. package/src/core/symlink-utils.js +94 -0
  26. package/src/core/task-runner.js +321 -437
  27. package/src/llm/index.js +258 -86
  28. package/src/pages/Code.jsx +351 -0
  29. package/src/pages/PipelineDetail.jsx +124 -15
  30. package/src/pages/PromptPipelineDashboard.jsx +20 -88
  31. package/src/providers/anthropic.js +83 -69
  32. package/src/providers/base.js +52 -0
  33. package/src/providers/deepseek.js +20 -21
  34. package/src/providers/gemini.js +226 -0
  35. package/src/providers/openai.js +36 -106
  36. package/src/providers/zhipu.js +136 -0
  37. package/src/ui/client/adapters/job-adapter.js +42 -28
  38. package/src/ui/client/api.js +134 -0
  39. package/src/ui/client/hooks/useJobDetailWithUpdates.js +65 -179
  40. package/src/ui/client/index.css +15 -0
  41. package/src/ui/client/index.html +2 -1
  42. package/src/ui/client/main.jsx +19 -14
  43. package/src/ui/client/time-store.js +161 -0
  44. package/src/ui/config-bridge.js +15 -24
  45. package/src/ui/config-bridge.node.js +15 -24
  46. package/src/ui/dist/assets/{index-CxcrauYR.js → index-DqkbzXZ1.js} +2132 -1086
  47. package/src/ui/dist/assets/style-DBF9NQGk.css +62 -0
  48. package/src/ui/dist/index.html +4 -3
  49. package/src/ui/job-reader.js +0 -108
  50. package/src/ui/public/favicon.svg +12 -0
  51. package/src/ui/server.js +252 -0
  52. package/src/ui/sse-enhancer.js +0 -1
  53. package/src/ui/transformers/list-transformer.js +32 -12
  54. package/src/ui/transformers/status-transformer.js +29 -42
  55. package/src/utils/dag.js +8 -4
  56. package/src/utils/duration.js +13 -19
  57. package/src/utils/formatters.js +27 -0
  58. package/src/utils/geometry-equality.js +83 -0
  59. package/src/utils/pipelines.js +5 -1
  60. package/src/utils/time-utils.js +40 -0
  61. package/src/utils/token-cost-calculator.js +294 -0
  62. package/src/utils/ui.jsx +18 -20
  63. package/src/components/ui/select.jsx +0 -27
  64. package/src/lib/utils.js +0 -6
  65. package/src/ui/client/hooks/useTicker.js +0 -26
  66. package/src/ui/config-bridge.browser.js +0 -149
  67. package/src/ui/dist/assets/style-D6K_oQ12.css +0 -62

package/src/core/module-loader.js
@@ -2,8 +2,6 @@ import fsp from "node:fs/promises";
  import path from "node:path";
  import { fileURLToPath, pathToFileURL } from "node:url";

- const WORKSPACE_CACHE_DIR = path.join(process.cwd(), ".tmp-task-modules");
-
  /**
  * Convert supported modulePath formats into a file:// URL.
  * @param {string | URL} modulePath
@@ -88,70 +86,86 @@ function createMissingModuleError(modulePath, originalError) {
  }

  /**
- * Copy a module file into a workspace-local cache directory so Vite/Vitest can load it.
+ * Copy a module file adjacent to its original location with a unique name.
  * @param {string} sourcePath
  * @returns {Promise<string>}
  */
- async function copyModuleToWorkspaceCache(sourcePath) {
- await fsp.mkdir(WORKSPACE_CACHE_DIR, { recursive: true });
+ async function copyModuleAdjacent(sourcePath) {
+ const dir = path.dirname(sourcePath);
  const ext = path.extname(sourcePath) || ".js";
  const base = path.basename(sourcePath, ext);
  const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
- const destFile = path.join(
- WORKSPACE_CACHE_DIR,
- `${base}.${uniqueSuffix}${ext}`
- );
+ const destFile = path.join(dir, `.cache.${base}.${uniqueSuffix}${ext}`);
  await fsp.copyFile(sourcePath, destFile);
  return destFile;
  }

  /**
  * Dynamically import a module with cache busting while remaining compatible with Node's file:/// resolution.
- * Falls back to copying the module into a workspace-local cache when query parameters break filesystem resolution.
+ * Falls back to copying the module adjacent to its original location when query parameters break filesystem resolution.
  * @param {string | URL} modulePath
  * @returns {Promise<any>} Module namespace object
  */
  export async function loadFreshModule(modulePath) {
  const fileUrl = resolveToFileURL(modulePath);
- const cacheBustedUrl = `${fileUrl.href}?t=${Date.now()}`;

+ // First attempt direct import without cache busting
  try {
- return await import(cacheBustedUrl);
+ return await import(fileUrl.href);
  } catch (error) {
- if (!isModuleNotFoundError(error) || fileUrl.protocol !== "file:") {
+ if (!isModuleNotFoundError(error)) {
  throw error;
  }

- const absolutePath = fileURLToPath(fileUrl);
-
+ // Second attempt: try cache-busted import
+ const cacheBustedUrl = `${fileUrl.href}?t=${Date.now()}`;
  try {
- await fsp.access(absolutePath);
- } catch {
- throw createMissingModuleError(
- absolutePath,
- /** @type {Error} */ (error)
- );
- }
+ return await import(cacheBustedUrl);
+ } catch (cacheBustedError) {
+ if (
+ !isModuleNotFoundError(cacheBustedError) ||
+ fileUrl.protocol !== "file:"
+ ) {
+ throw cacheBustedError;
+ }

- try {
- const cacheCopy = await copyModuleToWorkspaceCache(absolutePath);
- const cacheUrl = `${pathToFileURL(cacheCopy).href}?t=${Date.now()}`;
- return await import(cacheUrl);
- } catch (fallbackError) {
- const messageLines = [
- `Failed to load module "${absolutePath}" after attempting cache-busting import.`,
- `Cache-busted URL: ${cacheBustedUrl}`,
- `Original error: ${/** @type {Error} */ (error).message}`,
- `Fallback error: ${/** @type {Error} */ (fallbackError).message}`,
- ];
- const combined = new Error(messageLines.join("\n"));
- if ("cause" in Error.prototype) {
- combined.cause = fallbackError;
- } else {
- combined.fallbackError = fallbackError;
+ const absolutePath = fileURLToPath(fileUrl);
+
+ try {
+ await fsp.access(absolutePath);
+ } catch {
+ throw createMissingModuleError(
+ absolutePath,
+ /** @type {Error} */ (cacheBustedError)
+ );
+ }
+
+ // Third attempt: copy adjacent and import
+ let adjacentCopy;
+ try {
+ adjacentCopy = await copyModuleAdjacent(absolutePath);
+ const adjacentUrl = `${pathToFileURL(adjacentCopy).href}?t=${Date.now()}`;
+ return await import(adjacentUrl);
+ } catch (fallbackError) {
+ const messageLines = [
+ `Failed to load module "${absolutePath}" after attempting direct import, cache-busting import, and adjacent copy fallback.`,
+ `Direct import URL: ${fileUrl.href}`,
+ `Cache-busted URL: ${cacheBustedUrl}`,
+ `Adjacent fallback path attempted: ${adjacentCopy || "[adjacent copy creation failed]"}`,
+ `Original error: ${/** @type {Error} */ (error).message}`,
+ `Cache-bust error: ${/** @type {Error} */ (cacheBustedError).message}`,
+ `Fallback error: ${/** @type {Error} */ (fallbackError).message}`,
+ ];
+ const combined = new Error(messageLines.join("\n"));
+ if ("cause" in Error.prototype) {
+ combined.cause = fallbackError;
+ } else {
+ combined.fallbackError = fallbackError;
+ }
+ combined.initialError = error;
+ combined.cacheBustedError = cacheBustedError;
+ throw combined;
  }
- combined.initialError = error;
- throw combined;
  }
  }
  }
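
This hunk turns loadFreshModule from a single cache-busted import into a three-stage fallback: a plain import first, a `?t=<timestamp>` import only if that fails with a module-not-found error, and finally a copy of the file placed next to its source as `.cache.<name>.<suffix>.js`. A minimal usage sketch, assuming an ESM context; the import path and task file below are placeholders, not part of the package's documented API:

// Illustrative only: the relative import path and the task file are invented.
import { loadFreshModule } from "./src/core/module-loader.js";

const mod = await loadFreshModule("/tmp/example-tasks/summarize-task.js");
// Attempt order: 1) import(fileUrl.href)  2) import(fileUrl.href + "?t=...")
//                3) copy to ".cache.summarize-task.<suffix>.js" beside the source and import that
console.log(Object.keys(mod)); // exported names of the freshly loaded module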

package/src/core/pipeline-runner.js
@@ -4,6 +4,11 @@ import { runPipeline } from "./task-runner.js";
  import { loadFreshModule } from "./module-loader.js";
  import { validatePipelineOrThrow } from "./validation.js";
  import { getPipelineConfig } from "./config.js";
+ import { writeJobStatus } from "./status-writer.js";
+ import { TaskState } from "../config/statuses.js";
+ import { ensureTaskSymlinkBridge } from "./symlink-bridge.js";
+ import { cleanupTaskSymlinks } from "./symlink-utils.js";
+ import { createTaskFileIO } from "./file-io.js";

  const ROOT = process.env.PO_ROOT || process.cwd();
  const DATA_DIR = path.join(ROOT, process.env.PO_DATA_DIR || "pipeline-data");
@@ -17,6 +22,8 @@ if (!jobId) throw new Error("runner requires jobId as argument");

  const workDir = path.join(CURRENT_DIR, jobId);

+ const startFromTask = process.env.PO_START_FROM_TASK;
+
  // Get pipeline slug from environment or fallback to seed.json
  let pipelineSlug = process.env.PO_PIPELINE_SLUG;
  if (!pipelineSlug) {
@@ -60,7 +67,15 @@ const seed = JSON.parse(
  let pipelineArtifacts = {};

  for (const taskName of pipeline.tasks) {
- if (status.tasks[taskName]?.state === "done") {
+ // Skip tasks before startFromTask when targeting a specific restart point
+ if (
+ startFromTask &&
+ pipeline.tasks.indexOf(taskName) < pipeline.tasks.indexOf(startFromTask)
+ ) {
+ continue;
+ }
+
+ if (status.tasks[taskName]?.state === TaskState.DONE) {
  try {
  const outputPath = path.join(workDir, "tasks", taskName, "output.json");
  const output = JSON.parse(await fs.readFile(outputPath, "utf8"));
@@ -70,17 +85,13 @@ for (const taskName of pipeline.tasks) {
  }

  await updateStatus(taskName, {
- state: "running",
+ state: TaskState.RUNNING,
  startedAt: now(),
  attempts: (status.tasks[taskName]?.attempts || 0) + 1,
  });

  const taskDir = path.join(workDir, "tasks", taskName);
  await fs.mkdir(taskDir, { recursive: true });
- await atomicWrite(
- path.join(taskDir, "letter.json"),
- JSON.stringify({ task: taskName, at: now() }, null, 2)
- );

  try {
  const ctx = {
@@ -103,14 +114,31 @@ for (const taskName of pipeline.tasks) {
  ? modulePath
  : path.resolve(path.dirname(TASK_REGISTRY), modulePath);

- const result = await runPipeline(absoluteModulePath, ctx);
+ // Create symlink bridge for deterministic module resolution
+ const poRoot = process.env.PO_ROOT || process.cwd();
+ const relocatedEntry = await ensureTaskSymlinkBridge({
+ taskDir,
+ poRoot,
+ taskModulePath: absoluteModulePath,
+ });
+
+ // Create fileIO for this task
+ const fileIO = createTaskFileIO({
+ workDir,
+ taskName,
+ getStage: () => null, // pipeline-runner doesn't have stages
+ statusPath: tasksStatusPath,
+ });
+
+ const result = await runPipeline(relocatedEntry, ctx);

  if (!result.ok) {
- // Persist execution-logs.json and failure-details.json on task failure
+ // Persist execution-logs.json and failure-details.json on task failure via IO
  if (result.logs) {
- await atomicWrite(
- path.join(taskDir, "execution-logs.json"),
- JSON.stringify(result.logs, null, 2)
+ await fileIO.writeLog(
+ "execution-logs.json",
+ JSON.stringify(result.logs, null, 2),
+ { mode: "replace" }
  );
  }
  const failureDetails = {
@@ -120,14 +148,15 @@ for (const taskName of pipeline.tasks) {
  context: result.context,
  refinementAttempts: result.refinementAttempts || 0,
  };
- await atomicWrite(
- path.join(taskDir, "failure-details.json"),
- JSON.stringify(failureDetails, null, 2)
+ await fileIO.writeLog(
+ "failure-details.json",
+ JSON.stringify(failureDetails, null, 2),
+ { mode: "replace" }
  );

  // Update tasks-status.json with enriched failure context
  await updateStatus(taskName, {
- state: "failed",
+ state: TaskState.FAILED,
  endedAt: now(),
  error: result.error, // Don't double-normalize - use result.error as-is
  failedStage: result.failedStage,
@@ -155,14 +184,15 @@ for (const taskName of pipeline.tasks) {
  // No need to manually write output.json or enumerate artifacts

  if (result.logs) {
- await atomicWrite(
- path.join(taskDir, "execution-logs.json"),
- JSON.stringify(result.logs, null, 2)
+ await fileIO.writeLog(
+ "execution-logs.json",
+ JSON.stringify(result.logs, null, 2),
+ { mode: "replace" }
  );
  }

  await updateStatus(taskName, {
- state: "done",
+ state: TaskState.DONE,
  endedAt: now(),
  executionTimeMs:
  result.logs?.reduce((total, log) => total + (log.ms || 0), 0) || 0,
@@ -170,7 +200,7 @@ for (const taskName of pipeline.tasks) {
  });
  } catch (err) {
  await updateStatus(taskName, {
- state: "failed",
+ state: TaskState.FAILED,
  endedAt: now(),
  error: normalizeError(err),
  });
@@ -200,17 +230,26 @@ await appendLine(
  }) + "\n"
  );

+ // Clean up task symlinks to avoid dangling links in archives
+ await cleanupTaskSymlinks(dest);
+
  function now() {
  return new Date().toISOString();
  }

  async function updateStatus(taskName, patch) {
- const current = JSON.parse(await fs.readFile(tasksStatusPath, "utf8"));
- current.current = taskName;
- current.tasks = current.tasks || {};
- current.tasks[taskName] = { ...(current.tasks[taskName] || {}), ...patch };
- await atomicWrite(tasksStatusPath, JSON.stringify(current, null, 2));
- Object.assign(status, current);
+ return await writeJobStatus(workDir, (snapshot) => {
+ snapshot.current = taskName;
+ snapshot.tasks = snapshot.tasks || {};
+ snapshot.tasks[taskName] = {
+ ...(snapshot.tasks[taskName] || {}),
+ ...patch,
+ };
+ return snapshot;
+ }).then((snap) => {
+ Object.assign(status, snap);
+ return snap;
+ });
  }

  async function appendLine(file, line) {
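
Two behavioral changes stand out in this hunk: per-task writes now go through fileIO.writeLog and the shared writeJobStatus helper, and the runner honors PO_START_FROM_TASK so a restart re-executes only the chosen task and everything after it, skipping earlier tasks and reusing their recorded output. A rough sketch of that ordering check; the task names and env value below are invented:

// Illustrative only; the task list and the startFromTask value are placeholders.
const tasks = ["ingest", "draft", "review", "publish"];
const startFromTask = "review"; // would come from process.env.PO_START_FROM_TASK

for (const taskName of tasks) {
  if (startFromTask && tasks.indexOf(taskName) < tasks.indexOf(startFromTask)) {
    continue; // "ingest" and "draft" are skipped; their prior outputs are reused
  }
  console.log("would run:", taskName); // review, publish
}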

package/src/core/status-writer.js
@@ -1,5 +1,6 @@
  import fs from "node:fs/promises";
  import path from "node:path";
+ import { TaskState } from "../config/statuses.js";

  // Lazy import SSE registry to avoid circular dependencies
  let sseRegistry = null;
@@ -16,6 +17,9 @@ async function getSSERegistry() {
  return sseRegistry;
  }

+ // Per-job write queues to serialize writes to tasks-status.json
+ const writeQueues = new Map(); // Map<string jobDir, Promise<any>>
+
  // Instrumentation helper for status writer
  const createStatusWriterLogger = (jobId) => {
  const prefix = `[StatusWriter:${jobId || "unknown"}]`;
@@ -54,7 +58,7 @@ const createStatusWriterLogger = (jobId) => {
  function createDefaultStatus(jobId) {
  return {
  id: jobId,
- state: "pending",
+ state: TaskState.PENDING,
  current: null,
  currentStage: null,
  lastUpdated: new Date().toISOString(),
@@ -128,7 +132,7 @@ function validateStatusSnapshot(snapshot) {

  // Ensure required root fields exist
  if (typeof snapshot.state !== "string") {
- snapshot.state = "pending";
+ snapshot.state = TaskState.PENDING;
  }
  if (snapshot.current !== null && typeof snapshot.current !== "string") {
  snapshot.current = null;
@@ -193,66 +197,80 @@ export async function writeJobStatus(jobDir, updateFn) {
  const jobId = path.basename(jobDir);
  const logger = createStatusWriterLogger(jobId);

- logger.group("Status Write Operation");
- logger.log(`Updating status for job: ${jobId}`);
- logger.log(`Status file path: ${statusPath}`);
-
- // Read existing status or create default
- let snapshot = await readStatusFile(statusPath, jobId);
- logger.log("Current status snapshot:", snapshot);
-
- // Validate basic structure
- snapshot = validateStatusSnapshot(snapshot);
-
- // Apply user updates
- try {
- const result = updateFn(snapshot);
- // If updateFn returns a value, use it as new snapshot
- if (result !== undefined) {
- snapshot = result;
- }
- logger.log("Status after update function:", snapshot);
- } catch (error) {
- logger.error("Update function failed:", error);
- throw new Error(`Update function failed: ${error.message}`);
- }
-
- // Validate final structure
- snapshot = validateStatusSnapshot(snapshot);
+ // Get or create the write queue for this job directory
+ const prev = writeQueues.get(jobDir) || Promise.resolve();
+ let resultSnapshot;
+
+ const next = prev
+ .then(async () => {
+ logger.group("Status Write Operation");
+ logger.log(`Updating status for job: ${jobId}`);
+ logger.log(`Status file path: ${statusPath}`);
+
+ // Read existing status or create default
+ const current = await readStatusFile(statusPath, jobId);
+ logger.log("Current status snapshot:", current);
+
+ // Validate basic structure
+ const validated = validateStatusSnapshot(current);
+
+ // Apply user updates
+ let maybeUpdated;
+ try {
+ maybeUpdated = updateFn(validated);
+ } catch (error) {
+ console.error(`[${jobId}] Error executing update function:`, error);
+ throw new Error(`Update function failed: ${error.message}`);
+ }
+ const snapshot = validateStatusSnapshot(
+ maybeUpdated === undefined ? validated : maybeUpdated
+ );

- // Update timestamp
- snapshot.lastUpdated = new Date().toISOString();
+ snapshot.lastUpdated = new Date().toISOString();
+ logger.log("Status after update function:", snapshot);
+
+ // Atomic write
+ await atomicWrite(statusPath, snapshot);
+ logger.log("Status file written successfully");
+
+ // Emit SSE event for tasks-status.json change
+ const registry = (await getSSERegistry().catch(() => null)) || null;
+ if (registry) {
+ try {
+ const eventData = {
+ type: "state:change",
+ data: {
+ path: path.join(jobDir, "tasks-status.json"),
+ id: jobId,
+ jobId,
+ },
+ };
+ registry.broadcast(eventData);
+ logger.sse("state:change", eventData.data);
+ logger.log("SSE event broadcasted successfully");
+ } catch (error) {
+ // Don't fail the write if SSE emission fails
+ logger.error("Failed to emit SSE event:", error);
+ console.warn(`Failed to emit SSE event: ${error.message}`);
+ }
+ } else {
+ logger.warn("SSE registry not available - no event broadcasted");
+ }

- // Atomic write
- await atomicWrite(statusPath, snapshot);
- logger.log("Status file written successfully");
+ logger.groupEnd();
+ resultSnapshot = snapshot;
+ })
+ .catch((e) => {
+ throw e;
+ });

- // Emit SSE event for tasks-status.json change
- const registry = await getSSERegistry();
- if (registry) {
- try {
- const eventData = {
- type: "state:change",
- data: {
- path: path.join(jobDir, "tasks-status.json"),
- id: jobId,
- jobId,
- },
- };
- registry.broadcast(eventData);
- logger.sse("state:change", eventData.data);
- logger.log("SSE event broadcasted successfully");
- } catch (error) {
- // Don't fail the write if SSE emission fails
- logger.error("Failed to emit SSE event:", error);
- console.warn(`Failed to emit SSE event: ${error.message}`);
- }
- } else {
- logger.warn("SSE registry not available - no event broadcasted");
- }
+ // Store the promise chain and set up cleanup
+ writeQueues.set(
+ jobDir,
+ next.finally(() => {})
+ );

- logger.groupEnd();
- return snapshot;
+ return next.then(() => resultSnapshot);
  }

  /**
@@ -329,3 +347,140 @@ export async function updateTaskStatus(jobDir, taskId, taskUpdateFn) {
  return snapshot;
  });
  }
+
+ /**
+ * Reset a job from a specific task onward, preserving prior completed tasks
+ *
+ * @param {string} jobDir - Job directory path containing tasks-status.json
+ * @param {string} fromTask - Task identifier to restart from (inclusive)
+ * @param {Object} options - Reset options
+ * @param {boolean} [options.clearTokenUsage=true] - Whether to clear token usage arrays
+ * @returns {Promise<Object>} The updated status snapshot
+ */
+ export async function resetJobFromTask(
+ jobDir,
+ fromTask,
+ { clearTokenUsage = true } = {}
+ ) {
+ if (!jobDir || typeof jobDir !== "string") {
+ throw new Error("jobDir must be a non-empty string");
+ }
+
+ if (!fromTask || typeof fromTask !== "string") {
+ throw new Error("fromTask must be a non-empty string");
+ }
+
+ return writeJobStatus(jobDir, (snapshot) => {
+ // Reset root-level status
+ snapshot.state = TaskState.PENDING;
+ snapshot.current = null;
+ snapshot.currentStage = null;
+ snapshot.progress = 0;
+ snapshot.lastUpdated = new Date().toISOString();
+
+ // Ensure tasks object exists
+ if (!snapshot.tasks || typeof snapshot.tasks !== "object") {
+ snapshot.tasks = {};
+ }
+
+ // Compute progress based on preserved (done) tasks before fromTask
+ let doneCount = 0;
+ const taskKeys = Object.keys(snapshot.tasks);
+ for (const taskId of taskKeys) {
+ if (snapshot.tasks[taskId]?.state === TaskState.DONE) {
+ doneCount++;
+ }
+ }
+ snapshot.progress =
+ taskKeys.length > 0 ? (doneCount / taskKeys.length) * 100 : 0;
+
+ // Reset tasks from fromTask onward to pending; keep earlier tasks as-is
+ for (const taskId of taskKeys) {
+ const task = snapshot.tasks[taskId];
+ if (!task) continue; // ensure task object exists
+
+ const shouldReset =
+ taskKeys.indexOf(taskId) >= taskKeys.indexOf(fromTask);
+ if (shouldReset) {
+ // Reset task state and metadata
+ task.state = TaskState.PENDING;
+ task.currentStage = null;
+
+ // Remove error-related fields
+ delete task.failedStage;
+ delete task.error;
+
+ // Reset counters
+ task.attempts = 0;
+ task.refinementAttempts = 0;
+
+ // Clear token usage if requested
+ if (clearTokenUsage) {
+ task.tokenUsage = [];
+ }
+ }
+ // If task appears before fromTask and is not done, keep its state untouched
+ // This preserves upstream work if user restarts from a mid-pipeline task
+ }
+
+ // Preserve files.* arrays - do not modify them
+ // This ensures generated files are preserved during restart
+
+ return snapshot;
+ });
+ }
+
+ /**
+ * Reset a job and all its tasks to clean-slate state atomically
+ *
+ * @param {string} jobDir - Job directory path containing tasks-status.json
+ * @param {Object} options - Reset options
+ * @param {boolean} [options.clearTokenUsage=true] - Whether to clear token usage arrays
+ * @returns {Promise<Object>} The updated status snapshot
+ */
+ export async function resetJobToCleanSlate(
+ jobDir,
+ { clearTokenUsage = true } = {}
+ ) {
+ if (!jobDir || typeof jobDir !== "string") {
+ throw new Error("jobDir must be a non-empty string");
+ }
+
+ return writeJobStatus(jobDir, (snapshot) => {
+ // Reset root-level status
+ snapshot.state = TaskState.PENDING;
+ snapshot.current = null;
+ snapshot.currentStage = null;
+ snapshot.progress = 0;
+ snapshot.lastUpdated = new Date().toISOString();
+
+ // Reset all tasks
+ if (snapshot.tasks && typeof snapshot.tasks === "object") {
+ for (const taskId of Object.keys(snapshot.tasks)) {
+ const task = snapshot.tasks[taskId];
+
+ // Reset task state
+ task.state = TaskState.PENDING;
+ task.currentStage = null;
+
+ // Remove error-related fields
+ delete task.failedStage;
+ delete task.error;
+
+ // Reset counters
+ task.attempts = 0;
+ task.refinementAttempts = 0;
+
+ // Clear token usage if requested
+ if (clearTokenUsage) {
+ task.tokenUsage = [];
+ }
+ }
+ }
+
+ // Preserve files.* arrays - do not modify them
+ // This ensures generated files are preserved during restart
+
+ return snapshot;
+ });
+ }
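
The writeJobStatus rewrite routes every update for a given job directory through a per-job promise queue, so concurrent callers are serialized instead of interleaving read-modify-write cycles on tasks-status.json, and the new resetJobFromTask / resetJobToCleanSlate helpers reuse that same path. A hedged usage sketch; the import path, job directory, and task name are placeholders:

// Illustrative only; paths and task names are placeholders.
import { writeJobStatus, resetJobFromTask } from "./src/core/status-writer.js";

const jobDir = "/data/pipeline-data/current/job-123";

// These two updates race, but the per-job queue applies them one at a time.
await Promise.all([
  writeJobStatus(jobDir, (snapshot) => { snapshot.current = "draft"; }),
  writeJobStatus(jobDir, (snapshot) => { snapshot.progress = 50; }),
]);

// Restart from "review": earlier tasks keep their recorded state,
// while "review" and later tasks go back to pending.
await resetJobFromTask(jobDir, "review", { clearTokenUsage: true });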

package/src/core/symlink-bridge.js
@@ -0,0 +1,57 @@
+ import path from "node:path";
+ import { ensureSymlink } from "./symlink-utils.js";
+
+ /**
+ * Creates a taskDir symlink bridge to ensure deterministic module resolution.
+ *
+ * This function creates three symlinks in the task directory:
+ * - taskDir/node_modules -> {poRoot}/node_modules (for bare package specifiers)
+ * - taskDir/project -> {poRoot} (optional convenience for absolute project paths)
+ * - taskDir/_task_root -> dirname(taskModulePath) (for relative imports)
+ *
+ * @param {Object} options - Configuration options
+ * @param {string} options.taskDir - The task directory where symlinks should be created
+ * @param {string} options.poRoot - The repository root directory
+ * @param {string} options.taskModulePath - Absolute path to the original task module
+ * @returns {string} The relocated entry path for the task module
+ * @throws {Error} If symlink creation fails
+ */
+ export async function ensureTaskSymlinkBridge({
+ taskDir,
+ poRoot,
+ taskModulePath,
+ }) {
+ // Normalize all paths to absolute paths
+ const normalizedTaskDir = path.resolve(taskDir);
+ const normalizedPoRoot = path.resolve(poRoot);
+ const normalizedTaskModulePath = path.resolve(taskModulePath);
+
+ // Ensure the task directory exists
+ await import("node:fs/promises").then((fs) =>
+ fs.mkdir(normalizedTaskDir, { recursive: true })
+ );
+
+ // Create symlink for node_modules -> {poRoot}/node_modules
+ const nodeModulesLink = path.join(normalizedTaskDir, "node_modules");
+ const nodeModulesTarget = path.join(normalizedPoRoot, "node_modules");
+ await ensureSymlink(nodeModulesLink, nodeModulesTarget, "dir");
+
+ // Create symlink for project -> {poRoot}
+ const projectLink = path.join(normalizedTaskDir, "project");
+ await ensureSymlink(projectLink, normalizedPoRoot, "dir");
+
+ // Create symlink for _task_root -> dirname(taskModulePath)
+ const taskRootLink = path.join(normalizedTaskDir, "_task_root");
+ const taskRootTarget = path.dirname(normalizedTaskModulePath);
+ await ensureSymlink(taskRootLink, taskRootTarget, "dir");
+
+ // Return the relocated entry path
+ const taskModuleBasename = path.basename(normalizedTaskModulePath);
+ const relocatedEntry = path.join(
+ normalizedTaskDir,
+ "_task_root",
+ taskModuleBasename
+ );
+
+ return relocatedEntry;
+ }
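
Given a task directory and a task module path, the bridge leaves three symlinks behind and returns an entry path routed through _task_root, so relative imports and bare package specifiers resolve exactly as they would from the module's original location. A sketch of the resulting layout; every path below is illustrative, not taken from the package:

// Illustrative only; all paths are placeholders.
import { ensureTaskSymlinkBridge } from "./src/core/symlink-bridge.js";

const relocatedEntry = await ensureTaskSymlinkBridge({
  taskDir: "/data/pipeline-data/current/job-123/tasks/draft",
  poRoot: "/home/user/my-pipeline",
  taskModulePath: "/home/user/my-pipeline/tasks/draft/index.js",
});

// relocatedEntry:
//   /data/pipeline-data/current/job-123/tasks/draft/_task_root/index.js
// symlinks created inside the task directory:
//   node_modules -> /home/user/my-pipeline/node_modules
//   project      -> /home/user/my-pipeline
//   _task_root   -> /home/user/my-pipeline/tasks/draft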