@ryanfw/prompt-orchestration-pipeline 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/package.json +1 -1
  2. package/src/components/JobCard.jsx +1 -1
  3. package/src/components/JobDetail.jsx +45 -12
  4. package/src/components/JobTable.jsx +40 -1
  5. package/src/components/Layout.jsx +146 -22
  6. package/src/components/PageSubheader.jsx +75 -0
  7. package/src/components/UploadSeed.jsx +0 -70
  8. package/src/components/ui/Logo.jsx +16 -0
  9. package/src/core/config.js +145 -13
  10. package/src/core/file-io.js +12 -27
  11. package/src/core/orchestrator.js +92 -78
  12. package/src/core/pipeline-runner.js +13 -6
  13. package/src/core/status-writer.js +63 -52
  14. package/src/core/task-runner.js +61 -1
  15. package/src/llm/index.js +97 -40
  16. package/src/pages/Code.jsx +297 -0
  17. package/src/pages/PipelineDetail.jsx +47 -8
  18. package/src/pages/PromptPipelineDashboard.jsx +6 -53
  19. package/src/providers/deepseek.js +17 -1
  20. package/src/providers/openai.js +1 -1
  21. package/src/ui/client/adapters/job-adapter.js +26 -2
  22. package/src/ui/client/hooks/useJobDetailWithUpdates.js +0 -1
  23. package/src/ui/client/index.css +6 -0
  24. package/src/ui/client/index.html +1 -1
  25. package/src/ui/client/main.jsx +2 -0
  26. package/src/ui/dist/assets/{index-CxcrauYR.js → index-WgJUlSmE.js} +716 -307
  27. package/src/ui/dist/assets/style-x0V-5m8e.css +62 -0
  28. package/src/ui/dist/index.html +3 -3
  29. package/src/ui/job-reader.js +0 -108
  30. package/src/ui/server.js +54 -0
  31. package/src/ui/sse-enhancer.js +0 -1
  32. package/src/ui/transformers/list-transformer.js +32 -12
  33. package/src/ui/transformers/status-transformer.js +11 -11
  34. package/src/utils/token-cost-calculator.js +297 -0
  35. package/src/utils/ui.jsx +4 -4
  36. package/src/ui/dist/assets/style-D6K_oQ12.css +0 -62
@@ -182,39 +182,171 @@ export const defaultConfig = {
182
182
  llmRequestTimeout: 60000,
183
183
  },
184
184
  llm: {
185
- defaultProvider: "openai",
186
- defaultModel: "gpt-5-chat-latest",
185
+ defaultProvider: "deepseek",
186
+ defaultModel: "chat",
187
187
  maxConcurrency: 5,
188
188
  retryMaxAttempts: 3,
189
189
  retryBackoffMs: 1000,
190
190
  models: {
191
+ // DeepSeek (2025)
192
+ "deepseek:chat": {
193
+ provider: "deepseek",
194
+ model: "deepseek-chat", // V3.2 Exp (non-thinking) under the hood
195
+ tokenCostInPerMillion: 0.27,
196
+ tokenCostOutPerMillion: 1.1,
197
+ },
198
+ "deepseek:reasoner": {
199
+ provider: "deepseek",
200
+ model: "deepseek-reasoner", // R1 family
201
+ tokenCostInPerMillion: 0.55,
202
+ tokenCostOutPerMillion: 2.19,
203
+ },
204
+
205
+ // — OpenAI (2024 legacy still callable) —
191
206
  "openai:gpt-4": {
192
207
  provider: "openai",
193
208
  model: "gpt-4",
209
+ tokenCostInPerMillion: 30.0,
210
+ tokenCostOutPerMillion: 60.0,
194
211
  },
195
212
  "openai:gpt-4-turbo": {
196
213
  provider: "openai",
197
214
  model: "gpt-4-turbo",
215
+ tokenCostInPerMillion: 10.0,
216
+ tokenCostOutPerMillion: 30.0,
198
217
  },
218
+
219
+ // — OpenAI (2025) —
199
220
  "openai:gpt-5": {
200
221
  provider: "openai",
201
- model: "gpt-5-chat-latest",
222
+ model: "gpt-5-chat-latest", // alias tracks GPT-5 pricing
223
+ tokenCostInPerMillion: 1.25,
224
+ tokenCostOutPerMillion: 10.0,
202
225
  },
203
- "deepseek:reasoner": {
204
- provider: "deepseek",
205
- model: "deepseek-reasoner",
226
+
227
+ "openai:gpt-5-core": {
228
+ provider: "openai",
229
+ model: "gpt-5", // flagship
230
+ tokenCostInPerMillion: 1.25,
231
+ tokenCostOutPerMillion: 10.0,
206
232
  },
207
- "deepseek:chat": {
208
- provider: "deepseek",
209
- model: "deepseek-chat",
233
+ "openai:gpt-5-chat": {
234
+ provider: "openai",
235
+ model: "gpt-5-chat-latest", // Chat variant
236
+ tokenCostInPerMillion: 1.25,
237
+ tokenCostOutPerMillion: 10.0,
238
+ },
239
+ "openai:gpt-5-pro": {
240
+ provider: "openai",
241
+ model: "gpt-5-pro", // higher-compute tier
242
+ tokenCostInPerMillion: 15.0,
243
+ tokenCostOutPerMillion: 120.0,
244
+ },
245
+ "openai:gpt-5-mini": {
246
+ provider: "openai",
247
+ model: "gpt-5-mini",
248
+ tokenCostInPerMillion: 0.25,
249
+ tokenCostOutPerMillion: 2.0,
250
+ },
251
+ "openai:gpt-5-nano": {
252
+ provider: "openai",
253
+ model: "gpt-5-nano",
254
+ tokenCostInPerMillion: 0.05,
255
+ tokenCostOutPerMillion: 0.4,
256
+ },
257
+
258
+ // — Google Gemini (2025) —
259
+ "gemini:2.5-pro": {
260
+ provider: "google",
261
+ model: "gemini-2.5-pro", // ≤200k input tier shown; >200k is higher
262
+ tokenCostInPerMillion: 1.25,
263
+ tokenCostOutPerMillion: 10.0,
264
+ },
265
+ "gemini:2.5-flash": {
266
+ provider: "google",
267
+ model: "gemini-2.5-flash",
268
+ tokenCostInPerMillion: 0.3,
269
+ tokenCostOutPerMillion: 2.5,
270
+ },
271
+ "gemini:2.5-flash-lite": {
272
+ provider: "google",
273
+ model: "gemini-2.5-flash-lite",
274
+ tokenCostInPerMillion: 0.1,
275
+ tokenCostOutPerMillion: 0.4,
276
+ },
277
+ "gemini:2.5-flash-image": {
278
+ provider: "google",
279
+ model: "gemini-2.5-flash-image",
280
+ // Inputs follow 2.5 Flash text pricing; outputs are **image tokens** at $30/M (≈$0.039 per 1024² image)
281
+ tokenCostInPerMillion: 0.3,
282
+ tokenCostOutPerMillion: 30.0,
283
+ },
284
+
285
+ // — Z.ai (formerly Zhipu) —
286
+ "zai:glm-4.6": {
287
+ provider: "zai",
288
+ model: "GLM-4.6",
289
+ tokenCostInPerMillion: 0.6,
290
+ tokenCostOutPerMillion: 2.2,
291
+ },
292
+ "zai:glm-4.5": {
293
+ provider: "zai",
294
+ model: "GLM-4.5",
295
+ tokenCostInPerMillion: 0.6,
296
+ tokenCostOutPerMillion: 2.2,
297
+ },
298
+ "zai:glm-4.5-air": {
299
+ provider: "zai",
300
+ model: "GLM-4.5-Air",
301
+ tokenCostInPerMillion: 0.2,
302
+ tokenCostOutPerMillion: 1.1,
303
+ },
304
+
305
+ // — Anthropic —
306
+ // current (Claude 4.5 / 4.1)
307
+ "anthropic:sonnet-4-5": {
308
+ provider: "anthropic",
309
+ model: "claude-sonnet-4-5",
310
+ tokenCostInPerMillion: 3.0,
311
+ tokenCostOutPerMillion: 15.0,
312
+ },
313
+ "anthropic:haiku-4-5": {
314
+ provider: "anthropic",
315
+ model: "claude-haiku-4-5",
316
+ tokenCostInPerMillion: 1.0,
317
+ tokenCostOutPerMillion: 5.0,
318
+ },
319
+ "anthropic:opus-4-1": {
320
+ provider: "anthropic",
321
+ model: "claude-opus-4-1",
322
+ tokenCostInPerMillion: 15.0,
323
+ tokenCostOutPerMillion: 75.0,
324
+ },
325
+
326
+ // legacy / still available
327
+ "anthropic:sonnet-4": {
328
+ provider: "anthropic",
329
+ model: "claude-sonnet-4-0",
330
+ tokenCostInPerMillion: 3.0,
331
+ tokenCostOutPerMillion: 15.0,
332
+ },
333
+ "anthropic:sonnet-3-7": {
334
+ provider: "anthropic",
335
+ model: "claude-3-7-sonnet-20250219",
336
+ tokenCostInPerMillion: 3.0,
337
+ tokenCostOutPerMillion: 15.0,
210
338
  },
211
- "anthropic:opus": {
339
+ "anthropic:opus-4": {
212
340
  provider: "anthropic",
213
- model: "claude-3-opus",
341
+ model: "claude-opus-4-0",
342
+ tokenCostInPerMillion: 15.0,
343
+ tokenCostOutPerMillion: 75.0,
214
344
  },
215
- "anthropic:sonnet": {
345
+ "anthropic:haiku-3-5": {
216
346
  provider: "anthropic",
217
- model: "claude-3-sonnet",
347
+ model: "claude-3-5-haiku-20241022",
348
+ tokenCostInPerMillion: 0.8,
349
+ tokenCostOutPerMillion: 4.0,
218
350
  },
219
351
  },
220
352
  },
@@ -1,5 +1,6 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
+ import { writeJobStatus } from "./status-writer.js";
3
4
 
4
5
  /**
5
6
  * Creates a task-scoped file I/O interface that manages file operations
@@ -30,41 +31,25 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
30
31
  * Updates tasks-status.json with file information, ensuring de-duplication
31
32
  */
32
33
  async function updateStatusWithFiles(fileType, fileName) {
33
- try {
34
- const statusContent = await fs.readFile(statusPath, "utf8");
35
- const status = JSON.parse(statusContent);
36
-
37
- // Initialize files object if it doesn't exist
38
- if (!status.files) {
39
- status.files = { artifacts: [], logs: [], tmp: [] };
40
- }
41
-
42
- // Initialize task files if they don't exist
43
- if (!status.tasks[taskName].files) {
44
- status.tasks[taskName].files = { artifacts: [], logs: [], tmp: [] };
45
- }
46
-
47
- // Add to job-level files array (de-duped)
48
- const jobArray = status.files[fileType];
34
+ const jobDir = path.dirname(statusPath);
35
+ await writeJobStatus(jobDir, (snapshot) => {
36
+ snapshot.files ||= { artifacts: [], logs: [], tmp: [] };
37
+ snapshot.tasks ||= {};
38
+ snapshot.tasks[taskName] ||= {};
39
+ snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
40
+
41
+ const jobArray = snapshot.files[fileType];
49
42
  if (!jobArray.includes(fileName)) {
50
43
  jobArray.push(fileName);
51
44
  }
52
45
 
53
- // Add to task-level files array (de-duped)
54
- const taskArray = status.tasks[taskName].files[fileType];
46
+ const taskArray = snapshot.tasks[taskName].files[fileType];
55
47
  if (!taskArray.includes(fileName)) {
56
48
  taskArray.push(fileName);
57
49
  }
58
50
 
59
- // Write back to file atomically
60
- await atomicWrite(statusPath, JSON.stringify(status, null, 2));
61
- } catch (error) {
62
- // If status file doesn't exist or is invalid, we'll log but not fail
63
- console.warn(
64
- `Failed to update status with file ${fileName}:`,
65
- error.message
66
- );
67
- }
51
+ return snapshot;
52
+ });
68
53
  }
69
54
 
70
55
  /**
@@ -220,97 +220,111 @@ export async function startOrchestrator(opts) {
220
220
  * @param {Object} seed - Seed data containing pipeline information
221
221
  */
222
222
  function spawnRunner(jobId, dirs, running, spawn, testMode, seed) {
223
- const runnerPath = path.join(
224
- process.cwd(),
225
- "src",
226
- "core",
227
- "pipeline-runner.js"
228
- );
229
-
230
- const configSnapshot = getConfig();
231
- const availablePipelines = Object.keys(configSnapshot?.pipelines ?? {});
232
- const pipelineSlug = seed?.pipeline;
233
-
234
- console.log("[Orchestrator] spawnRunner invoked", {
235
- jobId,
236
- pipelineSlug: pipelineSlug ?? null,
237
- availablePipelines,
238
- seedKeys: seed ? Object.keys(seed) : null,
239
- });
223
+ // Use path relative to this file to avoid process.cwd() issues
224
+ const orchestratorDir = path.dirname(new URL(import.meta.url).pathname);
225
+ const runnerPath = path.join(orchestratorDir, "pipeline-runner.js");
240
226
 
241
- if (!availablePipelines.length) {
242
- console.warn(
243
- "[Orchestrator] No pipelines registered in config() when spawnRunner invoked"
244
- );
245
- } else if (!availablePipelines.includes(pipelineSlug)) {
246
- console.warn(
247
- "[Orchestrator] Requested pipeline slug missing from registry snapshot",
248
- {
249
- jobId,
250
- pipelineSlug,
251
- availablePipelines,
252
- }
253
- );
254
- }
227
+ // Set PO_ROOT for the orchestrator process to match what the runner will use
228
+ const originalPoRoot = process.env.PO_ROOT;
229
+ const poRoot = path.resolve(dirs.dataDir, "..");
230
+ process.env.PO_ROOT = poRoot;
255
231
 
256
- if (!pipelineSlug) {
257
- console.error("[Orchestrator] Missing pipeline slug in seed", {
232
+ try {
233
+ const configSnapshot = getConfig();
234
+ const availablePipelines = Object.keys(configSnapshot?.pipelines ?? {});
235
+ const pipelineSlug = seed?.pipeline;
236
+
237
+ console.log("[Orchestrator] spawnRunner invoked", {
258
238
  jobId,
259
- seed,
239
+ pipelineSlug: pipelineSlug ?? null,
260
240
  availablePipelines,
241
+ seedKeys: seed ? Object.keys(seed) : null,
261
242
  });
262
- throw new Error(
263
- "Pipeline slug is required in seed data. Include a 'pipeline' field in your seed."
264
- );
265
- }
266
243
 
267
- let pipelineConfig;
268
- try {
269
- pipelineConfig = getPipelineConfig(pipelineSlug);
270
- } catch (error) {
271
- console.error("[Orchestrator] Pipeline lookup failed", {
272
- jobId,
273
- pipelineSlug,
274
- availablePipelines,
244
+ if (!availablePipelines.length) {
245
+ console.warn(
246
+ "[Orchestrator] No pipelines registered in config() when spawnRunner invoked"
247
+ );
248
+ } else if (!availablePipelines.includes(pipelineSlug)) {
249
+ console.warn(
250
+ "[Orchestrator] Requested pipeline slug missing from registry snapshot",
251
+ {
252
+ jobId,
253
+ pipelineSlug,
254
+ availablePipelines,
255
+ }
256
+ );
257
+ }
258
+
259
+ if (!pipelineSlug) {
260
+ console.error("[Orchestrator] Missing pipeline slug in seed", {
261
+ jobId,
262
+ seed,
263
+ availablePipelines,
264
+ });
265
+ throw new Error(
266
+ "Pipeline slug is required in seed data. Include a 'pipeline' field in your seed."
267
+ );
268
+ }
269
+
270
+ let pipelineConfig;
271
+ try {
272
+ pipelineConfig = getPipelineConfig(pipelineSlug);
273
+ } catch (error) {
274
+ console.error("[Orchestrator] Pipeline lookup failed", {
275
+ jobId,
276
+ pipelineSlug,
277
+ availablePipelines,
278
+ });
279
+ throw error;
280
+ }
281
+
282
+ // Use environment variables with explicit slug propagation
283
+ // PO_ROOT should point to the directory containing pipeline-config
284
+ // In our case, it's the parent of pipeline-data directory
285
+ const env = {
286
+ ...process.env,
287
+ PO_ROOT: poRoot,
288
+ PO_DATA_DIR: dirs.dataDir,
289
+ PO_PENDING_DIR: dirs.pending,
290
+ PO_CURRENT_DIR: dirs.current,
291
+ PO_COMPLETE_DIR: dirs.complete,
292
+ PO_PIPELINE_SLUG: pipelineSlug,
293
+ // Force mock provider for testing
294
+ PO_DEFAULT_PROVIDER: "mock",
295
+ };
296
+
297
+ // Always call spawn so tests can capture it
298
+ const child = spawn(process.execPath, [runnerPath, jobId], {
299
+ stdio: ["ignore", "inherit", "inherit"],
300
+ env,
301
+ cwd: process.cwd(),
275
302
  });
276
- throw error;
277
- }
278
303
 
279
- // Use environment variables with explicit slug propagation
280
- const env = {
281
- ...process.env,
282
- PO_DATA_DIR: dirs.dataDir,
283
- PO_PENDING_DIR: dirs.pending,
284
- PO_CURRENT_DIR: dirs.current,
285
- PO_COMPLETE_DIR: dirs.complete,
286
- PO_PIPELINE_SLUG: pipelineSlug,
287
- // Force mock provider for testing
288
- PO_DEFAULT_PROVIDER: "mock",
289
- };
290
-
291
- // Always call spawn so tests can capture it
292
- const child = spawn(process.execPath, [runnerPath, jobId], {
293
- stdio: ["ignore", "inherit", "inherit"],
294
- env,
295
- cwd: process.cwd(),
296
- });
304
+ running.set(jobId, child);
297
305
 
298
- running.set(jobId, child);
306
+ child.on("exit", () => {
307
+ running.delete(jobId);
308
+ });
309
+ child.on("error", () => {
310
+ running.delete(jobId);
311
+ });
299
312
 
300
- child.on("exit", () => {
301
- running.delete(jobId);
302
- });
303
- child.on("error", () => {
304
- running.delete(jobId);
305
- });
313
+ // In test mode: return immediately; in real mode you might await readiness
314
+ if (testMode) {
315
+ return child;
316
+ }
306
317
 
307
- // In test mode: return immediately; in real mode you might await readiness
308
- if (testMode) {
318
+ // Non-test: we can consider "started" immediately for simplicity
309
319
  return child;
320
+ } finally {
321
+ // Restore original PO_ROOT
322
+ if (originalPoRoot) {
323
+ process.env.PO_ROOT = originalPoRoot;
324
+ } else {
325
+ delete process.env.PO_ROOT;
326
+ }
310
327
  }
311
-
312
- // Non-test: we can consider "started" immediately for simplicity
313
- return child;
314
328
  }
315
329
 
316
330
  export default { startOrchestrator };
@@ -4,6 +4,7 @@ import { runPipeline } from "./task-runner.js";
4
4
  import { loadFreshModule } from "./module-loader.js";
5
5
  import { validatePipelineOrThrow } from "./validation.js";
6
6
  import { getPipelineConfig } from "./config.js";
7
+ import { writeJobStatus } from "./status-writer.js";
7
8
 
8
9
  const ROOT = process.env.PO_ROOT || process.cwd();
9
10
  const DATA_DIR = path.join(ROOT, process.env.PO_DATA_DIR || "pipeline-data");
@@ -205,12 +206,18 @@ function now() {
205
206
  }
206
207
 
207
208
  async function updateStatus(taskName, patch) {
208
- const current = JSON.parse(await fs.readFile(tasksStatusPath, "utf8"));
209
- current.current = taskName;
210
- current.tasks = current.tasks || {};
211
- current.tasks[taskName] = { ...(current.tasks[taskName] || {}), ...patch };
212
- await atomicWrite(tasksStatusPath, JSON.stringify(current, null, 2));
213
- Object.assign(status, current);
209
+ return await writeJobStatus(workDir, (snapshot) => {
210
+ snapshot.current = taskName;
211
+ snapshot.tasks = snapshot.tasks || {};
212
+ snapshot.tasks[taskName] = {
213
+ ...(snapshot.tasks[taskName] || {}),
214
+ ...patch,
215
+ };
216
+ return snapshot;
217
+ }).then((snap) => {
218
+ Object.assign(status, snap);
219
+ return snap;
220
+ });
214
221
  }
215
222
 
216
223
  async function appendLine(file, line) {
@@ -16,6 +16,9 @@ async function getSSERegistry() {
16
16
  return sseRegistry;
17
17
  }
18
18
 
19
+ // Per-job write queues to serialize writes to tasks-status.json
20
+ const writeQueues = new Map(); // Map<string jobDir, Promise<any>>
21
+
19
22
  // Instrumentation helper for status writer
20
23
  const createStatusWriterLogger = (jobId) => {
21
24
  const prefix = `[StatusWriter:${jobId || "unknown"}]`;
@@ -193,66 +196,74 @@ export async function writeJobStatus(jobDir, updateFn) {
193
196
  const jobId = path.basename(jobDir);
194
197
  const logger = createStatusWriterLogger(jobId);
195
198
 
196
- logger.group("Status Write Operation");
197
- logger.log(`Updating status for job: ${jobId}`);
198
- logger.log(`Status file path: ${statusPath}`);
199
+ // Get or create the write queue for this job directory
200
+ const prev = writeQueues.get(jobDir) || Promise.resolve();
201
+ let resultSnapshot;
199
202
 
200
- // Read existing status or create default
201
- let snapshot = await readStatusFile(statusPath, jobId);
202
- logger.log("Current status snapshot:", snapshot);
203
+ const next = prev
204
+ .then(async () => {
205
+ logger.group("Status Write Operation");
206
+ logger.log(`Updating status for job: ${jobId}`);
207
+ logger.log(`Status file path: ${statusPath}`);
203
208
 
204
- // Validate basic structure
205
- snapshot = validateStatusSnapshot(snapshot);
209
+ // Read existing status or create default
210
+ const current = await readStatusFile(statusPath, jobId);
211
+ logger.log("Current status snapshot:", current);
206
212
 
207
- // Apply user updates
208
- try {
209
- const result = updateFn(snapshot);
210
- // If updateFn returns a value, use it as new snapshot
211
- if (result !== undefined) {
212
- snapshot = result;
213
- }
214
- logger.log("Status after update function:", snapshot);
215
- } catch (error) {
216
- logger.error("Update function failed:", error);
217
- throw new Error(`Update function failed: ${error.message}`);
218
- }
213
+ // Validate basic structure
214
+ const validated = validateStatusSnapshot(current);
219
215
 
220
- // Validate final structure
221
- snapshot = validateStatusSnapshot(snapshot);
216
+ // Apply user updates
217
+ const maybeUpdated = updateFn(validated);
218
+ const snapshot = validateStatusSnapshot(
219
+ maybeUpdated === undefined ? validated : maybeUpdated
220
+ );
222
221
 
223
- // Update timestamp
224
- snapshot.lastUpdated = new Date().toISOString();
222
+ snapshot.lastUpdated = new Date().toISOString();
223
+ logger.log("Status after update function:", snapshot);
224
+
225
+ // Atomic write
226
+ await atomicWrite(statusPath, snapshot);
227
+ logger.log("Status file written successfully");
228
+
229
+ // Emit SSE event for tasks-status.json change
230
+ const registry = (await getSSERegistry().catch(() => null)) || null;
231
+ if (registry) {
232
+ try {
233
+ const eventData = {
234
+ type: "state:change",
235
+ data: {
236
+ path: path.join(jobDir, "tasks-status.json"),
237
+ id: jobId,
238
+ jobId,
239
+ },
240
+ };
241
+ registry.broadcast(eventData);
242
+ logger.sse("state:change", eventData.data);
243
+ logger.log("SSE event broadcasted successfully");
244
+ } catch (error) {
245
+ // Don't fail the write if SSE emission fails
246
+ logger.error("Failed to emit SSE event:", error);
247
+ console.warn(`Failed to emit SSE event: ${error.message}`);
248
+ }
249
+ } else {
250
+ logger.warn("SSE registry not available - no event broadcasted");
251
+ }
225
252
 
226
- // Atomic write
227
- await atomicWrite(statusPath, snapshot);
228
- logger.log("Status file written successfully");
253
+ logger.groupEnd();
254
+ resultSnapshot = snapshot;
255
+ })
256
+ .catch((e) => {
257
+ throw e;
258
+ });
229
259
 
230
- // Emit SSE event for tasks-status.json change
231
- const registry = await getSSERegistry();
232
- if (registry) {
233
- try {
234
- const eventData = {
235
- type: "state:change",
236
- data: {
237
- path: path.join(jobDir, "tasks-status.json"),
238
- id: jobId,
239
- jobId,
240
- },
241
- };
242
- registry.broadcast(eventData);
243
- logger.sse("state:change", eventData.data);
244
- logger.log("SSE event broadcasted successfully");
245
- } catch (error) {
246
- // Don't fail the write if SSE emission fails
247
- logger.error("Failed to emit SSE event:", error);
248
- console.warn(`Failed to emit SSE event: ${error.message}`);
249
- }
250
- } else {
251
- logger.warn("SSE registry not available - no event broadcasted");
252
- }
260
+ // Store the promise chain and set up cleanup
261
+ writeQueues.set(
262
+ jobDir,
263
+ next.finally(() => {})
264
+ );
253
265
 
254
- logger.groupEnd();
255
- return snapshot;
266
+ return next.then(() => resultSnapshot);
256
267
  }
257
268
 
258
269
  /**