@exaudeus/workrail 3.59.3 → 3.59.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -38,7 +38,18 @@ async function runReviewAndVerdictCycle(deps, opts, prUrl, coordinatorStartMs, i
38
38
  escalationReason: { phase: 'review', reason: `review session ${outcome}` },
39
39
  };
40
40
  }
41
- const agentResult = await deps.getAgentResult(reviewHandle);
41
+ let agentResult;
42
+ try {
43
+ agentResult = await deps.getAgentResult(reviewHandle);
44
+ }
45
+ catch (e) {
46
+ const msg = e instanceof Error ? e.message : String(e);
47
+ deps.stderr(`[coordinator] getAgentResult failed: ${msg}`);
48
+ return {
49
+ kind: 'escalated',
50
+ escalationReason: { phase: 'review', reason: `getAgentResult threw: ${msg}` },
51
+ };
52
+ }
42
53
  const verdictFromArtifact = (0, pr_review_js_1.readVerdictArtifact)(agentResult.artifacts, reviewHandle);
43
54
  const findingsResult = verdictFromArtifact !== null
44
55
  ? { kind: 'ok', value: verdictFromArtifact }
@@ -59,7 +70,12 @@ async function runReviewAndVerdictCycle(deps, opts, prUrl, coordinatorStartMs, i
59
70
  case 'minor': {
60
71
  if (iteration >= exports.MAX_FIX_ITERATIONS) {
61
72
  deps.stderr(`[review-cycle] ${exports.MAX_FIX_ITERATIONS} fix iterations exhausted -- escalating`);
62
- await deps.postToOutbox(`Adaptive pipeline escalated: fix loop exhausted after ${exports.MAX_FIX_ITERATIONS} iterations`, { prUrl, phase: 'fix-loop', reason: 'max iterations reached', findingSummaries: findings.findingSummaries });
73
+ try {
74
+ await deps.postToOutbox(`Adaptive pipeline escalated: fix loop exhausted after ${exports.MAX_FIX_ITERATIONS} iterations`, { prUrl, phase: 'fix-loop', reason: 'max iterations reached', findingSummaries: findings.findingSummaries });
75
+ }
76
+ catch (e) {
77
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
78
+ }
63
79
  return {
64
80
  kind: 'escalated',
65
81
  escalationReason: { phase: 'fix-loop', reason: `${exports.MAX_FIX_ITERATIONS} fix iterations exhausted` },
@@ -112,7 +128,12 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
112
128
  : 'production-readiness-audit';
113
129
  const auditSpawnResult = await deps.spawnSession(auditWorkflow, `Audit PR before merge: ${prUrl}`, opts.workspace, { prUrl, severity });
114
130
  if (auditSpawnResult.kind === 'err') {
115
- await deps.postToOutbox(`Adaptive pipeline escalated: audit workflow failed to spawn`, { prUrl, phase: 'audit', reason: auditSpawnResult.error, severity });
131
+ try {
132
+ await deps.postToOutbox(`Adaptive pipeline escalated: audit workflow failed to spawn`, { prUrl, phase: 'audit', reason: auditSpawnResult.error, severity });
133
+ }
134
+ catch (e) {
135
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
136
+ }
116
137
  return {
117
138
  kind: 'escalated',
118
139
  escalationReason: { phase: 'audit', reason: `audit spawn failed: ${auditSpawnResult.error}` },
@@ -120,7 +141,12 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
120
141
  }
121
142
  const auditHandle = auditSpawnResult.value;
122
143
  if (!auditHandle) {
123
- await deps.postToOutbox(`Adaptive pipeline escalated: audit returned empty handle`, { prUrl, phase: 'audit', severity });
144
+ try {
145
+ await deps.postToOutbox(`Adaptive pipeline escalated: audit returned empty handle`, { prUrl, phase: 'audit', severity });
146
+ }
147
+ catch (e) {
148
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
149
+ }
124
150
  return {
125
151
  kind: 'escalated',
126
152
  escalationReason: { phase: 'audit', reason: 'audit returned empty handle' },
@@ -130,7 +156,12 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
130
156
  const auditResult = auditAwait.results[0];
131
157
  if (!auditResult || auditResult.outcome !== 'success') {
132
158
  const outcome = auditResult?.outcome ?? 'not_found';
133
- await deps.postToOutbox(`Adaptive pipeline escalated: audit session ${outcome}`, { prUrl, phase: 'audit', auditOutcome: outcome, severity });
159
+ try {
160
+ await deps.postToOutbox(`Adaptive pipeline escalated: audit session ${outcome}`, { prUrl, phase: 'audit', auditOutcome: outcome, severity });
161
+ }
162
+ catch (e) {
163
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
164
+ }
134
165
  return {
135
166
  kind: 'escalated',
136
167
  escalationReason: { phase: 'audit', reason: `audit session ${outcome}` },
@@ -142,7 +173,12 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
142
173
  return reReviewCutoff;
143
174
  const reReviewSpawnResult = await deps.spawnSession('mr-review-workflow-agentic', `Re-review after audit: ${prUrl}`, opts.workspace, { prUrl, auditComplete: true });
144
175
  if (reReviewSpawnResult.kind === 'err') {
145
- await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit failed to spawn`, { prUrl, phase: 're-review-after-audit', reason: reReviewSpawnResult.error });
176
+ try {
177
+ await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit failed to spawn`, { prUrl, phase: 're-review-after-audit', reason: reReviewSpawnResult.error });
178
+ }
179
+ catch (e) {
180
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
181
+ }
146
182
  return {
147
183
  kind: 'escalated',
148
184
  escalationReason: {
@@ -153,7 +189,12 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
153
189
  }
154
190
  const reReviewHandle = reReviewSpawnResult.value;
155
191
  if (!reReviewHandle) {
156
- await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit returned empty handle`, { prUrl, phase: 're-review-after-audit' });
192
+ try {
193
+ await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit returned empty handle`, { prUrl, phase: 're-review-after-audit' });
194
+ }
195
+ catch (e) {
196
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
197
+ }
157
198
  return {
158
199
  kind: 'escalated',
159
200
  escalationReason: { phase: 're-review-after-audit', reason: 're-review returned empty handle' },
@@ -163,19 +204,40 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
163
204
  const reReviewResult = reReviewAwait.results[0];
164
205
  if (!reReviewResult || reReviewResult.outcome !== 'success') {
165
206
  const outcome = reReviewResult?.outcome ?? 'not_found';
166
- await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit session ${outcome}`, { prUrl, phase: 're-review-after-audit', reReviewOutcome: outcome });
207
+ try {
208
+ await deps.postToOutbox(`Adaptive pipeline escalated: re-review after audit session ${outcome}`, { prUrl, phase: 're-review-after-audit', reReviewOutcome: outcome });
209
+ }
210
+ catch (e) {
211
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
212
+ }
167
213
  return {
168
214
  kind: 'escalated',
169
215
  escalationReason: { phase: 're-review-after-audit', reason: `re-review session ${outcome}` },
170
216
  };
171
217
  }
172
- const reAgentResult = await deps.getAgentResult(reReviewHandle);
218
+ let reAgentResult;
219
+ try {
220
+ reAgentResult = await deps.getAgentResult(reReviewHandle);
221
+ }
222
+ catch (e) {
223
+ const msg = e instanceof Error ? e.message : String(e);
224
+ deps.stderr(`[coordinator] getAgentResult failed: ${msg}`);
225
+ return {
226
+ kind: 'escalated',
227
+ escalationReason: { phase: 'review', reason: `getAgentResult threw: ${msg}` },
228
+ };
229
+ }
173
230
  const reVerdictFromArtifact = (0, pr_review_js_1.readVerdictArtifact)(reAgentResult.artifacts, reReviewHandle);
174
231
  const reFindingsResult = reVerdictFromArtifact !== null
175
232
  ? { kind: 'ok', value: reVerdictFromArtifact }
176
233
  : (0, pr_review_js_1.parseFindingsFromNotes)(reAgentResult.recapMarkdown);
177
234
  if (reFindingsResult.kind === 'err') {
178
- await deps.postToOutbox(`Adaptive pipeline escalated: re-review verdict unparseable after audit`, { prUrl, phase: 're-review-after-audit' });
235
+ try {
236
+ await deps.postToOutbox(`Adaptive pipeline escalated: re-review verdict unparseable after audit`, { prUrl, phase: 're-review-after-audit' });
237
+ }
238
+ catch (e) {
239
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
240
+ }
179
241
  return {
180
242
  kind: 'escalated',
181
243
  escalationReason: { phase: 're-review-after-audit', reason: `re-review verdict parse failed` },
@@ -188,13 +250,18 @@ async function runAuditChain(deps, opts, prUrl, coordinatorStartMs, severity, fi
188
250
  return { kind: 'merged', prUrl };
189
251
  }
190
252
  deps.stderr(`[audit-chain] Post-audit verdict still ${reFindings.severity} -- escalating to Human Outbox`);
191
- await deps.postToOutbox(`PR requires human review: still ${reFindings.severity} after production-readiness audit`, {
192
- prUrl,
193
- phase: 'audit-chain-complete',
194
- severity: reFindings.severity,
195
- findingSummaries: reFindings.findingSummaries,
196
- note: 'Do NOT auto-merge. Human review required.',
197
- });
253
+ try {
254
+ await deps.postToOutbox(`PR requires human review: still ${reFindings.severity} after production-readiness audit`, {
255
+ prUrl,
256
+ phase: 'audit-chain-complete',
257
+ severity: reFindings.severity,
258
+ findingSummaries: reFindings.findingSummaries,
259
+ note: 'Do NOT auto-merge. Human review required.',
260
+ });
261
+ }
262
+ catch (e) {
263
+ deps.stderr(`[WARN coordinator] postToOutbox failed: ${e instanceof Error ? e.message : String(e)}`);
264
+ }
198
265
  return {
199
266
  kind: 'escalated',
200
267
  escalationReason: {
@@ -50,6 +50,12 @@ async function runImplementCore(deps, opts, pitchPath, coordinatorStartMs) {
50
50
  };
51
51
  }
52
52
  const uxHandle = uxSpawnResult.value;
53
+ if (!uxHandle || uxHandle.trim() === '') {
54
+ return {
55
+ kind: 'escalated',
56
+ escalationReason: { phase: 'ux-gate', reason: 'UX design session returned empty handle' },
57
+ };
58
+ }
53
59
  const uxAwait = await deps.awaitSessions([uxHandle], adaptive_pipeline_js_1.REVIEW_TIMEOUT_MS);
54
60
  const uxResult = uxAwait.results[0];
55
61
  if (!uxResult || uxResult.outcome !== 'success') {
@@ -93,7 +99,18 @@ async function runImplementCore(deps, opts, pitchPath, coordinatorStartMs) {
93
99
  deps.stderr(`[implement] Coding session completed (${Math.round((codingResult.durationMs ?? 0) / 1000)}s)`);
94
100
  const branchPattern = `worktrain/${codingHandle.slice(0, 16)}`;
95
101
  deps.stderr(`[implement] Polling for PR on branch pattern: ${branchPattern}`);
96
- const prUrl = await deps.pollForPR(branchPattern, PR_POLL_TIMEOUT_MS);
102
+ let prUrl;
103
+ try {
104
+ prUrl = await deps.pollForPR(branchPattern, PR_POLL_TIMEOUT_MS);
105
+ }
106
+ catch (e) {
107
+ const msg = e instanceof Error ? e.message : String(e);
108
+ deps.stderr(`[coordinator] pollForPR threw: ${msg}`);
109
+ return {
110
+ kind: 'escalated',
111
+ escalationReason: { phase: 'pr-detection', reason: `pollForPR threw: ${msg}` },
112
+ };
113
+ }
97
114
  if (!prUrl) {
98
115
  return {
99
116
  kind: 'escalated',
@@ -49,7 +49,7 @@ export interface CoordinatorDeps {
49
49
  readonly writeFile: (path: string, content: string) => Promise<void>;
50
50
  readonly stderr: (line: string) => void;
51
51
  readonly now: () => number;
52
- readonly port: number;
52
+ readonly port?: number;
53
53
  readonly readFile: (path: string) => Promise<string>;
54
54
  readonly appendFile: (path: string, content: string) => Promise<void>;
55
55
  readonly mkdir: (path: string, options: {
@@ -481,8 +481,8 @@
481
481
  "sha256": "5fe866e54f796975dec5d8ba9983aefd86074db212d3fccd64eed04bc9f0b3da",
482
482
  "bytes": 8011
483
483
  },
484
- "console-ui/assets/index-C8iMtnPv.js": {
485
- "sha256": "c707ddf823fe337a5ac6eed815d33ba7c0bccd964b84ecc7a1afee7636e24cd5",
484
+ "console-ui/assets/index-BuMfiLrV.js": {
485
+ "sha256": "e7d0e3f4ded8e370e8c34846ffce1404d28d0eb2613bf89b10cfc22a678ea6cf",
486
486
  "bytes": 760528
487
487
  },
488
488
  "console-ui/assets/index-DGj8EsFR.css": {
@@ -490,7 +490,7 @@
490
490
  "bytes": 60631
491
491
  },
492
492
  "console-ui/index.html": {
493
- "sha256": "c16cbc642df52ea7ddb8cf78e3e196dcfb8ee4798089e1686970317747ad5dec",
493
+ "sha256": "314e6af46d83e7e0daa7a3efb2a998ac981f7f2ff58bae090ddcbd26b477c855",
494
494
  "bytes": 417
495
495
  },
496
496
  "console/standalone-console.d.ts": {
@@ -546,24 +546,24 @@
546
546
  "bytes": 462
547
547
  },
548
548
  "coordinators/modes/full-pipeline.js": {
549
- "sha256": "762f4d637389e77cc71a31fc54034061edbb8a30a0332c0b60ecf560b7d6743a",
550
- "bytes": 11782
549
+ "sha256": "a03cf485201d23b0ddf75ca36ea10741bb9d0373479e7df3350401653229ef8b",
550
+ "bytes": 12850
551
551
  },
552
552
  "coordinators/modes/implement-shared.d.ts": {
553
553
  "sha256": "fbad9d91d84d2112b273175618686489a7f106385e0e62d6cab80804d6d0f2d7",
554
554
  "bytes": 708
555
555
  },
556
556
  "coordinators/modes/implement-shared.js": {
557
- "sha256": "a1727713839630d279377e9607b26068821a5a393bfbd42696222fb97ec4fe5f",
558
- "bytes": 11400
557
+ "sha256": "117eec98c38826e62150e4ca1ece6ac1cad91b2d0d7128dc34acb609151e4619",
558
+ "bytes": 13672
559
559
  },
560
560
  "coordinators/modes/implement.d.ts": {
561
561
  "sha256": "23919c24d62a0bf15296a52fbc594cca8b1b34e6f8d98dcf7dede8d97ad4cabb",
562
562
  "bytes": 347
563
563
  },
564
564
  "coordinators/modes/implement.js": {
565
- "sha256": "fa668b93b643ad3448844754e7e27ba33ca7324f6948e7414f04b69a8e3c5fba",
566
- "bytes": 4955
565
+ "sha256": "e05ad7330c00db7b2ad8baf27e224616eae1b5cce460a0394a80633fca8827bf",
566
+ "bytes": 5522
567
567
  },
568
568
  "coordinators/modes/quick-review.d.ts": {
569
569
  "sha256": "03a4f29a07047b0bf788d84f8e0ebab63d64c8eb98aa57087943a8fb84563998",
@@ -582,8 +582,8 @@
582
582
  "bytes": 1198
583
583
  },
584
584
  "coordinators/pr-review.d.ts": {
585
- "sha256": "8f6794d60ecabaf6898199120620fcd5aac932b64654636bc8d308103c987e57",
586
- "bytes": 3832
585
+ "sha256": "a8886a3c83a31e869522812d1342a301e9bfae92d8e5e694594c3c50912035d9",
586
+ "bytes": 3833
587
587
  },
588
588
  "coordinators/pr-review.js": {
589
589
  "sha256": "84b51f931eb55d908de8c60f90b4d4b66540054791a28ce2f07426a841fed386",
@@ -1730,8 +1730,8 @@
1730
1730
  "bytes": 1740
1731
1731
  },
1732
1732
  "trigger/trigger-listener.js": {
1733
- "sha256": "ac38c1cac9f8c3f16a3e3e2104b37082a2315a79ea1a419211064a92faaf72e3",
1734
- "bytes": 25084
1733
+ "sha256": "09b8bbcda1825a9314dc29ac7435ef703fb0cdad13fa54ffe45f68767f22fbc7",
1734
+ "bytes": 25095
1735
1735
  },
1736
1736
  "trigger/trigger-router.d.ts": {
1737
1737
  "sha256": "b916f33cab64d491ab04bd13dd37599d33e687f7aea1e69e50f5fcea4b3b4624",
@@ -201,8 +201,21 @@ async function startTriggerListener(ctx, options) {
201
201
  ? new notification_service_js_1.NotificationService({ macOs: notifyMacOs, webhookUrl: notifyWebhook })
202
202
  : undefined;
203
203
  const steerRegistry = new Map();
204
- const DAEMON_CONSOLE_PORT = 3456;
205
204
  const execFileAsync = (0, node_util_1.promisify)(node_child_process_1.execFile);
205
+ const { ConsoleService } = await Promise.resolve().then(() => __importStar(require('../v2/usecases/console-service.js')));
206
+ let consoleService = null;
207
+ if (!ctx.v2?.dataDir || !ctx.v2?.directoryListing) {
208
+ process.stderr.write('[CRITICAL trigger-listener:reason=consoleService_unavailable] ctx.v2.dataDir or ctx.v2.directoryListing not available -- awaitSessions and getAgentResult will degrade to all-failed / empty results\n');
209
+ }
210
+ else {
211
+ consoleService = new ConsoleService({
212
+ directoryListing: ctx.v2.directoryListing,
213
+ dataDir: ctx.v2.dataDir,
214
+ sessionStore: ctx.v2.sessionStore,
215
+ snapshotStore: ctx.v2.snapshotStore,
216
+ pinnedWorkflowStore: ctx.v2.pinnedStore,
217
+ });
218
+ }
206
219
  let routerRef;
207
220
  const coordinatorDeps = {
208
221
  spawnSession: async (workflowId, goal, workspace, context) => {
@@ -256,98 +269,97 @@ async function startTriggerListener(ctx, options) {
256
269
  nowIso: () => new Date().toISOString(),
257
270
  }),
258
271
  awaitSessions: async (handles, timeoutMs) => {
259
- const { executeWorktrainAwaitCommand } = await Promise.resolve().then(() => __importStar(require('../cli/commands/worktrain-await.js')));
260
- let resolvedResult = null;
261
- await executeWorktrainAwaitCommand({
262
- fetch: (url) => globalThis.fetch(url),
263
- readFile: (p) => fs.promises.readFile(p, 'utf-8'),
264
- stdout: (line) => {
272
+ const POLL_INTERVAL_MS = 3000;
273
+ if (consoleService === null) {
274
+ process.stderr.write(`[WARN coord:reason=await_degraded] awaitSessions: ConsoleService unavailable -- returning all ${handles.length} session(s) as failed.\n`);
275
+ return {
276
+ results: [...handles].map((h) => ({
277
+ handle: h,
278
+ outcome: 'failed',
279
+ status: null,
280
+ durationMs: 0,
281
+ })),
282
+ allSucceeded: false,
283
+ };
284
+ }
285
+ const startMs = Date.now();
286
+ const pending = new Set(handles);
287
+ const results = new Map();
288
+ while (pending.size > 0) {
289
+ const elapsed = Date.now() - startMs;
290
+ if (elapsed >= timeoutMs) {
291
+ break;
292
+ }
293
+ for (const handle of [...pending]) {
265
294
  try {
266
- resolvedResult = JSON.parse(line);
295
+ const detail = await consoleService.getSessionDetail(handle);
296
+ if (detail.isErr()) {
297
+ continue;
298
+ }
299
+ const run = detail.value.runs[0];
300
+ if (!run)
301
+ continue;
302
+ const status = run.status;
303
+ if (status === 'complete' || status === 'complete_with_gaps') {
304
+ results.set(handle, { handle, outcome: 'success', status, durationMs: Date.now() - startMs });
305
+ pending.delete(handle);
306
+ }
307
+ else if (status === 'blocked') {
308
+ results.set(handle, { handle, outcome: 'failed', status, durationMs: Date.now() - startMs });
309
+ pending.delete(handle);
310
+ }
267
311
  }
268
- catch { }
269
- },
270
- stderr: (line) => process.stderr.write(line + '\n'),
271
- homedir: os.homedir,
272
- joinPath: path.join,
273
- sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
274
- now: () => Date.now(),
275
- }, {
276
- sessions: [...handles].join(','),
277
- mode: 'all',
278
- timeout: `${Math.round(timeoutMs / 1000)}s`,
279
- port: DAEMON_CONSOLE_PORT,
280
- });
281
- if (resolvedResult === null) {
282
- process.stderr.write(`[WARN coord:reason=await_failed] awaitSessions: could not get session results -- daemon may be unreachable or timed out. Returning all ${handles.length} session(s) as failed.\n`);
312
+ catch {
313
+ results.set(handle, { handle, outcome: 'failed', status: null, durationMs: Date.now() - startMs });
314
+ pending.delete(handle);
315
+ }
316
+ }
317
+ if (pending.size > 0) {
318
+ await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));
319
+ }
283
320
  }
284
- return resolvedResult ?? {
285
- results: [...handles].map((h) => ({
286
- handle: h,
287
- outcome: 'failed',
288
- status: null,
289
- durationMs: 0,
290
- })),
291
- allSucceeded: false,
321
+ for (const handle of pending) {
322
+ results.set(handle, { handle, outcome: 'timeout', status: null, durationMs: timeoutMs });
323
+ }
324
+ const resultsArray = [...results.values()];
325
+ return {
326
+ results: resultsArray,
327
+ allSucceeded: resultsArray.every((r) => r.outcome === 'success'),
292
328
  };
293
329
  },
294
330
  getAgentResult: async (sessionHandle) => {
295
331
  const emptyResult = { recapMarkdown: null, artifacts: [] };
332
+ if (consoleService === null) {
333
+ return emptyResult;
334
+ }
296
335
  try {
297
- const sessionUrl = `http://127.0.0.1:${DAEMON_CONSOLE_PORT}/api/v2/sessions/${encodeURIComponent(sessionHandle)}`;
298
- const sessionRes = await globalThis.fetch(sessionUrl, { signal: AbortSignal.timeout(30000) });
299
- if (!sessionRes.ok) {
300
- process.stderr.write(`[WARN coord:reason=http_error status=${sessionRes.status} handle=${sessionHandle.slice(0, 16)}] getAgentResult: session fetch returned HTTP ${sessionRes.status}\n`);
336
+ const detailResult = await consoleService.getSessionDetail(sessionHandle);
337
+ if (detailResult.isErr())
301
338
  return emptyResult;
302
- }
303
- const sessionBody = await sessionRes.json();
304
- if (sessionBody['success'] !== true) {
305
- return emptyResult;
306
- }
307
- const data = sessionBody['data'];
308
- if (!data)
339
+ const run = detailResult.value.runs[0];
340
+ if (!run)
309
341
  return emptyResult;
310
- const runs = data['runs'];
311
- if (!Array.isArray(runs) || runs.length === 0)
312
- return emptyResult;
313
- const firstRun = runs[0];
314
- const tipNodeId = typeof firstRun['preferredTipNodeId'] === 'string'
315
- ? firstRun['preferredTipNodeId']
316
- : null;
342
+ const tipNodeId = run.preferredTipNodeId;
317
343
  if (!tipNodeId)
318
344
  return emptyResult;
319
- const allNodes = Array.isArray(firstRun['nodes'])
320
- ? firstRun['nodes']
321
- : [];
322
- const allNodeIds = allNodes
323
- .map((n) => (typeof n['nodeId'] === 'string' ? n['nodeId'] : null))
324
- .filter((id) => id !== null);
345
+ const allNodeIds = run.nodes.map((n) => n.nodeId).filter((id) => typeof id === 'string' && id !== '');
325
346
  const nodeIdsToFetch = allNodeIds.length > 0 ? allNodeIds : [tipNodeId];
326
- const baseNodeUrl = `http://127.0.0.1:${DAEMON_CONSOLE_PORT}/api/v2/sessions/${encodeURIComponent(sessionHandle)}/nodes/`;
327
347
  let recap = null;
328
348
  const collectedArtifacts = [];
329
349
  for (const nodeId of nodeIdsToFetch) {
330
350
  try {
331
- const nodeRes = await globalThis.fetch(baseNodeUrl + encodeURIComponent(nodeId), { signal: AbortSignal.timeout(30000) });
332
- if (!nodeRes.ok)
333
- continue;
334
- const nodeBody = await nodeRes.json();
335
- if (nodeBody['success'] !== true)
336
- continue;
337
- const nodeData = nodeBody['data'];
338
- if (!nodeData)
351
+ const nodeResult = await consoleService.getNodeDetail(sessionHandle, nodeId);
352
+ if (nodeResult.isErr())
339
353
  continue;
340
354
  if (nodeId === tipNodeId) {
341
- recap = typeof nodeData['recapMarkdown'] === 'string' ? nodeData['recapMarkdown'] : null;
355
+ recap = nodeResult.value.recapMarkdown;
342
356
  }
343
- const nodeArtifacts = nodeData['artifacts'];
344
- if (Array.isArray(nodeArtifacts) && nodeArtifacts.length > 0) {
345
- collectedArtifacts.push(...nodeArtifacts);
357
+ if (nodeResult.value.artifacts.length > 0) {
358
+ collectedArtifacts.push(...nodeResult.value.artifacts);
346
359
  }
347
360
  }
348
- catch (nodeErr) {
349
- const msg = nodeErr instanceof Error ? nodeErr.message : String(nodeErr);
350
- process.stderr.write(`[WARN coord:reason=node_exception handle=${sessionHandle.slice(0, 16)} node=${nodeId.slice(0, 16)}] getAgentResult: ${msg}\n`);
361
+ catch {
362
+ continue;
351
363
  }
352
364
  }
353
365
  return { recapMarkdown: recap, artifacts: collectedArtifacts };
@@ -396,7 +408,6 @@ async function startTriggerListener(ctx, options) {
396
408
  generateId: () => (0, node_crypto_1.randomUUID)(),
397
409
  stderr: (line) => process.stderr.write(line + '\n'),
398
410
  now: () => Date.now(),
399
- port: DAEMON_CONSOLE_PORT,
400
411
  fileExists: (p) => fs.existsSync(p),
401
412
  archiveFile: (src, dest) => fs.promises.rename(src, dest),
402
413
  pollForPR: async (branchPattern, timeoutMs) => {
@@ -0,0 +1,128 @@
1
+ # Design Candidates: In-Process awaitSessions and getAgentResult
2
+
3
+ **Date:** 2026-04-19
4
+ **Task:** Replace HTTP-to-self `awaitSessions` and `getAgentResult` in `src/trigger/trigger-listener.ts` with in-process `ConsoleService` calls.
5
+
6
+ ---
7
+
8
+ ## Problem Understanding
9
+
10
+ ### Tensions
11
+
12
+ 1. **Construction order vs. dependency injection**: `coordinatorDeps` must be constructed before `TriggerRouter` (it is a constructor argument), but `ConsoleService` needs to be available inside the closure. The `routerRef` forward-reference pattern already solves a similar ordering problem -- `consoleService` can be constructed before the closure and captured by the closure.
13
+
14
+ 2. **Graceful degradation vs. correctness**: If `ctx.v2.dataDir` or `ctx.v2.directoryListing` is null (the daemon-console.ts path guards against this), should `awaitSessions` degrade to returning all-failed or crash? The design doc says construct before `coordinatorDeps` -- the guard should produce a logged warning and graceful fallback since the coordinator handles `allSucceeded: false`.
15
+
16
+ 3. **Session visibility race**: Sessions created in-process by `spawnSession()` may not be immediately readable via `getSessionDetail()`. This is why `SESSION_LOAD_FAILED` must be treated as "not ready yet" (retry), not as failure.
17
+
18
+ 4. **Interface cleanliness vs. minimal scope**: `CoordinatorDeps.port` is a required field (`readonly port: number`) that is never read by coordinator logic (`grep deps.port` returns nothing). Removing `port: DAEMON_CONSOLE_PORT` from the trigger-listener deps object requires either (a) making `port` optional in the interface, or (b) using a `0` sentinel.
19
+
20
+ ### Likely Seam
21
+
22
+ The composition root `startTriggerListener()` in `src/trigger/trigger-listener.ts` is the correct and only seam. This is where all other deps are wired, `ctx.v2.*` ports are available, and `ConsoleService` can be constructed.
23
+
24
+ ### What Makes This Hard
25
+
26
+ - `CoordinatorDeps.port` is required but unused by coordinator logic. The design doc says to remove it, but the interface requires it. TypeScript will reject omitting a required field.
27
+ - The `error` terminal status mentioned in the task description does not exist in `ConsoleRunStatus` (`'in_progress' | 'complete' | 'complete_with_gaps' | 'blocked'`). The design doc is authoritative.
28
+ - The dynamic import pattern is required to avoid circular dependency (same as `daemon-console.ts:113`).
29
+
30
+ ---
31
+
32
+ ## Philosophy Constraints
33
+
34
+ From `/Users/etienneb/CLAUDE.md`:
35
+ - **Architectural fixes over patches** -- this fix IS the architectural fix (in-process instead of HTTP)
36
+ - **Immutability by default** -- `pending Set` mutation is minimal and contained in the polling loop
37
+ - **Errors are data** -- use `.isOk()` / `.isErr()` on `ResultAsync`, not try/catch
38
+ - **Validate at boundaries, trust inside** -- guard `ctx.v2.dataDir` at construction time
39
+ - **Document "why", not "what"** -- add WHY comments on new implementations
40
+
41
+ No philosophy conflicts with repo patterns -- `daemon-console.ts` already uses the exact same construction approach.
42
+
43
+ ---
44
+
45
+ ## Impact Surface
46
+
47
+ - **`AdaptiveCoordinatorDeps` interface**: No change (extends `CoordinatorDeps`, no new fields needed)
48
+ - **`CoordinatorDeps` interface** (`src/coordinators/pr-review.ts:210`): `port` is a required `number` field -- it must be made optional if it is removed from the trigger-listener deps object
49
+ - **CLI path** (`src/cli-worktrain.ts:1549`): sets `port` in deps object for out-of-process coordinator -- remains correct either way
50
+ - **Pipeline coordinators** (`full-pipeline.ts`, `implement.ts`, `pr-review.ts`): call `awaitSessions`/`getAgentResult` by interface -- behavior change is transparent
51
+ - **`src/mcp/`**: Not touched (explicit out-of-scope)
52
+
53
+ ---
54
+
55
+ ## Candidates
56
+
57
+ ### Candidate A: Design doc implementation with `port` made optional
58
+
59
+ **Summary**: Implement exactly per design doc. Construct `ConsoleService` locally in `startTriggerListener()`. Replace `awaitSessions` and `getAgentResult` with in-process calls. Make `readonly port: number` optional (`readonly port?: number`) in `CoordinatorDeps` to allow removing it from the trigger-listener deps object.
60
+
61
+ **Tensions resolved**: All four tensions resolved cleanly. `SESSION_LOAD_FAILED` = retry. Guard for `ctx.v2` nulls. `port` field made honest (optional, unused by logic).
62
+
63
+ **Tension accepted**: Requires touching `src/coordinators/pr-review.ts` for a one-character interface change.
64
+
65
+ **Boundary**: `startTriggerListener()` composition root -- correct seam.
66
+
67
+ **Why best fit**: Fully executes design doc intent. `deps.port` confirmed unused by all coordinator logic.
68
+
69
+ **Failure mode**: None identified. `grep deps.port` confirmed zero usages in coordinator logic.
70
+
71
+ **Repo pattern**: Follows `daemon-console.ts` construction pattern exactly. Follows `spawnSession` in-process migration pattern.
72
+
73
+ **Gains**: Clean interface, no dead required field, full design doc compliance, no `0` sentinel.
74
+
75
+ **Losses**: Touches `src/coordinators/pr-review.ts` (one-char change).
76
+
77
+ **Scope judgment**: Best-fit. The scope restriction says "do not touch `src/mcp/`" not "do not touch `src/coordinators/`".
78
+
79
+ **Philosophy fit**: Honors "architectural fixes over patches", "make illegal states unrepresentable" (no sentinel), "errors are data".
80
+
81
+ ---
82
+
83
+ ### Candidate B: Design doc implementation, keep `port: 0` sentinel
84
+
85
+ **Summary**: Same `ConsoleService` construction and awaitSessions/getAgentResult replacement, but set `port: 0` in the trigger-listener deps object instead of making the interface field optional.
86
+
87
+ **Tensions resolved**: Removes HTTP-to-self bugs. Zero interface changes.
88
+
89
+ **Tension accepted**: Leaves dead required field with a misleading `0` value.
90
+
91
+ **Failure mode**: Future code reads `deps.port` and uses `0` as a real port, producing silent bugs.
92
+
93
+ **Repo pattern**: Departs from design doc intent ("remove the constant and port from coordinatorDeps").
94
+
95
+ **Gains**: No interface touch; purely local change.
96
+
97
+ **Losses**: Interface stays polluted with unused required field; violates design doc intent; `0` sentinel is an illegal state that can be constructed.
98
+
99
+ **Scope judgment**: Too narrow (doesn't fully execute design doc intent).
100
+
101
+ **Philosophy fit**: Conflicts with "make illegal states unrepresentable" and "architectural fixes over patches".
102
+
103
+ ---
104
+
105
+ ## Comparison and Recommendation
106
+
107
+ **Recommendation: Candidate A.**
108
+
109
+ `deps.port` is confirmed unused by any coordinator logic (exhaustive grep). Making it optional is a one-character change that eliminates a dead field and fully executes the design doc intent. The scope restriction is explicitly "do not touch `src/mcp/`" -- `src/coordinators/pr-review.ts` is in scope. Candidate A is the correct architectural fix.
110
+
111
+ The `0` sentinel in Candidate B is precisely the kind of "patch over architectural fix" that CLAUDE.md's philosophy warns against.
112
+
113
+ ---
114
+
115
+ ## Self-Critique
116
+
117
+ **Strongest argument against Candidate A**: A conservative interpretation of "only touch trigger-listener.ts" would favor the sentinel. If the reviewer intended zero interface changes, Candidate B is the safe choice.
118
+
119
+ **Pivot condition**: If touching `pr-review.ts` causes unexpected test failures (e.g., tests construct `CoordinatorDeps` with `port` required and would need to add `port: undefined`), fall back to `port: 0` sentinel or make it optional with a default. But since `port` is already unused, this risk is low.
120
+
121
+ **Assumption that would invalidate**: that some test or code path actually reads `deps.port` and would break if `0` is used or the field is absent. The exhaustive grep confirms no such usage exists.
122
+
123
+ ---
124
+
125
+ ## Open Questions for Main Agent
126
+
127
+ 1. Should the guard for `ctx.v2.dataDir === undefined` cause a process.stderr warning only, or should it cause `startTriggerListener` to return an `err`? (Daemon-console.ts returns `err` -- but trigger-listener has already started by this point. Recommendation: warn + let `awaitSessions`/`getAgentResult` return degraded results.)
128
+ 2. Should the `consoleService` local variable be constructed inside a try/catch or guarded more defensively? (No -- the constructor is synchronous and cannot throw given valid inputs.)