@exaudeus/workrail 3.42.0 → 3.43.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/console-ui/assets/{index-DwfWMKvv.js → index-Sb57DW4B.js} +1 -1
- package/dist/console-ui/index.html +1 -1
- package/dist/daemon/workflow-runner.d.ts +11 -1
- package/dist/daemon/workflow-runner.js +71 -9
- package/dist/manifest.json +17 -17
- package/dist/trigger/delivery-action.d.ts +2 -0
- package/dist/trigger/delivery-action.js +24 -0
- package/dist/trigger/trigger-router.js +24 -1
- package/dist/trigger/trigger-store.js +42 -0
- package/dist/trigger/types.d.ts +3 -0
- package/docs/design/adaptive-coordinator-context-candidates.md +265 -0
- package/docs/design/adaptive-coordinator-context-review.md +101 -0
- package/docs/design/adaptive-coordinator-context.md +504 -0
- package/docs/design/adaptive-coordinator-routing-candidates.md +340 -0
- package/docs/design/adaptive-coordinator-routing-design-review.md +135 -0
- package/docs/design/adaptive-coordinator-routing-review.md +156 -0
- package/docs/design/adaptive-coordinator-routing.md +660 -0
- package/docs/design/context-assembly-layer-design-review.md +110 -0
- package/docs/design/context-assembly-layer.md +622 -0
- package/docs/design/stuck-escalation-candidates.md +176 -0
- package/docs/design/stuck-escalation-design-review.md +70 -0
- package/docs/design/stuck-escalation.md +326 -0
- package/docs/design/worktrain-task-queue-candidates.md +252 -0
- package/docs/design/worktrain-task-queue-design-review.md +109 -0
- package/docs/design/worktrain-task-queue.md +443 -0
- package/docs/design/worktree-review-findings-candidates.md +101 -0
- package/docs/design/worktree-review-findings-design-review.md +65 -0
- package/docs/design/worktree-review-findings-implementation-plan.md +153 -0
- package/docs/ideas/backlog.md +148 -0
- package/package.json +3 -3
|
@@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
36
36
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
37
|
};
|
|
38
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
-
exports.DAEMON_SIGNALS_DIR = exports.DAEMON_SOUL_TEMPLATE = exports.DAEMON_SOUL_DEFAULT = exports.DAEMON_SESSIONS_DIR = void 0;
|
|
39
|
+
exports.DAEMON_SIGNALS_DIR = exports.DAEMON_SOUL_TEMPLATE = exports.DAEMON_SOUL_DEFAULT = exports.WORKTREES_DIR = exports.DAEMON_SESSIONS_DIR = void 0;
|
|
40
40
|
exports.readDaemonSessionState = readDaemonSessionState;
|
|
41
41
|
exports.readAllDaemonSessions = readAllDaemonSessions;
|
|
42
42
|
exports.runStartupRecovery = runStartupRecovery;
|
|
@@ -83,7 +83,9 @@ function withWorkrailSession(sid) {
|
|
|
83
83
|
}
|
|
84
84
|
exports.DAEMON_SESSIONS_DIR = path.join(os.homedir(), '.workrail', 'daemon-sessions');
|
|
85
85
|
const MAX_ORPHAN_AGE_MS = 2 * 60 * 60 * 1000;
|
|
86
|
+
const MAX_WORKTREE_ORPHAN_AGE_MS = 24 * 60 * 60 * 1000;
|
|
86
87
|
const WORKRAIL_DIR = path.join(os.homedir(), '.workrail');
|
|
88
|
+
exports.WORKTREES_DIR = path.join(os.homedir(), '.workrail', 'worktrees');
|
|
87
89
|
const WORKSPACE_CONTEXT_MAX_BYTES = 32 * 1024;
|
|
88
90
|
const MAX_ASSEMBLED_CONTEXT_BYTES = 8192;
|
|
89
91
|
const WORKSPACE_CONTEXT_CANDIDATE_PATHS = [
|
|
@@ -96,10 +98,10 @@ const soul_template_js_1 = require("./soul-template.js");
|
|
|
96
98
|
var soul_template_js_2 = require("./soul-template.js");
|
|
97
99
|
Object.defineProperty(exports, "DAEMON_SOUL_DEFAULT", { enumerable: true, get: function () { return soul_template_js_2.DAEMON_SOUL_DEFAULT; } });
|
|
98
100
|
Object.defineProperty(exports, "DAEMON_SOUL_TEMPLATE", { enumerable: true, get: function () { return soul_template_js_2.DAEMON_SOUL_TEMPLATE; } });
|
|
99
|
-
async function persistTokens(sessionId, continueToken, checkpointToken) {
|
|
101
|
+
async function persistTokens(sessionId, continueToken, checkpointToken, worktreePath) {
|
|
100
102
|
await fs.mkdir(exports.DAEMON_SESSIONS_DIR, { recursive: true });
|
|
101
103
|
const sessionPath = path.join(exports.DAEMON_SESSIONS_DIR, `${sessionId}.json`);
|
|
102
|
-
const state = JSON.stringify({ continueToken, checkpointToken, ts: Date.now() }, null, 2);
|
|
104
|
+
const state = JSON.stringify({ continueToken, checkpointToken, ts: Date.now(), ...(worktreePath !== undefined ? { worktreePath } : {}) }, null, 2);
|
|
103
105
|
const tmp = `${sessionPath}.tmp`;
|
|
104
106
|
await fs.writeFile(tmp, state, 'utf8');
|
|
105
107
|
await fs.rename(tmp, sessionPath);
|
|
@@ -145,6 +147,7 @@ async function readAllDaemonSessions(sessionsDir = exports.DAEMON_SESSIONS_DIR)
|
|
|
145
147
|
continueToken: parsed.continueToken,
|
|
146
148
|
checkpointToken: typeof parsed.checkpointToken === 'string' ? parsed.checkpointToken : null,
|
|
147
149
|
ts: parsed.ts,
|
|
150
|
+
...(typeof parsed.worktreePath === 'string' ? { worktreePath: parsed.worktreePath } : {}),
|
|
148
151
|
});
|
|
149
152
|
}
|
|
150
153
|
catch (err) {
|
|
@@ -153,7 +156,7 @@ async function readAllDaemonSessions(sessionsDir = exports.DAEMON_SESSIONS_DIR)
|
|
|
153
156
|
}
|
|
154
157
|
return sessions;
|
|
155
158
|
}
|
|
156
|
-
async function runStartupRecovery(sessionsDir = exports.DAEMON_SESSIONS_DIR) {
|
|
159
|
+
async function runStartupRecovery(sessionsDir = exports.DAEMON_SESSIONS_DIR, execFn = execFileAsync) {
|
|
157
160
|
const sessions = await readAllDaemonSessions(sessionsDir);
|
|
158
161
|
if (sessions.length === 0) {
|
|
159
162
|
await clearStrayTmpFiles(sessionsDir);
|
|
@@ -168,6 +171,22 @@ async function runStartupRecovery(sessionsDir = exports.DAEMON_SESSIONS_DIR) {
|
|
|
168
171
|
const ageSec = Math.round(ageMs / 1000);
|
|
169
172
|
const label = isStale ? 'stale orphaned session' : 'orphaned session';
|
|
170
173
|
console.log(`[WorkflowRunner] Clearing ${label}: sessionId=${session.sessionId} age=${ageSec}s`);
|
|
174
|
+
if (session.worktreePath && ageMs > MAX_WORKTREE_ORPHAN_AGE_MS) {
|
|
175
|
+
console.log(`[WorkflowRunner] Removing orphan worktree: sessionId=${session.sessionId} worktreePath=${session.worktreePath}`);
|
|
176
|
+
try {
|
|
177
|
+
await execFn('git', ['worktree', 'remove', '--force', session.worktreePath]);
|
|
178
|
+
console.log(`[WorkflowRunner] Removed orphan worktree: ${session.worktreePath}`);
|
|
179
|
+
}
|
|
180
|
+
catch (err) {
|
|
181
|
+
console.warn(`[WorkflowRunner] Could not remove orphan worktree ${session.worktreePath}: ` +
|
|
182
|
+
`${err instanceof Error ? err.message : String(err)}`);
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
else if (session.worktreePath && ageMs <= MAX_WORKTREE_ORPHAN_AGE_MS) {
|
|
186
|
+
const ageHours = (ageMs / (60 * 60 * 1000)).toFixed(1);
|
|
187
|
+
console.log(`[WorkflowRunner] Keeping recent orphan worktree: sessionId=${session.sessionId} ` +
|
|
188
|
+
`age=${ageHours}h (threshold=24h) worktreePath=${session.worktreePath}`);
|
|
189
|
+
}
|
|
171
190
|
try {
|
|
172
191
|
await fs.unlink(path.join(sessionsDir, `${session.sessionId}.json`));
|
|
173
192
|
cleared++;
|
|
@@ -1430,12 +1449,53 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry, emitter, steerR
|
|
|
1430
1449
|
if (startContinueToken) {
|
|
1431
1450
|
await persistTokens(sessionId, startContinueToken, startCheckpointToken);
|
|
1432
1451
|
}
|
|
1452
|
+
let sessionWorkspacePath = trigger.workspacePath;
|
|
1453
|
+
let sessionWorktreePath;
|
|
1454
|
+
if (trigger.branchStrategy === 'worktree') {
|
|
1455
|
+
const branchPrefix = trigger.branchPrefix ?? 'worktrain/';
|
|
1456
|
+
const baseBranch = trigger.baseBranch ?? 'main';
|
|
1457
|
+
sessionWorkspacePath = path.join(exports.WORKTREES_DIR, sessionId);
|
|
1458
|
+
sessionWorktreePath = sessionWorkspacePath;
|
|
1459
|
+
try {
|
|
1460
|
+
await fs.mkdir(exports.WORKTREES_DIR, { recursive: true });
|
|
1461
|
+
await execFileAsync('git', ['-C', trigger.workspacePath, 'fetch', 'origin', baseBranch]);
|
|
1462
|
+
await execFileAsync('git', [
|
|
1463
|
+
'-C', trigger.workspacePath,
|
|
1464
|
+
'worktree', 'add',
|
|
1465
|
+
sessionWorkspacePath,
|
|
1466
|
+
'-b', `${branchPrefix}${sessionId}`,
|
|
1467
|
+
`origin/${baseBranch}`,
|
|
1468
|
+
]);
|
|
1469
|
+
await persistTokens(sessionId, startContinueToken ?? currentContinueToken, startCheckpointToken, sessionWorktreePath);
|
|
1470
|
+
console.log(`[WorkflowRunner] Worktree created: sessionId=${sessionId} ` +
|
|
1471
|
+
`branch=${branchPrefix}${sessionId} path=${sessionWorkspacePath}`);
|
|
1472
|
+
}
|
|
1473
|
+
catch (err) {
|
|
1474
|
+
const errMsg = err instanceof Error ? err.message : String(err);
|
|
1475
|
+
console.error(`[WorkflowRunner] Worktree creation failed: sessionId=${sessionId} error=${errMsg}`);
|
|
1476
|
+
emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'error', detail: errMsg.slice(0, 200), ...withWorkrailSession(workrailSessionId) });
|
|
1477
|
+
if (workrailSessionId !== null)
|
|
1478
|
+
daemonRegistry?.unregister(workrailSessionId, 'failed');
|
|
1479
|
+
return {
|
|
1480
|
+
_tag: 'error',
|
|
1481
|
+
workflowId: trigger.workflowId,
|
|
1482
|
+
message: `Worktree creation failed: ${errMsg}`,
|
|
1483
|
+
stopReason: 'error',
|
|
1484
|
+
};
|
|
1485
|
+
}
|
|
1486
|
+
}
|
|
1433
1487
|
if (firstStep.isComplete) {
|
|
1434
1488
|
await fs.unlink(path.join(exports.DAEMON_SESSIONS_DIR, `${sessionId}.json`)).catch(() => { });
|
|
1435
1489
|
emitter?.emit({ kind: 'session_completed', sessionId, workflowId: trigger.workflowId, outcome: 'success', detail: 'stop', ...withWorkrailSession(workrailSessionId) });
|
|
1436
1490
|
if (workrailSessionId !== null)
|
|
1437
1491
|
daemonRegistry?.unregister(workrailSessionId, 'completed');
|
|
1438
|
-
return {
|
|
1492
|
+
return {
|
|
1493
|
+
_tag: 'success',
|
|
1494
|
+
workflowId: trigger.workflowId,
|
|
1495
|
+
stopReason: 'stop',
|
|
1496
|
+
...(sessionWorktreePath !== undefined ? { sessionWorkspacePath: sessionWorktreePath } : {}),
|
|
1497
|
+
...(sessionWorktreePath !== undefined ? { sessionId } : {}),
|
|
1498
|
+
};
|
|
1439
1499
|
}
|
|
1440
1500
|
const schemas = getSchemas();
|
|
1441
1501
|
const spawnCurrentDepth = trigger.spawnDepth ?? 0;
|
|
@@ -1444,12 +1504,12 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry, emitter, steerR
|
|
|
1444
1504
|
const tools = [
|
|
1445
1505
|
makeCompleteStepTool(sessionId, ctx, () => currentContinueToken, onAdvance, onComplete, (t) => { currentContinueToken = t; }, schemas, index_js_1.executeContinueWorkflow, emitter, workrailSessionId),
|
|
1446
1506
|
makeContinueWorkflowTool(sessionId, ctx, onAdvance, onComplete, schemas, index_js_1.executeContinueWorkflow, emitter, workrailSessionId),
|
|
1447
|
-
makeBashTool(
|
|
1507
|
+
makeBashTool(sessionWorkspacePath, schemas, sessionId, emitter, workrailSessionId),
|
|
1448
1508
|
makeReadTool(readFileState, schemas, sessionId, emitter, workrailSessionId),
|
|
1449
1509
|
makeWriteTool(readFileState, schemas, sessionId, emitter, workrailSessionId),
|
|
1450
|
-
makeGlobTool(
|
|
1451
|
-
makeGrepTool(
|
|
1452
|
-
makeEditTool(
|
|
1510
|
+
makeGlobTool(sessionWorkspacePath, schemas, sessionId, emitter, workrailSessionId),
|
|
1511
|
+
makeGrepTool(sessionWorkspacePath, schemas, sessionId, emitter, workrailSessionId),
|
|
1512
|
+
makeEditTool(sessionWorkspacePath, readFileState, schemas, sessionId, emitter, workrailSessionId),
|
|
1453
1513
|
makeReportIssueTool(sessionId, emitter, workrailSessionId, undefined, (summary) => {
|
|
1454
1514
|
if (issueSummaries.length < MAX_ISSUE_SUMMARIES) {
|
|
1455
1515
|
issueSummaries.push(summary);
|
|
@@ -1670,5 +1730,7 @@ async function runWorkflow(trigger, ctx, apiKey, daemonRegistry, emitter, steerR
|
|
|
1670
1730
|
stopReason,
|
|
1671
1731
|
...(lastStepNotes !== undefined ? { lastStepNotes } : {}),
|
|
1672
1732
|
...(lastStepArtifacts !== undefined ? { lastStepArtifacts } : {}),
|
|
1733
|
+
...(sessionWorktreePath !== undefined ? { sessionWorkspacePath: sessionWorktreePath } : {}),
|
|
1734
|
+
...(sessionWorktreePath !== undefined ? { sessionId } : {}),
|
|
1673
1735
|
};
|
|
1674
1736
|
}
|
package/dist/manifest.json
CHANGED
|
@@ -453,12 +453,12 @@
|
|
|
453
453
|
"sha256": "3bdb55ec0957928e0ebbb86a7d6b36d28f7ba7d5c0f3e236fd8f2e2aacee2fa4",
|
|
454
454
|
"bytes": 60631
|
|
455
455
|
},
|
|
456
|
-
"console-ui/assets/index-
|
|
457
|
-
"sha256": "
|
|
456
|
+
"console-ui/assets/index-Sb57DW4B.js": {
|
|
457
|
+
"sha256": "54d09def45773f707ebf2bc17d109411a36ae1098d97d1f81a7423c69686520a",
|
|
458
458
|
"bytes": 760528
|
|
459
459
|
},
|
|
460
460
|
"console-ui/index.html": {
|
|
461
|
-
"sha256": "
|
|
461
|
+
"sha256": "5f4e96c20ab0286c7be620fbaa3fd18728866e25b320ea8622fdda58f81b047a",
|
|
462
462
|
"bytes": 417
|
|
463
463
|
},
|
|
464
464
|
"console/standalone-console.d.ts": {
|
|
@@ -550,12 +550,12 @@
|
|
|
550
550
|
"bytes": 1512
|
|
551
551
|
},
|
|
552
552
|
"daemon/workflow-runner.d.ts": {
|
|
553
|
-
"sha256": "
|
|
554
|
-
"bytes":
|
|
553
|
+
"sha256": "ba96e9b4437632c760a6da9b3d70e676a2ad0a0f0a6ae037ec83419d7ffaf7a8",
|
|
554
|
+
"bytes": 6529
|
|
555
555
|
},
|
|
556
556
|
"daemon/workflow-runner.js": {
|
|
557
|
-
"sha256": "
|
|
558
|
-
"bytes":
|
|
557
|
+
"sha256": "24acc5dce22f6619a05d7a5268a82f801a25d99e1fd8d7c021d2b47ce01deba8",
|
|
558
|
+
"bytes": 88602
|
|
559
559
|
},
|
|
560
560
|
"di/container.d.ts": {
|
|
561
561
|
"sha256": "003bb7fb7478d627524b9b1e76bd0a963a243794a687ff233b96dc0e33a06d9f",
|
|
@@ -1566,12 +1566,12 @@
|
|
|
1566
1566
|
"bytes": 5471
|
|
1567
1567
|
},
|
|
1568
1568
|
"trigger/delivery-action.d.ts": {
|
|
1569
|
-
"sha256": "
|
|
1570
|
-
"bytes":
|
|
1569
|
+
"sha256": "2b3f165759b0de49b7f49023a05efa50848331ab6cd9969b49c1409346959994",
|
|
1570
|
+
"bytes": 1257
|
|
1571
1571
|
},
|
|
1572
1572
|
"trigger/delivery-action.js": {
|
|
1573
|
-
"sha256": "
|
|
1574
|
-
"bytes":
|
|
1573
|
+
"sha256": "1a9c0d097dc0f14e66765366f878f5f8386a4a1b0c5eb9572fa90a2b60643bab",
|
|
1574
|
+
"bytes": 9016
|
|
1575
1575
|
},
|
|
1576
1576
|
"trigger/delivery-client.d.ts": {
|
|
1577
1577
|
"sha256": "0cb2be24b854cb31e3d2fe7eeaba6032de7a9b2a5290c8bc886df94faf5306f7",
|
|
@@ -1626,20 +1626,20 @@
|
|
|
1626
1626
|
"bytes": 2123
|
|
1627
1627
|
},
|
|
1628
1628
|
"trigger/trigger-router.js": {
|
|
1629
|
-
"sha256": "
|
|
1630
|
-
"bytes":
|
|
1629
|
+
"sha256": "605cdce397bd19e5b991fe7378faf17b4f25b4421749e1b5349413a208a4f3dd",
|
|
1630
|
+
"bytes": 17250
|
|
1631
1631
|
},
|
|
1632
1632
|
"trigger/trigger-store.d.ts": {
|
|
1633
1633
|
"sha256": "7afb05127d55bc3757a550dd15d4b797766b3fff29d1bfe76b303764b93322e7",
|
|
1634
1634
|
"bytes": 1588
|
|
1635
1635
|
},
|
|
1636
1636
|
"trigger/trigger-store.js": {
|
|
1637
|
-
"sha256": "
|
|
1638
|
-
"bytes":
|
|
1637
|
+
"sha256": "f4e5c6d28db4c9c05df07b29eb627bc7a48f234a2c16ea4adfe57de24ca6bf38",
|
|
1638
|
+
"bytes": 36591
|
|
1639
1639
|
},
|
|
1640
1640
|
"trigger/types.d.ts": {
|
|
1641
|
-
"sha256": "
|
|
1642
|
-
"bytes":
|
|
1641
|
+
"sha256": "f9ccdc2bea0bee7557362cc7b64d89ed3ab26f7cd954e05395ce4cabe536ab02",
|
|
1642
|
+
"bytes": 2929
|
|
1643
1643
|
},
|
|
1644
1644
|
"trigger/types.js": {
|
|
1645
1645
|
"sha256": "45b4e4f23a6d1a2b07350196871b0c53840e5d8142b47f7acedd2f40ae7a6b73",
|
|
@@ -11,6 +11,8 @@ export interface HandoffArtifact {
|
|
|
11
11
|
export interface DeliveryFlags {
|
|
12
12
|
readonly autoCommit?: boolean;
|
|
13
13
|
readonly autoOpenPR?: boolean;
|
|
14
|
+
readonly sessionId?: string;
|
|
15
|
+
readonly branchPrefix?: string;
|
|
14
16
|
}
|
|
15
17
|
export type DeliveryResult = {
|
|
16
18
|
readonly _tag: 'committed';
|
|
@@ -146,6 +146,30 @@ async function runDelivery(artifact, workspacePath, flags, execFn) {
|
|
|
146
146
|
reason: 'filesChanged is empty -- cannot stage files safely (no git add -A fallback)',
|
|
147
147
|
};
|
|
148
148
|
}
|
|
149
|
+
if (flags.sessionId) {
|
|
150
|
+
const expectedBranch = `${flags.branchPrefix ?? 'worktrain/'}${flags.sessionId}`;
|
|
151
|
+
let headBranch;
|
|
152
|
+
try {
|
|
153
|
+
const result = await execFn('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { cwd: workspacePath, timeout: DELIVERY_TIMEOUT_MS });
|
|
154
|
+
headBranch = result.stdout.trim();
|
|
155
|
+
}
|
|
156
|
+
catch (e) {
|
|
157
|
+
return {
|
|
158
|
+
_tag: 'error',
|
|
159
|
+
phase: 'commit',
|
|
160
|
+
details: `HEAD branch check failed (cannot stage): ${formatExecError(e)}`,
|
|
161
|
+
};
|
|
162
|
+
}
|
|
163
|
+
if (headBranch !== expectedBranch) {
|
|
164
|
+
return {
|
|
165
|
+
_tag: 'error',
|
|
166
|
+
phase: 'commit',
|
|
167
|
+
details: `HEAD branch mismatch: expected "${expectedBranch}" but found "${headBranch}". ` +
|
|
168
|
+
`Refusing to stage or push -- the agent may have switched branches. ` +
|
|
169
|
+
`Worktree path: ${workspacePath}`,
|
|
170
|
+
};
|
|
171
|
+
}
|
|
172
|
+
}
|
|
149
173
|
const commitMessage = artifact.commitSubject.startsWith(`${artifact.commitType}(`)
|
|
150
174
|
? artifact.commitSubject
|
|
151
175
|
: `${artifact.commitType}(${artifact.commitScope}): ${artifact.commitSubject}`;
|
|
@@ -136,7 +136,17 @@ async function maybeRunDelivery(triggerId, trigger, result, execFn) {
|
|
|
136
136
|
`Ensure the workflow's final step produces a JSON block with commitType, filesChanged, etc.`);
|
|
137
137
|
return;
|
|
138
138
|
}
|
|
139
|
-
const
|
|
139
|
+
const deliveryCwd = result.sessionWorkspacePath ?? trigger.workspacePath;
|
|
140
|
+
const deliveryResult = await (0, delivery_action_js_1.runDelivery)(parseResult.value, deliveryCwd, {
|
|
141
|
+
autoCommit: trigger.autoCommit,
|
|
142
|
+
autoOpenPR: trigger.autoOpenPR,
|
|
143
|
+
...(trigger.branchStrategy === 'worktree' && result.sessionWorkspacePath
|
|
144
|
+
? {
|
|
145
|
+
sessionId: result.sessionId ?? '',
|
|
146
|
+
branchPrefix: trigger.branchPrefix ?? 'worktrain/',
|
|
147
|
+
}
|
|
148
|
+
: {}),
|
|
149
|
+
}, execFn);
|
|
140
150
|
switch (deliveryResult._tag) {
|
|
141
151
|
case 'committed':
|
|
142
152
|
console.log(`[TriggerRouter] Delivery committed: triggerId=${triggerId} sha=${deliveryResult.sha}`);
|
|
@@ -152,6 +162,16 @@ async function maybeRunDelivery(triggerId, trigger, result, execFn) {
|
|
|
152
162
|
`details=${deliveryResult.details}`);
|
|
153
163
|
break;
|
|
154
164
|
}
|
|
165
|
+
if (trigger.branchStrategy === 'worktree' && result.sessionWorkspacePath) {
|
|
166
|
+
try {
|
|
167
|
+
await execFn('git', ['-C', trigger.workspacePath, 'worktree', 'remove', '--force', result.sessionWorkspacePath], { cwd: trigger.workspacePath, timeout: 60000 });
|
|
168
|
+
console.log(`[TriggerRouter] Worktree removed: triggerId=${triggerId} path=${result.sessionWorkspacePath}`);
|
|
169
|
+
}
|
|
170
|
+
catch (err) {
|
|
171
|
+
console.warn(`[TriggerRouter] Could not remove worktree: triggerId=${triggerId} ` +
|
|
172
|
+
`path=${result.sessionWorkspacePath}: ${err instanceof Error ? err.message : String(err)}`);
|
|
173
|
+
}
|
|
174
|
+
}
|
|
155
175
|
}
|
|
156
176
|
class Semaphore {
|
|
157
177
|
constructor(max) {
|
|
@@ -246,6 +266,9 @@ class TriggerRouter {
|
|
|
246
266
|
...(trigger.referenceUrls !== undefined ? { referenceUrls: trigger.referenceUrls } : {}),
|
|
247
267
|
...(trigger.agentConfig !== undefined ? { agentConfig: trigger.agentConfig } : {}),
|
|
248
268
|
...(trigger.soulFile !== undefined ? { soulFile: trigger.soulFile } : {}),
|
|
269
|
+
...(trigger.branchStrategy !== undefined ? { branchStrategy: trigger.branchStrategy } : {}),
|
|
270
|
+
...(trigger.baseBranch !== undefined ? { baseBranch: trigger.baseBranch } : {}),
|
|
271
|
+
...(trigger.branchPrefix !== undefined ? { branchPrefix: trigger.branchPrefix } : {}),
|
|
249
272
|
};
|
|
250
273
|
this.emitter?.emit({ kind: 'trigger_fired', triggerId: trigger.id, workflowId: trigger.workflowId });
|
|
251
274
|
const queueKey = trigger.concurrencyMode === 'parallel'
|
|
@@ -391,6 +391,15 @@ function setTriggerField(trigger, key, value) {
|
|
|
391
391
|
case 'soulFile':
|
|
392
392
|
trigger.soulFile = value;
|
|
393
393
|
break;
|
|
394
|
+
case 'branchStrategy':
|
|
395
|
+
trigger.branchStrategy = value;
|
|
396
|
+
break;
|
|
397
|
+
case 'baseBranch':
|
|
398
|
+
trigger.baseBranch = value;
|
|
399
|
+
break;
|
|
400
|
+
case 'branchPrefix':
|
|
401
|
+
trigger.branchPrefix = value;
|
|
402
|
+
break;
|
|
394
403
|
default:
|
|
395
404
|
break;
|
|
396
405
|
}
|
|
@@ -602,6 +611,36 @@ function validateAndResolveTrigger(raw, env, workspaces = {}) {
|
|
|
602
611
|
console.warn(`[TriggerStore] Warning: trigger "${rawId}" has autoOpenPR: true but autoCommit is not true. ` +
|
|
603
612
|
`A PR requires a commit -- delivery will be skipped unless autoCommit is also set to true.`);
|
|
604
613
|
}
|
|
614
|
+
const rawBranchStrategy = raw.branchStrategy?.trim();
|
|
615
|
+
if (rawBranchStrategy !== undefined && rawBranchStrategy !== 'worktree' && rawBranchStrategy !== 'none') {
|
|
616
|
+
return (0, result_js_1.err)({
|
|
617
|
+
kind: 'invalid_field_value',
|
|
618
|
+
field: `branchStrategy (must be "worktree" or "none", got: "${rawBranchStrategy}")`,
|
|
619
|
+
triggerId: rawId,
|
|
620
|
+
});
|
|
621
|
+
}
|
|
622
|
+
const branchStrategy = rawBranchStrategy === 'worktree' ? 'worktree' : rawBranchStrategy === 'none' ? 'none' : undefined;
|
|
623
|
+
const baseBranch = raw.baseBranch?.trim() || undefined;
|
|
624
|
+
const branchPrefix = raw.branchPrefix?.trim() || undefined;
|
|
625
|
+
const GIT_SAFE_RE = /^[a-zA-Z0-9._/-]+$/;
|
|
626
|
+
if (baseBranch !== undefined) {
|
|
627
|
+
if (!GIT_SAFE_RE.test(baseBranch) || baseBranch.startsWith('-')) {
|
|
628
|
+
return (0, result_js_1.err)({
|
|
629
|
+
kind: 'invalid_field_value',
|
|
630
|
+
field: `baseBranch (must match /^[a-zA-Z0-9._/-]+$/ and not start with "-", got: "${baseBranch}")`,
|
|
631
|
+
triggerId: rawId,
|
|
632
|
+
});
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
if (branchPrefix !== undefined) {
|
|
636
|
+
if (!GIT_SAFE_RE.test(branchPrefix) || branchPrefix.startsWith('-')) {
|
|
637
|
+
return (0, result_js_1.err)({
|
|
638
|
+
kind: 'invalid_field_value',
|
|
639
|
+
field: `branchPrefix (must match /^[a-zA-Z0-9._/-]+$/ and not start with "-", got: "${branchPrefix}")`,
|
|
640
|
+
triggerId: rawId,
|
|
641
|
+
});
|
|
642
|
+
}
|
|
643
|
+
}
|
|
605
644
|
function parsePollIntervalSeconds(raw2, triggerId2) {
|
|
606
645
|
const intervalRaw = raw2.pollIntervalSeconds?.trim();
|
|
607
646
|
if (!intervalRaw)
|
|
@@ -741,6 +780,9 @@ function validateAndResolveTrigger(raw, env, workspaces = {}) {
|
|
|
741
780
|
...(pollingSource !== undefined ? { pollingSource } : {}),
|
|
742
781
|
...(resolvedWorkspaceName !== undefined ? { workspaceName: resolvedWorkspaceName } : {}),
|
|
743
782
|
...(resolvedSoulFile ? { soulFile: resolvedSoulFile } : {}),
|
|
783
|
+
...(branchStrategy !== undefined ? { branchStrategy } : {}),
|
|
784
|
+
...(baseBranch !== undefined ? { baseBranch } : {}),
|
|
785
|
+
...(branchPrefix !== undefined ? { branchPrefix } : {}),
|
|
744
786
|
};
|
|
745
787
|
return (0, result_js_1.ok)(trigger);
|
|
746
788
|
}
|
package/dist/trigger/types.d.ts
CHANGED
|
@@ -68,6 +68,9 @@ export interface TriggerDefinition {
|
|
|
68
68
|
readonly pollingSource?: PollingSource;
|
|
69
69
|
readonly workspaceName?: WorkspaceName;
|
|
70
70
|
readonly soulFile?: string;
|
|
71
|
+
readonly branchStrategy?: 'worktree' | 'none';
|
|
72
|
+
readonly baseBranch?: string;
|
|
73
|
+
readonly branchPrefix?: string;
|
|
71
74
|
}
|
|
72
75
|
export interface TriggerConfig {
|
|
73
76
|
readonly triggers: readonly TriggerDefinition[];
|
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
# Inter-Phase Context Passing: Design Candidates
|
|
2
|
+
|
|
3
|
+
*Working analysis document -- raw investigative material, not a final decision.*
|
|
4
|
+
*Produced during wr.discovery workflow for adaptive-coordinator-context.md.*
|
|
5
|
+
|
|
6
|
+
---
|
|
7
|
+
|
|
8
|
+
## Problem Understanding
|
|
9
|
+
|
|
10
|
+
### Core tensions
|
|
11
|
+
|
|
12
|
+
1. **Concern separation vs. convenience:** ContextAssembler is the only session injection mechanism but was designed for startup context (git diff, prior session notes), not inter-phase structured contracts. Using it for inter-phase handoff is convenient but muddles its purpose and change rate.
|
|
13
|
+
|
|
14
|
+
2. **Coupling vs. simplicity:** File-based handoff (pitch.md) is simple and already works for Shaping->Coding. Extending the pattern to all transitions requires each workflow's final step to write to a fixed `.workrail/` path -- coupling pipeline topology to workflow authoring.
|
|
15
|
+
|
|
16
|
+
3. **Uniform bridge vs. typed contracts:** Coordinator-injected freetext (assembledContextSummary) is uniform across all transitions but imprecise -- no schema enforcement. Typed artifacts per transition are precise and Zod-validated but add per-transition schema overhead.
|
|
17
|
+
|
|
18
|
+
4. **Coordinator responsibility vs. session responsibility:** Coordinator can bridge at spawn time (reads lastStepNotes, builds context, injects at spawn). Sessions can self-discover (Phase 0.5 already does this for pitch.md). Self-discovery is more robust but requires each session to know the file conventions.
|
|
19
|
+
|
|
20
|
+
### Likely seam
|
|
21
|
+
|
|
22
|
+
The problem does NOT live in ContextAssembler. The coordinator already passes arbitrary context at spawn time via the 4th parameter to `spawnSession`. The real seam is: **what to put in that context object and how to extract it from the previous phase's output.**
|
|
23
|
+
|
|
24
|
+
The problem is narrower than it appears:
|
|
25
|
+
- Shaping->Coding: already solved (Phase 0.5 + pitch.md)
|
|
26
|
+
- Review->Fix: already solved (wr.review_verdict + pr-review.ts)
|
|
27
|
+
- Remaining gap: Discovery->Shaping only
|
|
28
|
+
|
|
29
|
+
### What makes this hard
|
|
30
|
+
|
|
31
|
+
- Coordinator and session are in different systems; workflow prompt changes can silently break coordinator parsing
|
|
32
|
+
- The only injection point (`assembledContextSummary`) is a freetext markdown string, not a typed contract
|
|
33
|
+
- Two transitions need structured coordinator routing (D->S needs direction; Review->Fix already solved). Three transitions are pass-through.
|
|
34
|
+
- Silent failure is the dominant risk
|
|
35
|
+
|
|
36
|
+
---
|
|
37
|
+
|
|
38
|
+
## Philosophy Constraints
|
|
39
|
+
|
|
40
|
+
**Principles that constrain this design:**
|
|
41
|
+
- Immutability by default: all new types use `readonly`
|
|
42
|
+
- Make illegal states unrepresentable: discriminated unions for AssemblyTask; typed artifacts for coordinator contracts
|
|
43
|
+
- Errors are data: new ContextBundle sources should use `Result<T, string>`
|
|
44
|
+
- Validate at boundaries: Zod for external inputs (typed artifacts, notes parsing)
|
|
45
|
+
- Compose with small pure functions: handoff builder/parser functions must be pure
|
|
46
|
+
- Dependency injection: file reads go through CoordinatorDeps
|
|
47
|
+
- YAGNI: only add typed artifacts for transitions where coordinator branches on structured values
|
|
48
|
+
|
|
49
|
+
**Active conflict:** YAGNI vs. "make illegal states unrepresentable" -- typed artifacts everywhere would enforce all contracts but add schema overhead for 3 transitions that don't need it.
|
|
50
|
+
|
|
51
|
+
---
|
|
52
|
+
|
|
53
|
+
## Impact Surface
|
|
54
|
+
|
|
55
|
+
Must stay consistent with:
|
|
56
|
+
- `src/context-assembly/types.ts` (AssemblyTask union)
|
|
57
|
+
- `src/context-assembly/index.ts` (assemble() switch on task.kind)
|
|
58
|
+
- `src/coordinators/pr-review.ts` (CoordinatorDeps interface)
|
|
59
|
+
- `src/v2/durable-core/schemas/artifacts/` (typed artifact schemas)
|
|
60
|
+
- `workflows/wr.discovery.json` (Phase 7 -- if emitting artifact)
|
|
61
|
+
- `workflows/wr.shaping.json` (Step 1 -- if adding file search)
|
|
62
|
+
- `workflows/coding-task-workflow-agentic.json` (Phase 0.5 -- no changes expected)
|
|
63
|
+
|
|
64
|
+
---
|
|
65
|
+
|
|
66
|
+
## Candidates
|
|
67
|
+
|
|
68
|
+
### Candidate A: Goal string composition + notes injection
|
|
69
|
+
|
|
70
|
+
**Summary:** Coordinator composes a rich goal string from the previous phase's `lastStepNotes` and passes notes as `assembledContextSummary` at spawn time -- no new mechanisms required.
|
|
71
|
+
|
|
72
|
+
**Mechanism:**
|
|
73
|
+
When spawning wr.shaping, coordinator constructs:
|
|
74
|
+
```
|
|
75
|
+
goal = "Shape the following problem discovered in our discovery session:\n\n[lastStepNotes summary]"
|
|
76
|
+
context = { assembledContextSummary: lastStepNotes }
|
|
77
|
+
```
|
|
78
|
+
wr.shaping Step 1 reads from "goal text, discovery notes, tickets, user stories" -- both injection points land in session context.
|
|
79
|
+
|
|
80
|
+
**Tensions resolved:** Workflow independence, coordinator simplicity, coherence
|
|
81
|
+
|
|
82
|
+
**Tensions accepted:** No mechanism clarity, no schema validation, silent failure when notes format changes
|
|
83
|
+
|
|
84
|
+
**Boundary:** Coordinator spawn site only
|
|
85
|
+
|
|
86
|
+
**Failure mode:** wr.discovery notes format changes silently. Shaping session starts from degraded context. No error surfaced.
|
|
87
|
+
|
|
88
|
+
**Repo pattern:** Follows existing assembledContextSummary pattern. No departure.
|
|
89
|
+
|
|
90
|
+
**Gains:** Zero new code outside coordinator. Zero workflow changes. Works today.
|
|
91
|
+
**Losses:** No machine-parseable contract. Coordinator cannot extract structured fields programmatically.
|
|
92
|
+
|
|
93
|
+
**Scope judgment:** Best-fit for pass-through transitions. Too narrow for transitions where coordinator branches on structured values.
|
|
94
|
+
|
|
95
|
+
**Philosophy:** Honors YAGNI, DI. Conflicts with "make illegal states unrepresentable", "validate at boundaries".
|
|
96
|
+
|
|
97
|
+
---
|
|
98
|
+
|
|
99
|
+
### Candidate B: File convention extension (adapt pitch.md)
|
|
100
|
+
|
|
101
|
+
**Summary:** wr.discovery Phase 7 writes `.workrail/current-discovery.md`; wr.shaping Step 1 searches for it -- the same file convention that already works for Shaping->Coding.
|
|
102
|
+
|
|
103
|
+
**Mechanism:**
|
|
104
|
+
- wr.discovery Phase 7 updated to write `.workrail/current-discovery.md`
|
|
105
|
+
- wr.shaping Step 1 updated to search for `.workrail/current-discovery.md` when no goal text provides discovery context
|
|
106
|
+
- Coordinator passes correct workspacePath -- that's the only coordinator involvement
|
|
107
|
+
|
|
108
|
+
**Tensions resolved:** Discoverable handoff (session self-discovers), coordinator logic (no bridging for D->S)
|
|
109
|
+
|
|
110
|
+
**Tensions accepted:** 2 workflow changes, stale file risk (failed discovery leaves wrong file)
|
|
111
|
+
|
|
112
|
+
**Boundary:** Filesystem at `.workrail/`
|
|
113
|
+
|
|
114
|
+
**Failure mode:** Discovery fails before Phase 7. File is stale from prior session. Shaping reads wrong discovery output. No timestamp check.
|
|
115
|
+
|
|
116
|
+
**Repo pattern:** Directly adapts pitch.md convention from wr.shaping Step 9.
|
|
117
|
+
|
|
118
|
+
**Gains:** Sessions self-discover. Coordinator unchanged for D->S. Clean separation.
|
|
119
|
+
**Losses:** Two workflow changes. Stale file risk. Conventions must be maintained.
|
|
120
|
+
|
|
121
|
+
**Scope judgment:** Best-fit for D->S specifically. Over-engineered for other transitions.
|
|
122
|
+
|
|
123
|
+
**Philosophy:** Honors YAGNI, "make illegal states unrepresentable" (file presence checkable). Conflicts with "validate at boundaries" (stale file not caught at coordinator boundary).
|
|
124
|
+
|
|
125
|
+
---
|
|
126
|
+
|
|
127
|
+
### Candidate C: Coordinator-injected structured context (pure builder functions)

**Summary:** Add a pure `buildDiscoveryHandoffContext(notes: string): PhaseHandoffContext` function in the coordinator that extracts structured fields from discovery notes and passes them as spawn context.

**Type shape:**

```typescript
interface PhaseHandoffContext {
  readonly phaseFrom: string;
  readonly phaseTo: string;
  readonly keyFindings: string;
  readonly selectedDirection?: string;
  readonly designDocPath?: string;
}
```

**Tensions resolved:** Coordinator simplicity (named pure function per transition), no workflow changes, coordinator owns contracts

**Tensions accepted:** Notes format still unconstrained. Builder functions must be maintained when format changes.

**Boundary:** Coordinator spawn site + named pure function (slightly better than A)

**Failure mode:** Notes format changes silently break builder. Same failure as A but slightly more visible (named function can be tested).

**Repo pattern:** Adapts parseFindingsFromNotes() pattern (pure function + coordinator uses result). Direct precedent in pr-review.ts.

**Gains:** Named, testable, pure functions. Coordinator owns contracts. No workflow changes.

**Losses:** Notes format still unconstrained. Per-transition builder functions as pipeline grows.

**Scope judgment:** Best-fit for coordinator-centric design with intermediate rigor.

**Philosophy:** Honors "compose with small pure functions", DI. Conflicts with "validate at boundaries", "make illegal states unrepresentable".

---
### Candidate D: Typed handoff artifact for decision-critical transitions only (wr.discovery_handoff)

**Summary:** Add a `wr.discovery_handoff` typed artifact emitted in wr.discovery Phase 7, Zod-validated by the coordinator -- applied only to D->S; all other transitions use goal string composition.

**Schema:**

```typescript
interface DiscoveryHandoffArtifactV1 {
  readonly kind: 'wr.discovery_handoff';
  readonly version: 1;
  readonly selectedDirection: string;
  readonly designDocPath: string;
  readonly confidenceBand: 'high' | 'medium' | 'low';
  readonly keyInvariants: readonly string[];
}
```

**Coordinator parsing:**

```typescript
const handoffArtifact = readDiscoveryHandoffArtifact(lastStepArtifacts, sessionHandle);
const spawnContext = handoffArtifact
  ? { selectedDirection: handoffArtifact.selectedDirection, designDocPath: handoffArtifact.designDocPath, assembledContextSummary: render(handoffArtifact) }
  : { assembledContextSummary: lastStepNotes };
await deps.spawnSession('wr.shaping', buildShapingGoal(handoffArtifact ?? notes), workspace, spawnContext);
```

**Transition coverage:**

- D->S: wr.discovery_handoff (NEW)
- Review->Fix: wr.review_verdict (already exists)
- S->C: goal string + pitch.md self-discovery via Phase 0.5 (no typed artifact)
- C->PR: PR number in goal string (no typed artifact)
- PR->Review: PR number (no typed artifact)

**Tensions resolved:** Mechanism clarity (schema IS the contract), validates at boundaries (Zod), makes illegal states unrepresentable for D->S

**Tensions accepted:** 1 workflow change (wr.discovery Phase 7 must emit artifact). Two-tier fallback complexity. Schema maintenance.

**Boundary:** wr.discovery Phase 7 (emitter) + coordinator lastStepArtifacts handling (consumer)

**Failure mode:** Agent doesn't emit artifact. Coordinator falls back to notes parsing. Two-tier failure, same as wr.review_verdict today.

**Repo pattern:** Directly follows wr.review_verdict pattern in every structural detail.

**Gains:** Machine-parseable D->S contract. Coordinator can route on confidenceBand. Type-safe. Explicit failure.

**Losses:** 1 workflow change. Schema maintenance. Two-tier parsing. Agent must reliably emit artifact.

**Scope judgment:** Best-fit for D->S. Combined with A for other transitions = minimum typed surface.

**Philosophy:** Fully honors "make illegal states unrepresentable", "validate at boundaries", "type safety as first line of defense", YAGNI (applied only to 2 decision-critical transitions).

---
## Comparison and Recommendation

### Comparison matrix

| Criterion | A | B | C | D |
|-----------|---|---|---|---|
| Mechanism clarity | Low | Medium | Medium | High |
| Coordinator logic minimality | High | High | Medium | Medium |
| Workflow independence | High | Low | High | Medium |
| Coherence with existing patterns | Medium | High | Medium | High |
| Discoverable handoff | Low | High | Low | Low |
| Philosophy fit | Medium | Medium | Medium | High |
| Failure mode explicitness | Low | Medium | Medium | High |

### Recommendation: Hybrid D + A

**D for Discovery->Shaping:**
Coordinator branches on `selectedDirection` -- this is a routing decision, not just context enrichment. Mirrors the wr.review_verdict precedent. Schema overhead proportionate to decision weight.

**A for all other pass-through transitions:**
- Shaping->Coding: Phase 0.5 finds pitch.md automatically (confirmed in workflow prompt)
- Coding->PR: PR number in goal string
- PR->Review: pr-review.ts already handles this

**No ContextAssembler change needed:**
Coordinator passes `{ selectedDirection, designDocPath, assembledContextSummary }` as spawn context keys. The existing `context: Record<string, unknown>` parameter on `spawnSession` is sufficient.

---
## Self-Critique

**Strongest counter-argument against D:**
The workflow change to wr.discovery Phase 7 is a point of failure. If the agent emits a malformed artifact or omits it, coordinator falls back to notes parsing -- same fragility as Candidate A. The typed contract only helps when the artifact IS correctly emitted.

**Why A is a legitimate simpler answer:**
wr.shaping Step 1 explicitly reads "goal text, discovery notes, tickets, user stories." If coordinator goal string includes discovery summary, this may be sufficient. No structured routing on selectedDirection may be needed. Validate this when the first adaptive coordinator is written.

**Pivot condition to A:**
When the first adaptive coordinator is written, if wr.shaping Step 1 produces sound results without structured `selectedDirection` access, Candidate A is correct and D's overhead is unjustified.

**Broader option:**
D for all transitions (add wr.shaping_handoff, wr.coding_handoff). Justified only if coordinator needs to branch on pitch content or implementation scope. Not justified now.

---
## Open Questions for the Main Agent

1. Does the coordinator need to branch on `selectedDirection` from discovery, or is passing the full notes blob sufficient for wr.shaping Step 1? This determines whether D is needed at all.

2. Is there a planned use case where the adaptive coordinator skips Shaping based on discovery confidence band? If yes, D is clearly justified. If not, A may be sufficient.

3. What does the routing agent (adaptive-coordinator-routing.md) expect from context passing? Its design may have specific requirements for structured fields at routing decision time.

4. Is `.workrail/current-pitch.md` reliably found by Phase 0.5 in practice? If Phase 0.5 sometimes misses it, the coordinator may need to inject `pitchPath` explicitly.