switchman-dev 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1113 @@
1
+ import { spawn, spawnSync } from 'child_process';
2
+ import { mkdirSync, writeFileSync } from 'fs';
3
+ import { join } from 'path';
4
+
5
+ import { completeLeaseTask, createTask, failLeaseTask, getTaskSpec, listAuditEvents, listLeases, listTasks, listWorktrees, logAuditEvent, retryTask, startTaskLease, upsertTaskSpec } from './db.js';
6
+ import { scanAllWorktrees } from './detector.js';
7
+ import { runAiMergeGate } from './merge-gate.js';
8
+ import { evaluateTaskOutcome } from './outcome.js';
9
+ import { buildTaskSpec, planPipelineTasks } from './planner.js';
10
+ import { getWorktreeBranch } from './git.js';
11
+
12
// Block the current thread for `ms` milliseconds without touching the event
// loop: Atomics.wait on a throwaway shared buffer performs a true synchronous
// sleep. Non-positive durations return immediately.
function sleepSync(ms) {
  if (ms <= 0) {
    return;
  }
  const gate = new Int32Array(new SharedArrayBuffer(4));
  Atomics.wait(gate, 0, 0, ms);
}
17
+
18
// Remove duplicates from `values`, preserving first-seen order.
function uniq(values) {
  const seen = new Set();
  const result = [];
  for (const value of values) {
    if (!seen.has(value)) {
      seen.add(value);
      result.push(value);
    }
  }
  return result;
}
21
+
22
// Generate a reasonably unique pipeline id: `pipe-<epoch-ms>-<base36 suffix>`.
// Not cryptographically strong; uniqueness relies on the timestamp plus a
// short random suffix.
function makePipelineId() {
  const stamp = Date.now();
  const suffix = Math.random().toString(36).slice(2, 8);
  return ['pipe', stamp, suffix].join('-');
}
25
+
26
// Read the worktree name back out of a "Suggested worktree: <name>" line that
// startPipeline embedded in a task description. Returns null when absent.
function parseSuggestedWorktree(description) {
  const text = String(description || '');
  const found = /Suggested worktree:\s*(.+)$/m.exec(text);
  return found ? found[1] : null;
}
30
+
31
// Read the comma-separated task ids back out of a "Depends on: <ids>" line in
// a task description. Returns an empty array when the marker is absent;
// blank entries are dropped.
function parseDependencies(description) {
  const found = /Depends on:\s*(.+)$/m.exec(String(description || ''));
  if (found === null) return [];
  const ids = [];
  for (const part of found[1].split(',')) {
    const trimmed = part.trim();
    if (trimmed) ids.push(trimmed);
  }
  return ids;
}
39
+
40
// Recover the `pipeline_created` audit payload for a pipeline by scanning the
// most recent 500 such events. Returns the parsed details object, or null
// when no matching (or parseable) event exists.
function getPipelineMetadata(db, pipelineId) {
  const recent = listAuditEvents(db, { eventType: 'pipeline_created', limit: 500 });
  for (const entry of recent) {
    let details;
    try {
      details = JSON.parse(entry.details || '{}');
    } catch {
      continue; // Skip malformed audit payloads.
    }
    if (details.pipeline_id === pipelineId) {
      return details;
    }
  }
  return null;
}
54
+
55
// Shallow-copy a task row and attach its stored task spec (if any) as
// `task_spec`.
function withTaskSpec(db, task) {
  const task_spec = getTaskSpec(db, task.id);
  return { ...task, task_spec };
}
61
+
62
// Compute the next sequential task id for a pipeline (e.g. `pipe-x-03`).
// Each existing id's numeric suffix after `<pipelineId>-` is parsed;
// non-numeric suffixes are ignored. The counter is zero-padded to 2 digits.
function nextPipelineTaskId(tasks, pipelineId) {
  const prefixLength = `${pipelineId}-`.length;
  let highest = 0;
  for (const task of tasks) {
    const suffix = Number.parseInt(task.id.slice(prefixLength), 10);
    if (Number.isFinite(suffix) && suffix > highest) {
      highest = suffix;
    }
  }
  return `${pipelineId}-${String(highest + 1).padStart(2, '0')}`;
}
69
+
70
/**
 * Create a new pipeline: plan its tasks, persist each task plus its spec, and
 * record a `pipeline_created` audit event.
 *
 * @param {object} db - Database handle passed through to the db helpers.
 * @param {object} options
 * @param {string} options.title - Human-readable pipeline title.
 * @param {string|null} [options.description] - Optional long description; attached to the first task only.
 * @param {number} [options.priority=5] - Priority applied to every created task.
 * @param {string|null} [options.pipelineId] - Explicit id; a `pipe-…` id is generated when omitted.
 * @param {number} [options.maxTasks=5] - Upper bound passed to the planner.
 * @returns {{pipeline_id: string, title: string, description: string|null, priority: number, tasks: Array}} Pipeline descriptor with the created task records.
 */
export function startPipeline(db, { title, description = null, priority = 5, pipelineId = null, maxTasks = 5 }) {
  const resolvedPipelineId = pipelineId || makePipelineId();
  const registeredWorktrees = listWorktrees(db);
  // 'main' is treated as the repo root, not a work slot; only the other
  // registered worktrees are offered to the planner.
  const suggestedWorktrees = registeredWorktrees.filter((worktree) => worktree.name !== 'main');
  const repoRoot = registeredWorktrees.find((worktree) => worktree.name === 'main')?.path || process.cwd();
  const plannedTasks = planPipelineTasks({
    pipelineId: resolvedPipelineId,
    title,
    description,
    worktrees: suggestedWorktrees,
    maxTasks,
    repoRoot,
  });

  const tasks = plannedTasks.map((plannedTask, index) => {
    // Pipeline metadata is encoded as marker lines inside the task description
    // text; parseSuggestedWorktree/parseDependencies read these back later.
    const taskDescription = [
      `[Pipeline ${resolvedPipelineId}]`,
      plannedTask.suggested_worktree ? `Suggested worktree: ${plannedTask.suggested_worktree}` : null,
      plannedTask.dependencies.length > 0 ? `Depends on: ${plannedTask.dependencies.join(', ')}` : null,
      // The free-form pipeline description rides on the first task only.
      index === 0 && description ? description : null,
    ].filter(Boolean).join('\n');

    createTask(db, {
      id: plannedTask.id,
      title: plannedTask.title,
      description: taskDescription,
      priority,
    });
    upsertTaskSpec(db, plannedTask.id, plannedTask.task_spec);

    const taskRecord = {
      id: plannedTask.id,
      title: plannedTask.title,
      priority,
      suggested_worktree: plannedTask.suggested_worktree,
      dependencies: plannedTask.dependencies,
      task_spec: plannedTask.task_spec,
      status: 'pending',
    };

    return taskRecord;
  });

  // getPipelineMetadata later recovers title/description/priority from this
  // audit event, so its payload must stay in sync with the return value.
  logAuditEvent(db, {
    eventType: 'pipeline_created',
    status: 'allowed',
    details: JSON.stringify({
      pipeline_id: resolvedPipelineId,
      title,
      description,
      priority,
      task_ids: tasks.map((task) => task.id),
    }),
  });

  return {
    pipeline_id: resolvedPipelineId,
    title,
    description,
    priority,
    tasks,
  };
}
133
+
134
/**
 * Build a status snapshot for a pipeline: per-status counts plus an enriched
 * view of every task (spec, parsed dependencies, blockers, failure details,
 * and a suggested next action for failed tasks).
 *
 * @param {object} db - Database handle.
 * @param {string} pipelineId - Pipeline whose tasks share the `<pipelineId>-` id prefix.
 * @throws {Error} When no tasks with that prefix exist.
 */
export function getPipelineStatus(db, pipelineId) {
  const tasks = listTasks(db).filter((task) => task.id.startsWith(`${pipelineId}-`));
  if (tasks.length === 0) {
    throw new Error(`Pipeline ${pipelineId} does not exist.`);
  }

  // Pipeline-level fields come from the `pipeline_created` audit event when
  // available, else fall back to the first task's values. NOTE(review): the
  // `||` fallbacks also trigger on falsy metadata values (e.g. priority 0).
  const metadata = getPipelineMetadata(db, pipelineId);
  const counts = {
    pending: tasks.filter((task) => task.status === 'pending').length,
    in_progress: tasks.filter((task) => task.status === 'in_progress').length,
    done: tasks.filter((task) => task.status === 'done').length,
    failed: tasks.filter((task) => task.status === 'failed').length,
  };

  return {
    pipeline_id: pipelineId,
    title: metadata?.title || tasks[0].title,
    description: metadata?.description || null,
    priority: metadata?.priority || tasks[0].priority,
    counts,
    tasks: tasks.map((task) => {
      const dependencies = parseDependencies(task.description);
      // A dependency blocks this task until it is done; a dependency id that
      // matches no task in this pipeline also counts as blocking.
      const blockedBy = dependencies.filter((dependencyId) =>
        tasks.find((candidate) => candidate.id === dependencyId)?.status !== 'done',
      );
      const taskSpec = getTaskSpec(db, task.id);
      const failure = task.status === 'failed' ? parseTaskFailure(task.description) : null;
      return {
        ...task,
        task_spec: taskSpec,
        suggested_worktree: parseSuggestedWorktree(task.description),
        dependencies,
        blocked_by: blockedBy,
        ready_to_run: task.status === 'pending' && blockedBy.length === 0,
        failure,
        next_action: task.status === 'failed'
          ? inferTaskNextAction({ ...task, task_spec: taskSpec }, failure)
          : null,
      };
    }),
  };
}
176
+
177
// Parse the most recent "FAILED: <message>" line out of a task description.
// Messages of the form "<reason_code>: <summary>" are split into parts;
// otherwise reason_code is null and the whole message is the summary.
// Returns null when no failure line is present.
function parseTaskFailure(description) {
  const trimmedLines = String(description || '')
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean);

  let failureLine = null;
  for (let i = trimmedLines.length - 1; i >= 0; i -= 1) {
    if (trimmedLines[i].startsWith('FAILED: ')) {
      failureLine = trimmedLines[i];
      break;
    }
  }
  if (failureLine === null) return null;

  const message = failureLine.slice('FAILED: '.length);
  const parts = /^([a-z0-9_]+):\s*(.+)$/i.exec(message);
  if (parts) {
    return { raw: message, reason_code: parts[1], summary: parts[2] };
  }
  return { raw: message, reason_code: null, summary: message };
}
193
+
194
// Map a failure reason code to a human-readable suggested next action.
// `task` is accepted for interface parity but is not currently consulted.
// Returns null when there is no reason code at all; unknown codes fall back
// to generic guidance.
function inferTaskNextAction(task, failure) {
  if (!failure?.reason_code) return null;

  const guidanceByReason = {
    changes_outside_claims: 'Claim every edited file first, or split the task into smaller scoped changes.',
    changes_outside_task_scope: 'Keep edits inside allowed paths or widen the planned task scope.',
    missing_expected_tests: 'Add or update tests before rerunning this task.',
    missing_expected_docs: 'Add the expected docs change or update the docs path in the task spec.',
    missing_expected_source_changes: 'Make a source change inside the claimed task scope.',
    objective_not_evidenced: 'Produce output that clearly matches the task objective or rewrite the task intent.',
    no_changes_detected: 'Create a tracked file change or move the work into a more appropriate follow-up task.',
  };

  // hasOwn guards against prototype keys masquerading as reason codes.
  return Object.hasOwn(guidanceByReason, failure.reason_code)
    ? guidanceByReason[failure.reason_code]
    : 'Inspect the task output and rerun with a clearer, narrower task scope.';
}
216
+
217
// Pick a worktree for a task, CONSUMING it from `availableWorktrees` (the
// array is mutated: the chosen entry is removed from the pool). Prefers the
// worktree suggested in the task description when still available, otherwise
// takes the first free worktree; returns null when the pool is empty.
function chooseWorktree(task, availableWorktrees) {
  const suggestedName = parseSuggestedWorktree(task.description);
  if (suggestedName) {
    const index = availableWorktrees.findIndex((candidate) => candidate.name === suggestedName);
    if (index !== -1) {
      const [chosen] = availableWorktrees.splice(index, 1);
      return chosen;
    }
  }

  return availableWorktrees.length > 0 ? availableWorktrees.shift() : null;
}
229
+
230
// Build the environment for a launched agent process: the inherited process
// env plus SWITCHMAN_* variables describing the task, its lease, and the
// assigned worktree. Missing spec fields become empty strings so every
// variable is always defined.
function buildLaunchEnv(repoRoot, task, lease, worktree) {
  const spec = task.task_spec || null;
  const policy = spec?.execution_policy || null;
  // The pipeline id is the task id minus its trailing `-NN` counter segment.
  const pipelineId = task.id.split('-').slice(0, -1).join('-');

  const env = { ...process.env };
  env.SWITCHMAN_PIPELINE_ID = pipelineId;
  env.SWITCHMAN_TASK_ID = task.id;
  env.SWITCHMAN_TASK_TITLE = task.title;
  env.SWITCHMAN_TASK_TYPE = spec?.task_type || '';
  env.SWITCHMAN_TASK_SPEC = spec ? JSON.stringify(spec) : '';
  env.SWITCHMAN_TASK_OUTPUT_PATH = spec?.primary_output_path || '';
  env.SWITCHMAN_TASK_TIMEOUT_MS = policy?.timeout_ms ? String(policy.timeout_ms) : '';
  env.SWITCHMAN_TASK_MAX_RETRIES = Number.isInteger(policy?.max_retries) ? String(policy.max_retries) : '';
  env.SWITCHMAN_TASK_RETRY_BACKOFF_MS = policy?.retry_backoff_ms ? String(policy.retry_backoff_ms) : '';
  env.SWITCHMAN_LEASE_ID = lease.id;
  env.SWITCHMAN_WORKTREE = worktree.name;
  env.SWITCHMAN_WORKTREE_PATH = worktree.path;
  env.SWITCHMAN_REPO_ROOT = repoRoot;
  return env;
}
250
+
251
// Resolve the current HEAD commit hash of a worktree via `git rev-parse`.
// Returns null when git exits non-zero or produces no output.
function getHeadRevision(worktreePath) {
  const revParse = spawnSync('git', ['rev-parse', 'HEAD'], {
    cwd: worktreePath,
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'ignore'],
  });
  if (revParse.status !== 0) return null;
  const head = String(revParse.stdout || '').trim();
  return head === '' ? null : head;
}
260
+
261
// Count how many retries have already been scheduled for a task, using the
// audit log as the source of truth (capped at the most recent 1000 events).
function getTaskRetryCount(db, taskId) {
  const retryEvents = listAuditEvents(db, {
    eventType: 'pipeline_task_retry_scheduled',
    taskId,
    limit: 1000,
  });
  return retryEvents.length;
}
268
+
269
// Normalize a task spec's execution policy. Spec values win when valid,
// then the supplied defaults, then the built-in fallbacks (timeout 0,
// 1 retry, backoff 0). All results are clamped to be non-negative.
function resolveExecutionPolicy(taskSpec, defaults = {}) {
  const declared = taskSpec?.execution_policy || {};
  const pick = (value, isValid, fallback) => (isValid(value) ? value : fallback);

  const timeoutMs = pick(declared.timeout_ms, Number.isFinite, defaults.timeoutMs ?? 0);
  const maxRetries = pick(declared.max_retries, Number.isInteger, defaults.maxRetries ?? 1);
  const retryBackoffMs = pick(declared.retry_backoff_ms, Number.isFinite, defaults.retryBackoffMs ?? 0);

  return {
    timeout_ms: Math.max(0, timeoutMs),
    max_retries: Math.max(0, maxRetries),
    retry_backoff_ms: Math.max(0, retryBackoffMs),
  };
}
281
+
282
/**
 * Schedule one retry for a failed task, counting attempts already used from
 * the audit log against `maxRetries`. On success this logs a
 * `pipeline_task_retry_scheduled` event and then blocks synchronously for the
 * linear backoff delay before returning.
 *
 * @returns {{retried: boolean, retry_attempt: number, retries_remaining: number, retry_delay_ms: number}}
 */
function scheduleTaskRetry(db, { pipelineId, taskId, maxRetries, retryBackoffMs = 0 }) {
  const retriesUsed = getTaskRetryCount(db, taskId);
  if (retriesUsed >= maxRetries) {
    // Retry budget exhausted.
    return {
      retried: false,
      retry_attempt: retriesUsed,
      retries_remaining: 0,
      retry_delay_ms: 0,
    };
  }

  const nextAttempt = retriesUsed + 1;
  // Linear backoff: the delay grows with the attempt number.
  const delayMs = Math.max(0, retryBackoffMs * nextAttempt);
  const task = retryTask(db, taskId, `retry attempt ${nextAttempt} of ${maxRetries}`);
  if (!task) {
    // retryTask declined — presumably the task is no longer in a retryable
    // state; TODO confirm against db.js.
    return {
      retried: false,
      retry_attempt: retriesUsed,
      retries_remaining: Math.max(0, maxRetries - retriesUsed),
      retry_delay_ms: 0,
    };
  }

  // This event is what getTaskRetryCount counts, so it must be logged exactly
  // once per scheduled retry.
  logAuditEvent(db, {
    eventType: 'pipeline_task_retry_scheduled',
    status: 'warn',
    taskId,
    reasonCode: 'retry_scheduled',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      retry_attempt: nextAttempt,
      max_retries: maxRetries,
      retry_delay_ms: delayMs,
    }),
  });

  // Block in-process for the backoff window; callers execute tasks synchronously.
  if (delayMs > 0) sleepSync(delayMs);

  return {
    retried: true,
    retry_attempt: nextAttempt,
    retries_remaining: Math.max(0, maxRetries - nextAttempt),
    retry_delay_ms: delayMs,
  };
}
327
+
328
/**
 * Re-queue failed tasks of a pipeline that still have retry budget under
 * their execution policy. Unlike scheduleTaskRetry this never sleeps; it only
 * flips task state via retryTask and logs `retry_resumed` audit events
 * (which share the `pipeline_task_retry_scheduled` event type and therefore
 * count against the same retry budget).
 *
 * @returns {Array<{task_id: string, retry_attempt: number, retries_remaining: number}>} One entry per task actually resumed.
 */
function resumeRetryablePipelineTasks(db, pipelineId, defaults = {}) {
  const tasks = listTasks(db)
    .filter((task) => task.id.startsWith(`${pipelineId}-`) && task.status === 'failed')
    .map((task) => withTaskSpec(db, task))
    .sort((a, b) => a.id.localeCompare(b.id));
  const resumed = [];

  for (const task of tasks) {
    const executionPolicy = resolveExecutionPolicy(task.task_spec, defaults);
    if (executionPolicy.max_retries <= 0) continue;
    const retriesUsed = getTaskRetryCount(db, task.id);
    if (retriesUsed >= executionPolicy.max_retries) continue;

    const nextAttempt = retriesUsed + 1;
    const resumedTask = retryTask(db, task.id, `resume retry attempt ${nextAttempt} of ${executionPolicy.max_retries}`);
    if (!resumedTask) continue; // retryTask declined; skip silently.

    logAuditEvent(db, {
      eventType: 'pipeline_task_retry_scheduled',
      status: 'warn',
      taskId: task.id,
      reasonCode: 'retry_resumed',
      details: JSON.stringify({
        pipeline_id: pipelineId,
        retry_attempt: nextAttempt,
        max_retries: executionPolicy.max_retries,
        resumed: true,
      }),
    });

    resumed.push({
      task_id: task.id,
      retry_attempt: nextAttempt,
      retries_remaining: Math.max(0, executionPolicy.max_retries - nextAttempt),
    });
  }

  return resumed;
}
367
+
368
/**
 * Dispatch the runnable tasks of a pipeline onto free worktrees.
 *
 * Only pending tasks whose declared dependencies are all done are considered.
 * Each dispatched task gets a lease on a free non-main worktree; when
 * `agentCommand` is non-empty an agent process is spawned per assignment
 * (optionally detached). Dispatch stops once the worktree pool is exhausted.
 *
 * @param {object} db - Database handle.
 * @param {string} repoRoot - Repository root passed into the agent environment.
 * @param {object} options
 * @param {string} options.pipelineId
 * @param {string[]} [options.agentCommand] - Command + args to spawn per assignment; empty means dispatch only.
 * @param {string} [options.agentName] - Agent name recorded on the lease.
 * @param {boolean} [options.detached] - Spawn agents detached with ignored stdio.
 * @throws {Error} When the pipeline has no runnable pending tasks.
 * @returns {{pipeline_id: string, assigned: Array, launched: Array, remaining_pending: number}}
 */
export function runPipeline(
  db,
  repoRoot,
  {
    pipelineId,
    agentCommand = [],
    agentName = 'pipeline-runner',
    detached = false,
  },
) {
  const allPipelineTasks = listTasks(db).filter((task) => task.id.startsWith(`${pipelineId}-`));
  const taskStatusById = new Map(allPipelineTasks.map((task) => [task.id, task.status]));
  // Runnable = pending AND every declared dependency is already done.
  const tasks = allPipelineTasks
    .map((task) => withTaskSpec(db, task))
    .filter((task) => task.status === 'pending')
    .filter((task) => parseDependencies(task.description).every((dependencyId) => taskStatusById.get(dependencyId) === 'done'))
    .sort((a, b) => a.id.localeCompare(b.id));
  if (tasks.length === 0) {
    throw new Error(`Pipeline ${pipelineId} has no pending tasks to run.`);
  }

  // Worktree pool: every registered non-main worktree without an active lease.
  const activeLeaseWorktrees = new Set(listLeases(db, 'active').map((lease) => lease.worktree));
  const availableWorktrees = listWorktrees(db)
    .filter((worktree) => worktree.name !== 'main' && !activeLeaseWorktrees.has(worktree.name))
    .sort((a, b) => a.name.localeCompare(b.name));

  const assignments = [];
  const launched = [];

  for (const task of tasks) {
    // chooseWorktree consumes an entry from the pool; an empty pool ends dispatch.
    const worktree = chooseWorktree(task, availableWorktrees);
    if (!worktree) break;

    // NOTE(review): if the lease is refused, the chosen worktree is not
    // returned to the pool for the remainder of this run.
    const lease = startTaskLease(db, task.id, worktree.name, agentName);
    if (!lease) continue;

    const assignment = {
      task_id: task.id,
      title: task.title,
      task_spec: task.task_spec || null,
      worktree: worktree.name,
      worktree_path: worktree.path,
      lease_id: lease.id,
    };
    assignments.push(assignment);

    logAuditEvent(db, {
      eventType: 'pipeline_task_dispatched',
      status: 'allowed',
      worktree: worktree.name,
      taskId: task.id,
      leaseId: lease.id,
      details: JSON.stringify({ pipeline_id: pipelineId }),
    });

    if (agentCommand.length > 0) {
      const [command, ...args] = agentCommand;
      const child = spawn(command, args, {
        cwd: worktree.path,
        env: buildLaunchEnv(repoRoot, task, lease, worktree),
        detached,
        stdio: detached ? 'ignore' : 'inherit',
      });
      // Detached children must be unref'd so this process can exit.
      if (detached) child.unref();

      launched.push({
        ...assignment,
        pid: child.pid,
        command,
        args,
      });

      logAuditEvent(db, {
        eventType: 'pipeline_agent_launched',
        status: 'allowed',
        worktree: worktree.name,
        taskId: task.id,
        leaseId: lease.id,
        details: JSON.stringify({
          pipeline_id: pipelineId,
          command,
          args,
          pid: child.pid,
        }),
      });
    }
  }

  return {
    pipeline_id: pipelineId,
    assigned: assignments,
    launched,
    remaining_pending: tasks.length - assignments.length,
  };
}
463
+
464
/**
 * Run one synchronous iteration of a pipeline: dispatch runnable tasks, then
 * execute the agent command for each assignment in-process via spawnSync,
 * evaluate the outcome, and either complete the lease or fail it and (maybe)
 * schedule a retry. A `pipeline_task_executed` audit event is emitted per
 * assignment.
 *
 * @returns {object} The dispatch result plus an `executed` array with per-task
 *   outcome, retry, and process details.
 */
function runPipelineIteration(
  db,
  repoRoot,
  {
    pipelineId,
    agentCommand = [],
    agentName = 'pipeline-runner',
    maxRetries = 1,
    retryBackoffMs = 0,
    timeoutMs = 0,
  },
) {
  // Dispatch with an empty agentCommand so runPipeline only assigns leases;
  // the processes are run synchronously below instead of being spawned async.
  const dispatch = runPipeline(db, repoRoot, {
    pipelineId,
    agentCommand: [],
    agentName,
    detached: false,
  });

  const executed = [];

  if (agentCommand.length > 0) {
    for (const assignment of dispatch.assigned) {
      const [command, ...args] = agentCommand;
      // Per-task spec policy wins; the caller's values act as defaults.
      const executionPolicy = resolveExecutionPolicy(assignment.task_spec, {
        maxRetries,
        retryBackoffMs,
        timeoutMs,
      });
      // HEAD is sampled before/after so committed-only work is not mistaken
      // for "no changes".
      const beforeHead = getHeadRevision(assignment.worktree_path);
      const result = spawnSync(command, args, {
        cwd: assignment.worktree_path,
        env: buildLaunchEnv(
          repoRoot,
          { id: assignment.task_id, title: assignment.title, task_spec: assignment.task_spec },
          { id: assignment.lease_id },
          { name: assignment.worktree, path: assignment.worktree_path },
        ),
        encoding: 'utf8',
        timeout: executionPolicy.timeout_ms > 0 ? executionPolicy.timeout_ms : undefined,
      });
      const afterHead = getHeadRevision(assignment.worktree_path);

      // spawnSync reports a timeout as an ETIMEDOUT error.
      const timedOut = result.error?.code === 'ETIMEDOUT';
      const commandOk = !result.error && result.status === 0;
      let evaluation = commandOk
        ? evaluateTaskOutcome(db, repoRoot, { leaseId: assignment.lease_id })
        : null;
      // A new commit with a clean working tree is still a real change; accept it.
      if (commandOk && evaluation?.reason_code === 'no_changes_detected' && beforeHead && afterHead && beforeHead !== afterHead) {
        evaluation = {
          status: 'accepted',
          reason_code: null,
          changed_files: [],
          claimed_files: [],
          findings: ['task created a new commit with no remaining uncommitted diff'],
        };
      }
      const ok = commandOk && evaluation?.status === 'accepted';
      // Default retry record for the success path; overwritten on failure.
      let retry = {
        retried: false,
        retry_attempt: getTaskRetryCount(db, assignment.task_id),
        retries_remaining: Math.max(0, executionPolicy.max_retries - getTaskRetryCount(db, assignment.task_id)),
        retry_delay_ms: 0,
      };
      if (ok) {
        completeLeaseTask(db, assignment.lease_id);
      } else {
        // Build a "reason_code: findings" message that parseTaskFailure can
        // later recover from the task description.
        const failureReason = !commandOk
          ? (timedOut
            ? `agent command timed out after ${executionPolicy.timeout_ms}ms`
            : (result.error?.message || `agent command exited with status ${result.status}`))
          : `${evaluation.reason_code}: ${evaluation.findings.join('; ')}`;
        failLeaseTask(db, assignment.lease_id, failureReason);
        retry = scheduleTaskRetry(db, {
          pipelineId,
          taskId: assignment.task_id,
          maxRetries: executionPolicy.max_retries,
          retryBackoffMs: executionPolicy.retry_backoff_ms,
        });
      }

      executed.push({
        ...assignment,
        ok,
        outcome_status: evaluation?.status ?? null,
        outcome_reason_code: evaluation?.reason_code ?? null,
        outcome_findings: evaluation?.findings ?? [],
        retried: retry.retried,
        retry_attempt: retry.retry_attempt,
        retries_remaining: retry.retries_remaining,
        retry_delay_ms: retry.retry_delay_ms,
        execution_policy: executionPolicy,
        timed_out: timedOut,
        exit_code: result.status,
        stdout: result.stdout || '',
        stderr: result.stderr || '',
      });

      logAuditEvent(db, {
        eventType: 'pipeline_task_executed',
        status: ok ? 'allowed' : 'denied',
        reasonCode: ok ? null : (timedOut ? 'task_execution_timeout' : 'agent_command_failed'),
        worktree: assignment.worktree,
        taskId: assignment.task_id,
        leaseId: assignment.lease_id,
        details: JSON.stringify({
          pipeline_id: pipelineId,
          command,
          args,
          exit_code: result.status,
          timed_out: timedOut,
          execution_policy: executionPolicy,
          outcome_status: evaluation?.status ?? null,
          outcome_reason_code: evaluation?.reason_code ?? null,
          retried: retry.retried,
          retry_attempt: retry.retry_attempt,
          retries_remaining: retry.retries_remaining,
        }),
      });
    }
  }

  return {
    ...dispatch,
    executed,
  };
}
591
+
592
/**
 * Assemble a pull-request summary for a pipeline: gate results, provenance of
 * completed tasks, risk notes, a reviewer checklist, a ready-to-merge flag,
 * plus rendered markdown and a PR title/body artifact. Also logs a
 * `pipeline_pr_summary` audit event.
 *
 * @param {object} db - Database handle.
 * @param {string} repoRoot - Repository root for worktree scanning.
 * @param {string} pipelineId
 * @returns {Promise<object>} Summary object (see the return literal below).
 */
export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
  const status = getPipelineStatus(db, pipelineId);
  const report = await scanAllWorktrees(db, repoRoot);
  const aiGate = await runAiMergeGate(db, repoRoot);
  const allLeases = listLeases(db);
  // Mechanical "repo gate": no conflicts of any kind, no unclaimed changes,
  // full compliance, and no blocking dependency invalidations from the AI gate.
  const ciGateOk = report.conflicts.length === 0
    && report.fileConflicts.length === 0
    && (report.semanticConflicts?.length || 0) === 0
    && report.unclaimedChanges.length === 0
    && report.complianceSummary.non_compliant === 0
    && report.complianceSummary.stale === 0
    && (aiGate.dependency_invalidations || []).filter((item) => item.severity === 'blocked').length === 0;

  // Tasks that were never leased have no worktree; they are filtered out here.
  const involvedWorktrees = [...new Set(status.tasks.map((task) => task.worktree).filter(Boolean))];
  const worktreeChanges = involvedWorktrees.map((worktree) => ({
    worktree,
    files: report.fileMap?.[worktree] ?? [],
  }));
  const completedTasks = status.tasks.filter((task) => task.status === 'done');
  const remainingTasks = status.tasks.filter((task) => task.status !== 'done');
  // First completed lease per task wins; later completed leases are ignored.
  const completedLeaseByTask = new Map();
  for (const lease of allLeases) {
    if (lease.status !== 'completed') continue;
    if (!completedLeaseByTask.has(lease.task_id)) {
      completedLeaseByTask.set(lease.task_id, lease);
    }
  }
  const provenance = completedTasks.map((task) => ({
    task_id: task.id,
    lease_id: completedLeaseByTask.get(task.id)?.id || null,
    title: task.title,
    task_type: task.task_spec?.task_type || null,
    risk_level: task.task_spec?.risk_level || null,
    worktree: task.worktree || task.suggested_worktree || null,
    agent: completedLeaseByTask.get(task.id)?.agent || null,
    subsystem_tags: task.task_spec?.subsystem_tags || [],
    required_deliverables: task.task_spec?.required_deliverables || [],
  }));
  const changedFiles = uniq(worktreeChanges.flatMap((entry) => entry.files));
  const subsystemTags = uniq(completedTasks.flatMap((task) => task.task_spec?.subsystem_tags || []));
  // Human-readable warnings surfaced in the markdown "Risk Notes" section.
  const riskNotes = [];
  if (!ciGateOk) riskNotes.push('Repo gate is blocked by conflicts, unmanaged changes, or stale worktrees.');
  if (aiGate.status !== 'pass') riskNotes.push(aiGate.summary);
  if ((aiGate.dependency_invalidations || []).length > 0) {
    riskNotes.push('Some completed work is stale and needs revalidation after a shared boundary changed.');
  }
  if ((report.semanticConflicts?.length || 0) > 0) {
    riskNotes.push('Semantic overlap was detected between changed exported objects across worktrees.');
  }
  if (completedTasks.some((task) => task.task_spec?.risk_level === 'high')) {
    riskNotes.push('High-risk work is included in this PR and should receive explicit reviewer attention.');
  }
  if (changedFiles.some((file) => /(^|\/)(auth|payments|db|migrations?|schema|config)(\/|$)/i.test(file))) {
    riskNotes.push('Changed files touch sensitive areas such as auth, payments, schema, or config.');
  }
  const reviewerChecklist = [
    ciGateOk ? 'Repo gate passed' : 'Resolve repo gate failures before merge',
    aiGate.status === 'pass' ? 'AI merge gate passed' : `Review AI merge gate findings: ${aiGate.summary}`,
    completedTasks.some((task) => task.task_spec?.risk_level === 'high')
      ? 'Confirm high-risk tasks have the expected tests and docs'
      : 'Review changed files and task outcomes',
  ];
  // NOTE(review): the startsWith check is case-sensitive while the replace
  // regex is case-insensitive — an 'implement:' prefix is left untouched.
  const prTitle = status.title.startsWith('Implement:')
    ? status.title.replace(/^Implement:\s*/i, '')
    : status.title;
  const prBody = [
    '## Summary',
    ...(completedTasks.length > 0
      ? completedTasks.map((task) => `- ${task.title}`)
      : ['- No completed tasks yet']),
    '',
    '## Validation',
    `- Repo gate: ${ciGateOk ? 'pass' : 'blocked'}`,
    `- AI merge gate: ${aiGate.status}`,
    '',
    '## Reviewer Checklist',
    ...reviewerChecklist.map((item) => `- ${item}`),
    '',
    '## Provenance',
    ...(provenance.length > 0
      ? provenance.map((entry) => `- ${entry.task_id} (${entry.task_type || 'unknown'}) via ${entry.worktree || 'unassigned'} lease ${entry.lease_id || 'none'}`)
      : ['- No completed task provenance yet']),
  ].join('\n');
  // Ready to publish only when every task is done and both gates allow merge.
  const ready = status.counts.failed === 0
    && status.counts.pending === 0
    && status.counts.in_progress === 0
    && status.counts.done > 0
    && ciGateOk
    && aiGate.status !== 'blocked';

  // Longer internal report (written to pr-summary.md by exportPipelinePrBundle).
  const markdown = [
    `# PR Summary: ${status.title}`,
    '',
    `- Pipeline: \`${pipelineId}\``,
    `- Task status: ${status.counts.done} done, ${status.counts.in_progress} in progress, ${status.counts.pending} pending, ${status.counts.failed} failed`,
    `- CI gate: ${ciGateOk ? 'pass' : 'blocked'}`,
    `- AI merge gate: ${aiGate.status}`,
    '',
    '## Completed Tasks',
    ...completedTasks.map((task) => `- ${task.title}`),
    ...(completedTasks.length === 0 ? ['- None yet'] : []),
    '',
    '## Remaining Tasks',
    ...remainingTasks.map((task) => `- [${task.status}] ${task.title}`),
    ...(remainingTasks.length === 0 ? ['- None'] : []),
    '',
    '## Worktree Changes',
    ...worktreeChanges.map((entry) => `- ${entry.worktree}: ${entry.files.length ? entry.files.join(', ') : 'no active changes'}`),
    ...(worktreeChanges.length === 0 ? ['- No active worktree assignments yet'] : []),
    '',
    '## Reviewer Notes',
    ...reviewerChecklist.map((item) => `- ${item}`),
    '',
    '## Provenance',
    ...provenance.map((entry) => `- ${entry.task_id}: ${entry.title} (${entry.task_type || 'unknown'}, ${entry.worktree || 'unassigned'}, lease ${entry.lease_id || 'none'})`),
    ...(provenance.length === 0 ? ['- No completed task provenance yet'] : []),
    '',
    '## Gate Notes',
    `- Repo gate summary: ${ciGateOk ? 'clear' : 'blocked by conflicts or unmanaged changes'}`,
    `- AI merge summary: ${aiGate.summary}`,
    ...(riskNotes.length > 0 ? ['', '## Risk Notes', ...riskNotes.map((note) => `- ${note}`)] : []),
  ].join('\n');

  logAuditEvent(db, {
    eventType: 'pipeline_pr_summary',
    status: ready ? 'allowed' : 'warn',
    reasonCode: ready ? null : 'pipeline_not_ready',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      ready,
      ci_gate_ok: ciGateOk,
      ai_gate_status: aiGate.status,
    }),
  });

  return {
    ready,
    pipeline_id: pipelineId,
    title: status.title,
    pr_artifact: {
      title: prTitle,
      body: prBody,
      reviewer_checklist: reviewerChecklist,
      provenance,
      risk_notes: riskNotes,
      changed_files: changedFiles,
      subsystem_tags: subsystemTags,
    },
    counts: status.counts,
    ci_gate: {
      ok: ciGateOk,
      summary: ciGateOk ? 'Repo gate passed.' : 'Repo gate blocked by conflicts, unmanaged changes, or stale worktrees.',
    },
    ai_gate: {
      ok: aiGate.status !== 'blocked',
      status: aiGate.status,
      summary: aiGate.summary,
    },
    worktree_changes: worktreeChanges,
    markdown,
  };
}
754
+
755
/**
 * Write the PR summary for a pipeline to disk as a three-file bundle:
 * `pr-summary.json` (full summary), `pr-summary.md` (internal report), and
 * `pr-body.md` (the PR body used by publishPipelinePr). Defaults to
 * `<repoRoot>/.switchman/pipelines/<pipelineId>` and logs a
 * `pipeline_pr_bundle_exported` audit event.
 *
 * @param {string|null} [outputDir] - Override for the bundle directory.
 * @returns {Promise<{pipeline_id: string, output_dir: string, files: object, summary: object}>}
 */
export async function exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir = null) {
  const summary = await buildPipelinePrSummary(db, repoRoot, pipelineId);
  const bundleDir = outputDir || join(repoRoot, '.switchman', 'pipelines', pipelineId);
  mkdirSync(bundleDir, { recursive: true });

  const summaryJsonPath = join(bundleDir, 'pr-summary.json');
  const summaryMarkdownPath = join(bundleDir, 'pr-summary.md');
  const prBodyPath = join(bundleDir, 'pr-body.md');

  // Each artifact gets a trailing newline for POSIX-friendly files.
  writeFileSync(summaryJsonPath, `${JSON.stringify(summary, null, 2)}\n`);
  writeFileSync(summaryMarkdownPath, `${summary.markdown}\n`);
  writeFileSync(prBodyPath, `${summary.pr_artifact.body}\n`);

  logAuditEvent(db, {
    eventType: 'pipeline_pr_bundle_exported',
    status: 'allowed',
    reasonCode: null,
    details: JSON.stringify({
      pipeline_id: pipelineId,
      output_dir: bundleDir,
      files: [summaryJsonPath, summaryMarkdownPath, prBodyPath],
    }),
  });

  return {
    pipeline_id: pipelineId,
    output_dir: bundleDir,
    files: {
      summary_json: summaryJsonPath,
      summary_markdown: summaryMarkdownPath,
      pr_body_markdown: prBodyPath,
    },
    summary,
  };
}
790
+
791
// Determine which branch a pipeline PR should be opened from.
// Resolution order: an explicitly supplied branch; the single branch used by
// implementation-type tasks; the single branch used by any pipeline task; the
// repo root's current branch when it is not `main`; otherwise null.
// Branches named `main` or `unknown` are never candidates.
function resolvePipelineHeadBranch(db, repoRoot, pipelineStatus, explicitHeadBranch = null) {
  if (explicitHeadBranch) return explicitHeadBranch;

  const worktreesByName = new Map(listWorktrees(db).map((worktree) => [worktree.name, worktree]));
  const branchForTask = (task) => {
    const worktreeName = task.worktree || task.suggested_worktree || null;
    if (!worktreeName) return null;
    const branch = worktreesByName.get(worktreeName)?.branch || null;
    if (!branch || branch === 'main' || branch === 'unknown') return null;
    return branch;
  };

  const implementationTasks = pipelineStatus.tasks.filter(
    (task) => task.task_spec?.task_type === 'implementation',
  );
  const implementationBranches = uniq(implementationTasks.map(branchForTask).filter(Boolean));
  if (implementationBranches.length === 1) {
    return implementationBranches[0];
  }

  const allBranches = uniq(pipelineStatus.tasks.map(branchForTask).filter(Boolean));
  if (allBranches.length === 1) {
    return allBranches[0];
  }

  const currentBranch = getWorktreeBranch(repoRoot);
  if (currentBranch && currentBranch !== 'main') {
    return currentBranch;
  }

  return null;
}
828
+
829
/**
 * Export the PR bundle for a pipeline and open a pull request with the GitHub
 * CLI (`gh pr create`). The head branch is resolved from the pipeline's
 * worktrees unless given explicitly. Logs a `pipeline_pr_published` audit
 * event regardless of outcome.
 *
 * @param {object} [options]
 * @param {string} [options.baseBranch='main'] - PR base branch.
 * @param {string|null} [options.headBranch] - Explicit head branch override.
 * @param {boolean} [options.draft=false] - Create the PR as a draft.
 * @param {string} [options.ghCommand='gh'] - CLI binary to invoke.
 * @param {string|null} [options.outputDir] - Bundle directory override.
 * @throws {Error} When no head branch can be resolved, or `gh` fails.
 * @returns {Promise<{pipeline_id: string, base_branch: string, head_branch: string, draft: boolean, bundle: object, output: string}>}
 */
export async function publishPipelinePr(
  db,
  repoRoot,
  pipelineId,
  {
    baseBranch = 'main',
    headBranch = null,
    draft = false,
    ghCommand = 'gh',
    outputDir = null,
  } = {},
) {
  const bundle = await exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir);
  const status = getPipelineStatus(db, pipelineId);
  const resolvedHeadBranch = resolvePipelineHeadBranch(db, repoRoot, status, headBranch);

  if (!resolvedHeadBranch) {
    throw new Error(`Could not determine a head branch for pipeline ${pipelineId}. Pass --head <branch>.`);
  }

  // The exported pr-body.md is passed via --body-file to avoid shell quoting.
  const args = [
    'pr',
    'create',
    '--base',
    baseBranch,
    '--head',
    resolvedHeadBranch,
    '--title',
    bundle.summary.pr_artifact.title,
    '--body-file',
    bundle.files.pr_body_markdown,
  ];

  if (draft) {
    args.push('--draft');
  }

  const result = spawnSync(ghCommand, args, {
    cwd: repoRoot,
    encoding: 'utf8',
  });

  const ok = !result.error && result.status === 0;
  const output = `${result.stdout || ''}${result.stderr || ''}`.trim();

  // Audit both success and failure; output is truncated to keep events small.
  logAuditEvent(db, {
    eventType: 'pipeline_pr_published',
    status: ok ? 'allowed' : 'denied',
    reasonCode: ok ? null : 'pr_publish_failed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      base_branch: baseBranch,
      head_branch: resolvedHeadBranch,
      gh_command: ghCommand,
      draft,
      exit_code: result.status,
      output: output.slice(0, 500),
    }),
  });

  if (!ok) {
    throw new Error(result.error?.message || output || `gh pr create failed with status ${result.status}`);
  }

  return {
    pipeline_id: pipelineId,
    base_branch: baseBranch,
    head_branch: resolvedHeadBranch,
    draft,
    bundle,
    output,
  };
}
902
+
903
/**
 * Inspect worktree scan results and the AI merge gate for a pipeline and
 * create follow-up tasks for anything that needs attention (unmanaged
 * changes, merge conflicts, blocked AI findings, missing tests).
 *
 * Follow-ups are deduplicated by title against the pipeline's existing
 * tasks, so calling this repeatedly is safe.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Repository root to scan.
 * @param {string} pipelineId - Pipeline the follow-ups belong to.
 * @returns {Promise<object>} Created tasks plus AI/CI gate status flags.
 */
export async function createPipelineFollowupTasks(db, repoRoot, pipelineId) {
  const status = getPipelineStatus(db, pipelineId);
  const report = await scanAllWorktrees(db, repoRoot);
  const aiGate = await runAiMergeGate(db, repoRoot);

  const knownTitles = new Set(status.tasks.map((task) => task.title));
  // A "planned" tests task is one from the original plan, not a previously
  // generated "Add missing tests ..." follow-up.
  const hasPlannedTestsTask = status.tasks.some(
    (task) => task.task_spec?.task_type === 'tests' && !task.title.startsWith('Add missing tests'),
  );
  const hasGovernanceTask = status.tasks.some((task) => task.task_spec?.task_type === 'governance');
  const createdTasks = [];
  const allPipelineTasks = [...status.tasks];

  // Create one follow-up task unless an identically-titled task already exists.
  const addFollowup = (title, description) => {
    if (knownTitles.has(title)) return;
    const taskId = nextPipelineTaskId(allPipelineTasks, pipelineId);
    const taskSpec = buildTaskSpec({
      pipelineId,
      taskId,
      title,
      issueTitle: status.title,
      issueDescription: status.description,
      dependencies: [],
    });
    createTask(db, {
      id: taskId,
      title,
      description: [`[Pipeline ${pipelineId}]`, description].filter(Boolean).join('\n'),
      priority: status.priority,
    });
    upsertTaskSpec(db, taskId, taskSpec);
    // Track the new task locally so subsequent ids/dedupe see it.
    allPipelineTasks.push({ id: taskId, title, task_spec: taskSpec });
    knownTitles.add(title);
    createdTasks.push({ id: taskId, title, description, task_spec: taskSpec });
  };

  // Unmanaged edits found in any worktree become governance follow-ups.
  for (const change of report.unclaimedChanges) {
    const reasonSummary = change.reasons
      .map((reason) => `${reason.file}:${reason.reason_code}`)
      .join(', ');
    addFollowup(
      `Govern unmanaged changes in ${change.worktree}`,
      `Files: ${change.files.join(', ')}\nReasons: ${reasonSummary}`,
    );
  }

  // One resolution task per conflicting worktree pair.
  for (const conflict of report.conflicts) {
    addFollowup(
      `Resolve merge conflict between ${conflict.worktreeA} and ${conflict.worktreeB}`,
      `Conflicting files: ${conflict.conflictingFiles.join(', ')}`,
    );
  }

  // Surface blocked AI-gate pairs only when no planned governance task covers them.
  if (!hasGovernanceTask && aiGate.status === 'blocked') {
    const blockedPairs = aiGate.pairs.filter((pair) => pair.status !== 'pass');
    if (blockedPairs.length > 0) {
      const findingsText = blockedPairs
        .map((pair) => `${pair.worktree_a} <-> ${pair.worktree_b}\n${pair.reasons.join('\n')}`)
        .join('\n\n');
      addFollowup('Review blocked AI merge findings', findingsText);
    }
  }

  // Missing-test follow-ups are skipped entirely when the plan already has a tests task.
  if (!hasPlannedTestsTask) {
    for (const worktree of aiGate.worktrees) {
      if (worktree.findings.includes('source changes without corresponding test updates')) {
        addFollowup(
          `Add missing tests for ${worktree.worktree}`,
          `Source files without test updates: ${worktree.source_files.join(', ')}`,
        );
      }
    }
  }

  logAuditEvent(db, {
    eventType: 'pipeline_followups_created',
    status: createdTasks.length > 0 ? 'allowed' : 'info',
    reasonCode: createdTasks.length > 0 ? null : 'no_followups_needed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      created_count: createdTasks.length,
      ai_gate_status: aiGate.status,
    }),
  });

  // CI gate passes only when the scan found nothing at all to complain about.
  const ciGateOk =
    report.conflicts.length === 0 &&
    report.fileConflicts.length === 0 &&
    report.unclaimedChanges.length === 0 &&
    report.complianceSummary.non_compliant === 0 &&
    report.complianceSummary.stale === 0;

  return {
    pipeline_id: pipelineId,
    created: createdTasks,
    created_count: createdTasks.length,
    ai_gate_status: aiGate.status,
    ci_gate_ok: ciGateOk,
  };
}
999
+
1000
/**
 * Drive a pipeline through up to `maxIterations` rounds of
 * resume-retries → dispatch → follow-up creation → PR-readiness check.
 *
 * Terminates early with status 'ready' once the PR summary reports ready,
 * or 'blocked' when an iteration makes no progress at all (nothing
 * dispatched, nothing resumed, no follow-ups created). Otherwise returns
 * status 'max_iterations' after the loop is exhausted.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Repository root.
 * @param {object} options
 * @param {string} options.pipelineId - Pipeline to execute.
 * @param {string[]} [options.agentCommand=[]] - Command used to launch agents.
 * @param {string} [options.agentName='pipeline-runner'] - Agent identity for leases.
 * @param {number} [options.maxIterations=3] - Upper bound on execution rounds.
 * @param {number} [options.maxRetries=1] - Per-task retry budget.
 * @param {number} [options.retryBackoffMs=0] - Delay before a retry is eligible.
 * @param {number} [options.timeoutMs=0] - Per-task execution timeout (0 = none).
 * @returns {Promise<object>} Final status, per-iteration records, and the PR summary.
 */
export async function executePipeline(
  db,
  repoRoot,
  {
    pipelineId,
    agentCommand = [],
    agentName = 'pipeline-runner',
    maxIterations = 3,
    maxRetries = 1,
    retryBackoffMs = 0,
    timeoutMs = 0,
  },
) {
  const history = [];

  // Shape returned when there is no pending work to dispatch this round.
  const idleRun = () => ({
    pipeline_id: pipelineId,
    assigned: [],
    launched: [],
    executed: [],
    remaining_pending: 0,
  });

  for (let iteration = 1; iteration <= maxIterations; iteration++) {
    // Re-queue tasks whose retry backoff has elapsed before dispatching.
    const resumedTasks = resumeRetryablePipelineTasks(db, pipelineId, {
      maxRetries,
      retryBackoffMs,
      timeoutMs,
    });
    const before = getPipelineStatus(db, pipelineId);
    // NOTE(review): runPipelineIteration is not awaited here; confirm it is
    // synchronous, since `.assigned` / `.executed` are read immediately below.
    const run = before.counts.pending > 0
      ? runPipelineIteration(db, repoRoot, {
          pipelineId,
          agentCommand,
          agentName,
          maxRetries,
          retryBackoffMs,
          timeoutMs,
        })
      : idleRun();
    const followups = await createPipelineFollowupTasks(db, repoRoot, pipelineId);
    const pr = await buildPipelinePrSummary(db, repoRoot, pipelineId);
    const after = getPipelineStatus(db, pipelineId);

    const failedExecutions = run.executed.filter((item) => !item.ok).length;
    const scheduledRetries = run.executed.filter((item) => item.retried).length;
    history.push({
      iteration,
      before: before.counts,
      resumed_retries: resumedTasks.length,
      dispatched: run.assigned.length,
      executed: run.executed.length,
      executed_failures: failedExecutions,
      retries_scheduled: scheduledRetries,
      followups_created: followups.created_count,
      ready: pr.ready,
      ai_gate_status: pr.ai_gate.status,
      after: after.counts,
    });

    if (pr.ready) {
      logAuditEvent(db, {
        eventType: 'pipeline_exec',
        status: 'allowed',
        details: JSON.stringify({
          pipeline_id: pipelineId,
          outcome: 'ready',
          iteration,
        }),
      });
      return {
        pipeline_id: pipelineId,
        status: 'ready',
        iterations: history,
        pr,
      };
    }

    // No dispatch, no resumed retries, no follow-ups: nothing can change on
    // another pass, so report the pipeline as blocked instead of spinning.
    const madeProgress =
      run.assigned.length > 0 || resumedTasks.length > 0 || followups.created_count > 0;
    if (!madeProgress) {
      logAuditEvent(db, {
        eventType: 'pipeline_exec',
        status: 'warn',
        reasonCode: 'pipeline_blocked',
        details: JSON.stringify({
          pipeline_id: pipelineId,
          outcome: 'blocked',
          iteration,
        }),
      });
      return {
        pipeline_id: pipelineId,
        status: 'blocked',
        iterations: history,
        pr,
      };
    }
  }

  // Loop exhausted without reaching readiness or deadlock.
  const pr = await buildPipelinePrSummary(db, repoRoot, pipelineId);
  logAuditEvent(db, {
    eventType: 'pipeline_exec',
    status: 'warn',
    reasonCode: 'max_iterations_reached',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      outcome: 'max_iterations',
      iterations: maxIterations,
    }),
  });

  return {
    pipeline_id: pipelineId,
    status: 'max_iterations',
    iterations: history,
    pr,
  };
}