switchman-dev 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,15 @@
1
1
  import { spawn, spawnSync } from 'child_process';
2
- import { mkdirSync, writeFileSync } from 'fs';
3
- import { join } from 'path';
2
+ import { existsSync, mkdirSync, realpathSync, writeFileSync } from 'fs';
3
+ import { tmpdir } from 'os';
4
+ import { basename, join } from 'path';
4
5
 
5
- import { completeLeaseTask, createTask, failLeaseTask, getTaskSpec, listAuditEvents, listLeases, listTasks, listWorktrees, logAuditEvent, retryTask, startTaskLease, upsertTaskSpec } from './db.js';
6
+ import { completeLeaseTask, createTask, createTempResource, failLeaseTask, finishOperationJournalEntry, getTaskSpec, listAuditEvents, listBoundaryValidationStates, listDependencyInvalidations, listLeases, listMergeQueue, listPolicyOverrides, listTasks, listTempResources, listWorktrees, logAuditEvent, retryTask, startOperationJournalEntry, startTaskLease, updateTempResource, upsertTaskSpec } from './db.js';
6
7
  import { scanAllWorktrees } from './detector.js';
7
8
  import { runAiMergeGate } from './merge-gate.js';
8
9
  import { evaluateTaskOutcome } from './outcome.js';
10
+ import { loadChangePolicy } from './policy.js';
9
11
  import { buildTaskSpec, planPipelineTasks } from './planner.js';
10
- import { getWorktreeBranch } from './git.js';
12
+ import { getWorktreeBranch, gitBranchExists, gitMaterializeIntegrationBranch, gitPrepareIntegrationRecoveryWorktree, gitRemoveWorktree, gitRevParse, listGitWorktrees } from './git.js';
11
13
 
12
14
  function sleepSync(ms) {
13
15
  if (ms > 0) {
@@ -37,6 +39,10 @@ function parseDependencies(description) {
37
39
  .filter(Boolean);
38
40
  }
39
41
 
42
+ function stringifyResourceDetails(details) {
43
+ return JSON.stringify(details);
44
+ }
45
+
40
46
  function getPipelineMetadata(db, pipelineId) {
41
47
  const events = listAuditEvents(db, { eventType: 'pipeline_created', limit: 500 });
42
48
  for (const event of events) {
@@ -52,6 +58,276 @@ function getPipelineMetadata(db, pipelineId) {
52
58
  return null;
53
59
  }
54
60
 
61
+ function parseAuditDetails(details) {
62
+ try {
63
+ return JSON.parse(details || '{}');
64
+ } catch {
65
+ return {};
66
+ }
67
+ }
68
+
69
+ function pipelineOwnsAuditEvent(event, pipelineId) {
70
+ if (event.task_id?.startsWith(`${pipelineId}-`)) return true;
71
+ const details = parseAuditDetails(event.details);
72
+ if (details.pipeline_id === pipelineId) return true;
73
+ if (details.source_pipeline_id === pipelineId) return true;
74
+ if (Array.isArray(details.task_ids) && details.task_ids.some((taskId) => String(taskId).startsWith(`${pipelineId}-`))) {
75
+ return true;
76
+ }
77
+ return false;
78
+ }
79
+
80
+ function buildPipelineTrustAudit(db, pipelineId, { limit = 8 } = {}) {
81
+ const interestingEventTypes = new Set([
82
+ 'boundary_validation_state',
83
+ 'dependency_invalidations_updated',
84
+ 'pipeline_followups_created',
85
+ 'policy_override_created',
86
+ 'policy_override_revoked',
87
+ 'task_retried',
88
+ 'pipeline_task_retry_scheduled',
89
+ ]);
90
+
91
+ const events = listAuditEvents(db, { limit: 2000 })
92
+ .filter((event) => interestingEventTypes.has(event.event_type))
93
+ .filter((event) => pipelineOwnsAuditEvent(event, pipelineId))
94
+ .slice(0, limit);
95
+
96
+ const mappedEvents = events.map((event) => {
97
+ const details = parseAuditDetails(event.details);
98
+
99
+ switch (event.event_type) {
100
+ case 'boundary_validation_state': {
101
+ const missing = details.missing_task_types || [];
102
+ const summary = details.status === 'satisfied'
103
+ ? 'Policy validation requirements are currently satisfied.'
104
+ : missing.length > 0
105
+ ? `Policy validation is waiting on ${missing.join(', ')}.`
106
+ : `Policy validation state changed to ${details.status || 'pending'}.`;
107
+ return {
108
+ category: 'policy',
109
+ created_at: event.created_at,
110
+ event_type: event.event_type,
111
+ status: event.status,
112
+ reason_code: event.reason_code || null,
113
+ summary,
114
+ missing_task_types: missing,
115
+ next_action: missing.length > 0 ? `switchman pipeline review ${pipelineId}` : `switchman pipeline status ${pipelineId}`,
116
+ };
117
+ }
118
+ case 'dependency_invalidations_updated':
119
+ {
120
+ const reasonTypes = details.reason_types || [];
121
+ const revalidationSets = details.revalidation_sets || [];
122
+ const reasonSummary = revalidationSets.length > 0
123
+ ? ` across ${revalidationSets.join(', ')} revalidation`
124
+ : reasonTypes.length > 0
125
+ ? ` across ${reasonTypes.join(', ')}`
126
+ : '';
127
+ return {
128
+ category: 'stale',
129
+ created_at: event.created_at,
130
+ event_type: event.event_type,
131
+ status: event.status,
132
+ reason_code: event.reason_code || null,
133
+ summary: `A shared change marked ${details.stale_count || 0} dependent task${details.stale_count === 1 ? '' : 's'} stale${reasonSummary}.`,
134
+ affected_task_ids: details.affected_task_ids || [],
135
+ next_action: `switchman explain stale --pipeline ${pipelineId}`,
136
+ };
137
+ }
138
+ case 'pipeline_followups_created':
139
+ return {
140
+ category: 'remediation',
141
+ created_at: event.created_at,
142
+ event_type: event.event_type,
143
+ status: event.status,
144
+ reason_code: event.reason_code || null,
145
+ summary: details.created_count > 0
146
+ ? `Created ${details.created_count} follow-up task${details.created_count === 1 ? '' : 's'} to satisfy validation or policy work.`
147
+ : 'No new follow-up tasks were needed after review.',
148
+ created_count: details.created_count || 0,
149
+ next_action: details.created_count > 0 ? `switchman pipeline status ${pipelineId}` : `switchman pipeline status ${pipelineId}`,
150
+ };
151
+ case 'policy_override_created':
152
+ return {
153
+ category: 'policy',
154
+ created_at: event.created_at,
155
+ event_type: event.event_type,
156
+ status: event.status,
157
+ reason_code: event.reason_code || null,
158
+ summary: `Policy override ${details.override_id || 'unknown'} was approved${details.task_types?.length ? ` for ${details.task_types.join(', ')}` : ''}${details.reason ? `: ${details.reason}` : ''}.`,
159
+ next_action: `switchman pipeline status ${pipelineId}`,
160
+ };
161
+ case 'policy_override_revoked':
162
+ return {
163
+ category: 'policy',
164
+ created_at: event.created_at,
165
+ event_type: event.event_type,
166
+ status: event.status,
167
+ reason_code: event.reason_code || null,
168
+ summary: `Policy override ${details.override_id || 'unknown'} was revoked${details.revoked_reason ? `: ${details.revoked_reason}` : ''}.`,
169
+ next_action: `switchman pipeline status ${pipelineId}`,
170
+ };
171
+ case 'task_retried':
172
+ case 'pipeline_task_retry_scheduled':
173
+ return {
174
+ category: 'remediation',
175
+ created_at: event.created_at,
176
+ event_type: event.event_type,
177
+ status: event.status,
178
+ reason_code: event.reason_code || null,
179
+ summary: `Scheduled revalidation for ${event.task_id || 'a pipeline task'}.`,
180
+ task_id: event.task_id || null,
181
+ next_action: `switchman pipeline status ${pipelineId}`,
182
+ };
183
+ default:
184
+ return {
185
+ category: 'trust',
186
+ created_at: event.created_at,
187
+ event_type: event.event_type,
188
+ status: event.status,
189
+ reason_code: event.reason_code || null,
190
+ summary: event.event_type,
191
+ next_action: `switchman pipeline status ${pipelineId}`,
192
+ };
193
+ }
194
+ });
195
+
196
+ const staleClusterEntries = buildStaleClusters(listDependencyInvalidations(db, { pipelineId }), { pipelineId })
197
+ .map((cluster) => ({
198
+ category: 'stale',
199
+ created_at: cluster.invalidations[0]?.created_at || null,
200
+ event_type: 'stale_cluster_active',
201
+ status: cluster.severity === 'block' ? 'denied' : 'warn',
202
+ reason_code: 'dependent_work_stale',
203
+ summary: `${cluster.title}.`,
204
+ affected_task_ids: cluster.affected_task_ids,
205
+ next_action: cluster.next_action,
206
+ }));
207
+
208
+ const staleWaveEntries = summarizeStaleCausalWaves(
209
+ buildStaleClusters(listDependencyInvalidations(db, { pipelineId }), { pipelineId }),
210
+ ).map((wave) => ({
211
+ category: 'stale',
212
+ created_at: null,
213
+ event_type: 'stale_wave_active',
214
+ status: 'warn',
215
+ reason_code: 'dependent_work_stale',
216
+ summary: `${wave.summary}. Affects ${wave.affected_pipeline_ids.join(', ') || 'unknown'} across ${wave.cluster_count} stale cluster${wave.cluster_count === 1 ? '' : 's'}.`,
217
+ affected_pipeline_ids: wave.affected_pipeline_ids,
218
+ next_action: `switchman explain stale --pipeline ${pipelineId}`,
219
+ }));
220
+
221
+ return [...mappedEvents, ...staleWaveEntries, ...staleClusterEntries]
222
+ .sort((a, b) => String(b.created_at || '').localeCompare(String(a.created_at || '')))
223
+ .slice(0, limit);
224
+ }
225
+
226
+ function buildPipelinePolicyEvidence(status, requiredTaskTypes = [], { completedLeaseByTask = new Map(), completionAuditByTask = new Map() } = {}) {
227
+ const completedTasks = (status.tasks || []).filter((task) => task.status === 'done' && task.task_spec?.task_type);
228
+ const pipelineId = status.pipeline_id;
229
+ const evidenceObjects = [];
230
+ const evidenceByTaskType = {};
231
+
232
+ for (const taskType of requiredTaskTypes) {
233
+ evidenceByTaskType[taskType] = completedTasks
234
+ .filter((task) => task.task_spec?.task_type === taskType)
235
+ .map((task) => {
236
+ const lease = completedLeaseByTask.get(task.id) || null;
237
+ const auditEvent = completionAuditByTask.get(task.id) || null;
238
+ const evidence = {
239
+ evidence_id: `policy-evidence:${pipelineId}:${taskType}:${task.id}`,
240
+ schema_version: 1,
241
+ requirement_key: `completed_task_type:${taskType}`,
242
+ requirement_type: 'completed_task_type',
243
+ pipeline_id: pipelineId,
244
+ task_id: task.id,
245
+ title: task.title,
246
+ task_type: task.task_spec?.task_type || null,
247
+ worktree: task.worktree || task.suggested_worktree || null,
248
+ artifact_path: task.task_spec?.primary_output_path || null,
249
+ subsystem_tags: task.task_spec?.subsystem_tags || [],
250
+ satisfied_at: task.completed_at || auditEvent?.created_at || null,
251
+ satisfied_by: {
252
+ actor_type: lease?.agent ? 'agent' : 'task_completion',
253
+ agent: lease?.agent || null,
254
+ worktree: lease?.worktree || task.worktree || task.suggested_worktree || null,
255
+ lease_id: lease?.id || null,
256
+ audit_event_id: auditEvent?.id || null,
257
+ audit_event_type: auditEvent?.event_type || null,
258
+ },
259
+ };
260
+ evidenceObjects.push(evidence);
261
+ return evidence;
262
+ });
263
+ }
264
+
265
+ return {
266
+ evidence_by_task_type: evidenceByTaskType,
267
+ evidence_objects: evidenceObjects,
268
+ };
269
+ }
270
+
271
+ function buildPipelinePolicySatisfactionHistory(policyState) {
272
+ return (policyState.evidence_objects || [])
273
+ .map((evidence) => ({
274
+ category: 'policy',
275
+ created_at: evidence.satisfied_at || null,
276
+ event_type: 'policy_requirement_satisfied',
277
+ status: 'allowed',
278
+ reason_code: null,
279
+ summary: `Policy requirement ${evidence.requirement_key} is satisfied by ${evidence.task_id}.`,
280
+ requirement_key: evidence.requirement_key,
281
+ evidence_id: evidence.evidence_id,
282
+ task_id: evidence.task_id,
283
+ satisfied_by: evidence.satisfied_by,
284
+ next_action: `switchman pipeline status ${policyState.pipeline_id || '<pipelineId>'}`,
285
+ }))
286
+ .sort((a, b) => String(b.created_at || '').localeCompare(String(a.created_at || '')));
287
+ }
288
+
289
+ function buildPipelinePolicyOverrideState(db, pipelineId, requiredTaskTypes = []) {
290
+ if (!db || !pipelineId) {
291
+ return {
292
+ active_overrides: [],
293
+ active_task_types: [],
294
+ active_requirement_keys: [],
295
+ override_history: [],
296
+ };
297
+ }
298
+
299
+ const overrides = listPolicyOverrides(db, { pipelineId, limit: 200 });
300
+ const activeOverrides = overrides.filter((entry) => entry.status === 'active');
301
+ const activeTaskTypes = uniq(activeOverrides.flatMap((entry) => entry.task_types || []));
302
+ const activeRequirementKeys = uniq(activeOverrides.flatMap((entry) => entry.requirement_keys || []));
303
+
304
+ const overrideHistory = overrides.map((entry) => ({
305
+ category: 'policy',
306
+ created_at: entry.status === 'active' ? entry.created_at : (entry.revoked_at || entry.created_at),
307
+ event_type: entry.status === 'active' ? 'policy_override_active' : 'policy_override_revoked',
308
+ status: entry.status === 'active' ? 'warn' : 'info',
309
+ reason_code: entry.status === 'active' ? 'policy_override' : 'policy_override_revoked',
310
+ override_id: entry.id,
311
+ task_types: entry.task_types || [],
312
+ requirement_keys: entry.requirement_keys || [],
313
+ approved_by: entry.approved_by || null,
314
+ summary: entry.status === 'active'
315
+ ? `Policy override ${entry.id} allows ${entry.task_types?.join(', ') || 'governed requirements'} for pipeline ${pipelineId}.`
316
+ : `Policy override ${entry.id} was revoked${entry.revoked_reason ? `: ${entry.revoked_reason}` : ''}.`,
317
+ next_action: `switchman pipeline status ${pipelineId}`,
318
+ }));
319
+
320
+ const missingButOverridden = requiredTaskTypes.filter((taskType) => activeTaskTypes.includes(taskType));
321
+
322
+ return {
323
+ active_overrides: activeOverrides,
324
+ active_task_types: activeTaskTypes,
325
+ active_requirement_keys: activeRequirementKeys,
326
+ missing_but_overridden: missingButOverridden,
327
+ override_history: overrideHistory,
328
+ };
329
+ }
330
+
55
331
  function withTaskSpec(db, task) {
56
332
  return {
57
333
  ...task,
@@ -67,6 +343,373 @@ function nextPipelineTaskId(tasks, pipelineId) {
67
343
  return `${pipelineId}-${String(nextNumber).padStart(2, '0')}`;
68
344
  }
69
345
 
346
+ function rankEnforcement(level = 'none') {
347
+ return level === 'blocked' ? 2 : level === 'warn' ? 1 : 0;
348
+ }
349
+
350
+ function normalizeDependencyInvalidation(item) {
351
+ const details = item.details || {};
352
+ const objectNames = Array.isArray(details.object_names) ? details.object_names : [];
353
+ const contractNames = Array.isArray(details.contract_names) ? details.contract_names : [];
354
+ const modulePaths = Array.isArray(details.module_paths) ? details.module_paths : [];
355
+ return {
356
+ ...item,
357
+ severity: item.severity || details.severity || (item.reason_type === 'semantic_contract_drift' ? 'blocked' : 'warn'),
358
+ details,
359
+ revalidation_set: details.revalidation_set || (item.reason_type === 'semantic_contract_drift' ? 'contract' : item.reason_type === 'semantic_object_overlap' ? 'semantic_object' : item.reason_type === 'shared_module_drift' ? 'shared_module' : item.reason_type === 'subsystem_overlap' ? 'subsystem' : 'scope'),
360
+ stale_area: item.reason_type === 'subsystem_overlap'
361
+ ? `subsystem:${item.subsystem_tag}`
362
+ : item.reason_type === 'semantic_contract_drift'
363
+ ? `contract:${contractNames.join('|') || 'unknown'}`
364
+ : item.reason_type === 'semantic_object_overlap'
365
+ ? `object:${objectNames.join('|') || 'unknown'}`
366
+ : item.reason_type === 'shared_module_drift'
367
+ ? `module:${modulePaths.join('|') || 'unknown'}`
368
+ : `${item.source_scope_pattern} ↔ ${item.affected_scope_pattern}`,
369
+ summary: item.reason_type === 'semantic_contract_drift'
370
+ ? `${details?.source_task_title || item.source_task_id} changed shared contract ${contractNames.join(', ') || 'unknown'}`
371
+ : item.reason_type === 'semantic_object_overlap'
372
+ ? `${details?.source_task_title || item.source_task_id} changed shared exported object ${objectNames.join(', ') || 'unknown'}`
373
+ : item.reason_type === 'shared_module_drift'
374
+ ? `${details?.source_task_title || item.source_task_id} changed shared module ${modulePaths.join(', ') || 'unknown'} used by ${(details.dependent_files || []).join(', ') || item.affected_task_id}`
375
+ : `${details?.source_task_title || item.source_task_id} changed shared ${item.reason_type === 'subsystem_overlap' ? `subsystem:${item.subsystem_tag}` : 'scope'}`,
376
+ };
377
+ }
378
+
379
+ function buildStaleClusters(invalidations = [], { pipelineId = null } = {}) {
380
+ const clusters = new Map();
381
+ for (const invalidation of invalidations.map(normalizeDependencyInvalidation)) {
382
+ if (pipelineId && invalidation.affected_pipeline_id !== pipelineId) continue;
383
+ const clusterKey = invalidation.affected_pipeline_id
384
+ ? `pipeline:${invalidation.affected_pipeline_id}`
385
+ : `task:${invalidation.affected_task_id}`;
386
+ if (!clusters.has(clusterKey)) {
387
+ clusters.set(clusterKey, {
388
+ key: clusterKey,
389
+ affected_pipeline_id: invalidation.affected_pipeline_id || null,
390
+ affected_task_ids: new Set(),
391
+ source_task_titles: new Set(),
392
+ source_worktrees: new Set(),
393
+ stale_areas: new Set(),
394
+ revalidation_sets: new Set(),
395
+ dependent_files: new Set(),
396
+ dependent_areas: new Set(),
397
+ module_paths: new Set(),
398
+ invalidations: [],
399
+ severity: 'warn',
400
+ highest_affected_priority: 0,
401
+ highest_source_priority: 0,
402
+ });
403
+ }
404
+ const cluster = clusters.get(clusterKey);
405
+ cluster.invalidations.push(invalidation);
406
+ cluster.affected_task_ids.add(invalidation.affected_task_id);
407
+ if (invalidation.details?.source_task_title) cluster.source_task_titles.add(invalidation.details.source_task_title);
408
+ if (invalidation.source_worktree) cluster.source_worktrees.add(invalidation.source_worktree);
409
+ cluster.stale_areas.add(invalidation.stale_area);
410
+ if (invalidation.revalidation_set) cluster.revalidation_sets.add(invalidation.revalidation_set);
411
+ for (const filePath of invalidation.details?.dependent_files || []) cluster.dependent_files.add(filePath);
412
+ for (const area of invalidation.details?.dependent_areas || []) cluster.dependent_areas.add(area);
413
+ for (const modulePath of invalidation.details?.module_paths || []) cluster.module_paths.add(modulePath);
414
+ if (invalidation.severity === 'blocked') cluster.severity = 'block';
415
+ cluster.highest_affected_priority = Math.max(cluster.highest_affected_priority, Number(invalidation.details?.affected_task_priority || 0));
416
+ cluster.highest_source_priority = Math.max(cluster.highest_source_priority, Number(invalidation.details?.source_task_priority || 0));
417
+ }
418
+
419
+ const clusterEntries = [...clusters.values()].map((cluster) => ({
420
+ key: cluster.key,
421
+ affected_pipeline_id: cluster.affected_pipeline_id,
422
+ affected_task_ids: [...cluster.affected_task_ids],
423
+ invalidation_count: cluster.invalidations.length,
424
+ source_task_ids: [...new Set(cluster.invalidations.map((item) => item.source_task_id).filter(Boolean))],
425
+ source_pipeline_ids: [...new Set(cluster.invalidations.map((item) => item.source_pipeline_id).filter(Boolean))],
426
+ source_task_titles: [...cluster.source_task_titles],
427
+ source_worktrees: [...cluster.source_worktrees],
428
+ stale_areas: [...cluster.stale_areas],
429
+ revalidation_sets: [...cluster.revalidation_sets],
430
+ dependent_files: [...cluster.dependent_files],
431
+ dependent_areas: [...cluster.dependent_areas],
432
+ module_paths: [...cluster.module_paths],
433
+ revalidation_set_type: cluster.revalidation_sets.has('contract')
434
+ ? 'contract'
435
+ : cluster.revalidation_sets.has('shared_module')
436
+ ? 'shared_module'
437
+ : cluster.revalidation_sets.has('semantic_object')
438
+ ? 'semantic_object'
439
+ : cluster.revalidation_sets.has('subsystem')
440
+ ? 'subsystem'
441
+ : 'scope',
442
+ rerun_priority: cluster.severity === 'block'
443
+ ? (cluster.revalidation_sets.has('contract') || cluster.highest_affected_priority >= 8 ? 'urgent' : 'high')
444
+ : cluster.revalidation_sets.has('shared_module') && cluster.dependent_files.size >= 3
445
+ ? 'high'
446
+ : cluster.highest_affected_priority >= 8
447
+ ? 'high'
448
+ : cluster.highest_affected_priority >= 5
449
+ ? 'medium'
450
+ : 'low',
451
+ rerun_priority_score: (cluster.severity === 'block' ? 100 : 0)
452
+ + (cluster.revalidation_sets.has('contract') ? 30 : cluster.revalidation_sets.has('shared_module') ? 20 : cluster.revalidation_sets.has('semantic_object') ? 15 : 0)
453
+ + (cluster.highest_affected_priority * 3)
454
+ + (cluster.dependent_files.size * 4)
455
+ + (cluster.dependent_areas.size * 2)
456
+ + cluster.module_paths.size
457
+ + cluster.invalidations.length,
458
+ rerun_breadth_score: (cluster.dependent_files.size * 4) + (cluster.dependent_areas.size * 2) + cluster.module_paths.size,
459
+ highest_affected_priority: cluster.highest_affected_priority,
460
+ highest_source_priority: cluster.highest_source_priority,
461
+ severity: cluster.severity,
462
+ invalidations: cluster.invalidations,
463
+ title: cluster.affected_pipeline_id
464
+ ? `Pipeline ${cluster.affected_pipeline_id} has ${cluster.invalidations.length} stale ${cluster.revalidation_sets.has('contract') ? 'contract' : cluster.revalidation_sets.has('shared_module') ? 'shared-module' : cluster.revalidation_sets.has('semantic_object') ? 'semantic-object' : 'dependency'} invalidation${cluster.invalidations.length === 1 ? '' : 's'}`
465
+ : `${[...cluster.affected_task_ids][0]} has ${cluster.invalidations.length} stale ${cluster.revalidation_sets.has('contract') ? 'contract' : cluster.revalidation_sets.has('shared_module') ? 'shared-module' : cluster.revalidation_sets.has('semantic_object') ? 'semantic-object' : 'dependency'} invalidation${cluster.invalidations.length === 1 ? '' : 's'}`,
466
+ detail: `${[...cluster.source_task_titles][0] || cluster.invalidations[0]?.source_task_id || 'unknown source'} (${[...cluster.stale_areas].join(', ')})`,
467
+ next_action: cluster.affected_pipeline_id
468
+ ? `switchman task retry-stale --pipeline ${cluster.affected_pipeline_id}`
469
+ : `switchman task retry ${[...cluster.affected_task_ids][0]}`,
470
+ }));
471
+
472
+ const causeGroups = new Map();
473
+ for (const cluster of clusterEntries) {
474
+ const primary = cluster.invalidations[0] || {};
475
+ const details = primary.details || {};
476
+ const causeKey = cluster.revalidation_set_type === 'contract'
477
+ ? `contract:${(details.contract_names || []).join('|') || cluster.stale_areas.join('|')}|source:${cluster.source_task_ids.join('|') || 'unknown'}`
478
+ : cluster.revalidation_set_type === 'shared_module'
479
+ ? `shared_module:${(details.module_paths || cluster.module_paths || []).join('|') || cluster.stale_areas.join('|')}|source:${cluster.source_task_ids.join('|') || 'unknown'}`
480
+ : cluster.revalidation_set_type === 'semantic_object'
481
+ ? `semantic_object:${(details.object_names || []).join('|') || cluster.stale_areas.join('|')}|source:${cluster.source_task_ids.join('|') || 'unknown'}`
482
+ : `dependency:${cluster.stale_areas.join('|')}|source:${cluster.source_task_ids.join('|') || 'unknown'}`;
483
+ if (!causeGroups.has(causeKey)) causeGroups.set(causeKey, []);
484
+ causeGroups.get(causeKey).push(cluster);
485
+ }
486
+
487
+ for (const [causeKey, relatedClusters] of causeGroups.entries()) {
488
+ const relatedPipelines = [...new Set(relatedClusters.map((cluster) => cluster.affected_pipeline_id).filter(Boolean))];
489
+ const primary = relatedClusters[0];
490
+ const details = primary.invalidations[0]?.details || {};
491
+ const causeSummary = primary.revalidation_set_type === 'contract'
492
+ ? `shared contract drift in ${(details.contract_names || []).join(', ') || 'unknown contract'}`
493
+ : primary.revalidation_set_type === 'shared_module'
494
+ ? `shared module drift in ${(details.module_paths || primary.module_paths || []).join(', ') || 'unknown module'}`
495
+ : primary.revalidation_set_type === 'semantic_object'
496
+ ? `shared exported object drift in ${(details.object_names || []).join(', ') || 'unknown object'}`
497
+ : `shared dependency drift across ${primary.stale_areas.join(', ')}`;
498
+ for (let index = 0; index < relatedClusters.length; index += 1) {
499
+ relatedClusters[index].causal_group_id = `cause-${causeKey}`;
500
+ relatedClusters[index].causal_group_size = relatedClusters.length;
501
+ relatedClusters[index].causal_group_rank = index + 1;
502
+ relatedClusters[index].causal_group_summary = causeSummary;
503
+ relatedClusters[index].related_affected_pipelines = relatedPipelines;
504
+ }
505
+ }
506
+
507
+ return clusterEntries.sort((a, b) =>
508
+ b.rerun_priority_score - a.rerun_priority_score
509
+ || (a.severity === 'block' ? -1 : 1) - (b.severity === 'block' ? -1 : 1)
510
+ || (a.revalidation_set_type === 'contract' ? -1 : 1) - (b.revalidation_set_type === 'contract' ? -1 : 1)
511
+ || (a.revalidation_set_type === 'shared_module' ? -1 : 1) - (b.revalidation_set_type === 'shared_module' ? -1 : 1)
512
+ || b.invalidation_count - a.invalidation_count);
513
+ }
514
+
515
+ export function summarizeStaleCausalWaves(staleClusters = []) {
516
+ const grouped = new Map();
517
+ for (const cluster of staleClusters) {
518
+ if (!cluster.causal_group_id) continue;
519
+ if (!grouped.has(cluster.causal_group_id)) {
520
+ grouped.set(cluster.causal_group_id, {
521
+ causal_group_id: cluster.causal_group_id,
522
+ summary: cluster.causal_group_summary || cluster.title,
523
+ revalidation_set_type: cluster.revalidation_set_type,
524
+ source_task_ids: [...(cluster.source_task_ids || [])],
525
+ source_pipeline_ids: [...(cluster.source_pipeline_ids || [])],
526
+ related_affected_pipelines: new Set(cluster.related_affected_pipelines || []),
527
+ affected_pipeline_ids: new Set(),
528
+ cluster_count: 0,
529
+ invalidation_count: 0,
530
+ highest_rerun_priority_score: 0,
531
+ });
532
+ }
533
+ const entry = grouped.get(cluster.causal_group_id);
534
+ if (cluster.affected_pipeline_id) entry.affected_pipeline_ids.add(cluster.affected_pipeline_id);
535
+ for (const pipelineId of cluster.related_affected_pipelines || []) entry.related_affected_pipelines.add(pipelineId);
536
+ entry.cluster_count += 1;
537
+ entry.invalidation_count += cluster.invalidation_count || 0;
538
+ entry.highest_rerun_priority_score = Math.max(entry.highest_rerun_priority_score, cluster.rerun_priority_score || 0);
539
+ }
540
+
541
+ return [...grouped.values()]
542
+ .map((entry) => ({
543
+ ...entry,
544
+ affected_pipeline_ids: [...entry.affected_pipeline_ids],
545
+ related_affected_pipelines: [...entry.related_affected_pipelines],
546
+ }))
547
+ .sort((a, b) =>
548
+ b.highest_rerun_priority_score - a.highest_rerun_priority_score
549
+ || b.cluster_count - a.cluster_count
550
+ || b.invalidation_count - a.invalidation_count
551
+ || String(a.summary).localeCompare(String(b.summary)));
552
+ }
553
+
554
+ export function getPipelineStaleWaveContext(db, pipelineId) {
555
+ if (!pipelineId) {
556
+ return {
557
+ stale_clusters: [],
558
+ stale_causal_waves: [],
559
+ shared_wave_count: 0,
560
+ largest_wave_size: 0,
561
+ primary_wave: null,
562
+ };
563
+ }
564
+
565
+ const staleClusters = buildStaleClusters(listDependencyInvalidations(db), {});
566
+ const staleCausalWaves = summarizeStaleCausalWaves(staleClusters)
567
+ .filter((wave) => wave.affected_pipeline_ids.includes(pipelineId));
568
+
569
+ return {
570
+ stale_clusters: staleClusters.filter((cluster) => cluster.affected_pipeline_id === pipelineId),
571
+ stale_causal_waves: staleCausalWaves,
572
+ shared_wave_count: staleCausalWaves.length,
573
+ largest_wave_size: staleCausalWaves.reduce((max, wave) => Math.max(max, Number(wave.cluster_count || 0)), 0),
574
+ primary_wave: staleCausalWaves[0] || null,
575
+ };
576
+ }
577
+
578
+ export function summarizePipelinePolicyState(db, status, changePolicy, boundaryValidations = []) {
579
+ const tasks = status.tasks || [];
580
+ const implementationTasks = tasks.filter((task) => task.task_spec?.task_type === 'implementation');
581
+ const completedTasks = tasks.filter((task) => task.status === 'done' && task.task_spec?.task_type);
582
+ const completedLeaseByTask = new Map();
583
+ for (const lease of status.leases || []) {
584
+ if (lease.status !== 'completed') continue;
585
+ if (!completedLeaseByTask.has(lease.task_id)) {
586
+ completedLeaseByTask.set(lease.task_id, lease);
587
+ }
588
+ }
589
+ const completionAuditByTask = new Map();
590
+ for (const event of status.audit_events || []) {
591
+ if (event.event_type !== 'task_completed' || !event.task_id) continue;
592
+ if (!completionAuditByTask.has(event.task_id)) {
593
+ completionAuditByTask.set(event.task_id, event);
594
+ }
595
+ }
596
+ const completedTaskTypes = new Set(completedTasks.map((task) => task.task_spec?.task_type).filter(Boolean));
597
+ const governedDomains = uniq(
598
+ implementationTasks
599
+ .flatMap((task) => task.task_spec?.subsystem_tags || [])
600
+ .filter((domain) => changePolicy.domain_rules?.[domain]),
601
+ );
602
+ const activeRules = governedDomains.map((domain) => ({
603
+ domain,
604
+ ...(changePolicy.domain_rules?.[domain] || { required_completed_task_types: [], enforcement: 'none', rationale: [] }),
605
+ }));
606
+ const requiredTaskTypes = uniq([
607
+ ...implementationTasks.flatMap((task) => task.task_spec?.validation_rules?.required_completed_task_types || []),
608
+ ...activeRules.flatMap((rule) => rule.required_completed_task_types || []),
609
+ ]);
610
+ const rawMissingTaskTypes = uniq([
611
+ ...boundaryValidations.flatMap((validation) => validation.missing_task_types || []),
612
+ ...requiredTaskTypes.filter((taskType) => !completedTaskTypes.has(taskType)),
613
+ ]);
614
+ const overrideState = buildPipelinePolicyOverrideState(db, status.pipeline_id, rawMissingTaskTypes);
615
+ const missingTaskTypes = rawMissingTaskTypes.filter((taskType) => !overrideState.active_task_types.includes(taskType));
616
+ const overriddenTaskTypes = rawMissingTaskTypes.filter((taskType) => overrideState.active_task_types.includes(taskType));
617
+ const satisfiedTaskTypes = requiredTaskTypes.filter((taskType) => completedTaskTypes.has(taskType));
618
+ const evidenceState = buildPipelinePolicyEvidence(status, requiredTaskTypes, {
619
+ completedLeaseByTask,
620
+ completionAuditByTask,
621
+ });
622
+ const evidenceByTaskType = evidenceState.evidence_by_task_type;
623
+ const requirementStatus = requiredTaskTypes.map((taskType) => ({
624
+ task_type: taskType,
625
+ satisfied: satisfiedTaskTypes.includes(taskType),
626
+ overridden: overriddenTaskTypes.includes(taskType),
627
+ override_ids: overrideState.active_overrides
628
+ .filter((entry) => (entry.task_types || []).includes(taskType))
629
+ .map((entry) => entry.id),
630
+ evidence: evidenceByTaskType[taskType] || [],
631
+ }));
632
+ const rationale = uniq([
633
+ ...implementationTasks.flatMap((task) => task.task_spec?.validation_rules?.rationale || []),
634
+ ...activeRules.flatMap((rule) => rule.rationale || []),
635
+ ...boundaryValidations.flatMap((validation) => validation.rationale || validation.details?.rationale || []),
636
+ ]);
637
+ const enforcement = [...implementationTasks.map((task) => task.task_spec?.validation_rules?.enforcement || 'none'), ...activeRules.map((rule) => rule.enforcement || 'none')]
638
+ .reduce((highest, current) => rankEnforcement(current) > rankEnforcement(highest) ? current : highest, 'none');
639
+
640
+ const policyState = {
641
+ active: governedDomains.length > 0 || boundaryValidations.length > 0,
642
+ domains: governedDomains,
643
+ active_rules: activeRules,
644
+ required_task_types: requiredTaskTypes,
645
+ satisfied_task_types: satisfiedTaskTypes,
646
+ raw_missing_task_types: rawMissingTaskTypes,
647
+ missing_task_types: missingTaskTypes,
648
+ overridden_task_types: overriddenTaskTypes,
649
+ requirement_status: requirementStatus,
650
+ evidence_by_task_type: evidenceByTaskType,
651
+ evidence_schema_version: 1,
652
+ evidence_objects: evidenceState.evidence_objects,
653
+ satisfaction_history: [],
654
+ overrides: overrideState.active_overrides,
655
+ override_history: overrideState.override_history,
656
+ pipeline_id: status.pipeline_id,
657
+ enforcement,
658
+ rationale,
659
+ blocked_validations: boundaryValidations.filter((validation) => validation.severity === 'blocked').length,
660
+ warned_validations: boundaryValidations.filter((validation) => validation.severity !== 'blocked').length,
661
+ summary: governedDomains.length === 0
662
+ ? 'No explicit governed-domain policy requirements are active for this pipeline.'
663
+ : missingTaskTypes.length > 0
664
+ ? `Governed domains ${governedDomains.join(', ')} still require ${missingTaskTypes.join(', ')} before landing.`
665
+ : overriddenTaskTypes.length > 0
666
+ ? `Governed domains ${governedDomains.join(', ')} are currently allowed to proceed with overrides for ${overriddenTaskTypes.join(', ')}.`
667
+ : `Governed domains ${governedDomains.join(', ')} have satisfied their current policy requirements.`,
668
+ };
669
+
670
+ policyState.satisfaction_history = [
671
+ ...buildPipelinePolicySatisfactionHistory(policyState),
672
+ ...overrideState.override_history,
673
+ ].sort((a, b) => String(b.created_at || '').localeCompare(String(a.created_at || '')));
674
+ return policyState;
675
+ }
676
+
677
+ function buildPipelinePolicyRemediationCommand(status, policyState) {
678
+ if (policyState.missing_task_types.length === 0) {
679
+ return `switchman pipeline status ${status.pipeline_id}`;
680
+ }
681
+
682
+ return `switchman pipeline review ${status.pipeline_id}`;
683
+ }
684
+
685
+ export async function evaluatePipelinePolicyGate(db, repoRoot, pipelineId) {
686
+ const status = getPipelineStatus(db, pipelineId);
687
+ const aiGate = await runAiMergeGate(db, repoRoot);
688
+ const changePolicy = loadChangePolicy(repoRoot);
689
+ const policyState = summarizePipelinePolicyState(db, status, changePolicy, aiGate.boundary_validations || []);
690
+ const blocked = policyState.active
691
+ && policyState.enforcement === 'blocked'
692
+ && policyState.missing_task_types.length > 0;
693
+ const nextAction = blocked ? buildPipelinePolicyRemediationCommand(status, policyState) : null;
694
+ const overrideApplied = policyState.overridden_task_types.length > 0;
695
+ const overrideSummary = overrideApplied
696
+ ? `Policy override active for ${policyState.overridden_task_types.join(', ')} by ${policyState.overrides.map((entry) => entry.approved_by || 'unknown').join(', ')}.`
697
+ : null;
698
+
699
+ return {
700
+ ok: !blocked,
701
+ pipeline_id: pipelineId,
702
+ reason_code: blocked ? 'policy_requirements_incomplete' : null,
703
+ summary: blocked
704
+ ? `Policy blocked landing for governed domains ${policyState.domains.join(', ')} because ${policyState.missing_task_types.join(', ')} is still required.`
705
+ : policyState.summary,
706
+ next_action: nextAction,
707
+ policy_state: policyState,
708
+ override_applied: overrideApplied,
709
+ override_summary: overrideSummary,
710
+ };
711
+ }
712
+
70
713
  export function startPipeline(db, { title, description = null, priority = 5, pipelineId = null, maxTasks = 5 }) {
71
714
  const resolvedPipelineId = pipelineId || makePipelineId();
72
715
  const registeredWorktrees = listWorktrees(db);
@@ -151,6 +794,8 @@ export function getPipelineStatus(db, pipelineId) {
151
794
  description: metadata?.description || null,
152
795
  priority: metadata?.priority || tasks[0].priority,
153
796
  counts,
797
+ leases: listLeases(db),
798
+ audit_events: listAuditEvents(db, { limit: 2000 }).filter((event) => pipelineOwnsAuditEvent(event, pipelineId)),
154
799
  tasks: tasks.map((task) => {
155
800
  const dependencies = parseDependencies(task.description);
156
801
  const blockedBy = dependencies.filter((dependencyId) =>
@@ -174,6 +819,35 @@ export function getPipelineStatus(db, pipelineId) {
174
819
  };
175
820
  }
176
821
 
822
+ export function inferPipelineIdFromBranch(db, branch) {
823
+ const normalizedBranch = String(branch || '').trim();
824
+ if (!normalizedBranch) return null;
825
+
826
+ const landingPrefix = 'switchman/pipeline-landing/';
827
+ if (normalizedBranch.startsWith(landingPrefix)) {
828
+ return normalizedBranch.slice(landingPrefix.length) || null;
829
+ }
830
+
831
+ const worktreesByName = new Map(listWorktrees(db).map((worktree) => [worktree.name, worktree]));
832
+ const matchingPipelines = new Set();
833
+
834
+ for (const task of listTasks(db)) {
835
+ const taskSpec = getTaskSpec(db, task.id);
836
+ const pipelineId = taskSpec?.pipeline_id || null;
837
+ if (!pipelineId || !task.worktree) continue;
838
+ const worktree = worktreesByName.get(task.worktree);
839
+ if (worktree?.branch === normalizedBranch) {
840
+ matchingPipelines.add(pipelineId);
841
+ }
842
+ }
843
+
844
+ if (matchingPipelines.size === 1) {
845
+ return [...matchingPipelines][0];
846
+ }
847
+
848
+ return null;
849
+ }
850
+
177
851
  function parseTaskFailure(description) {
178
852
  const lines = String(description || '')
179
853
  .split('\n')
@@ -490,49 +1164,80 @@ function runPipelineIteration(
490
1164
  retryBackoffMs,
491
1165
  timeoutMs,
492
1166
  });
493
- const beforeHead = getHeadRevision(assignment.worktree_path);
494
- const result = spawnSync(command, args, {
495
- cwd: assignment.worktree_path,
496
- env: buildLaunchEnv(
497
- repoRoot,
498
- { id: assignment.task_id, title: assignment.title, task_spec: assignment.task_spec },
499
- { id: assignment.lease_id },
500
- { name: assignment.worktree, path: assignment.worktree_path },
501
- ),
502
- encoding: 'utf8',
503
- timeout: executionPolicy.timeout_ms > 0 ? executionPolicy.timeout_ms : undefined,
504
- });
505
- const afterHead = getHeadRevision(assignment.worktree_path);
506
-
507
- const timedOut = result.error?.code === 'ETIMEDOUT';
508
- const commandOk = !result.error && result.status === 0;
509
- let evaluation = commandOk
510
- ? evaluateTaskOutcome(db, repoRoot, { leaseId: assignment.lease_id })
511
- : null;
512
- if (commandOk && evaluation?.reason_code === 'no_changes_detected' && beforeHead && afterHead && beforeHead !== afterHead) {
513
- evaluation = {
514
- status: 'accepted',
515
- reason_code: null,
516
- changed_files: [],
517
- claimed_files: [],
518
- findings: ['task created a new commit with no remaining uncommitted diff'],
519
- };
520
- }
521
- const ok = commandOk && evaluation?.status === 'accepted';
1167
+ let result = { status: null, stdout: '', stderr: '', error: null };
1168
+ let timedOut = false;
1169
+ let evaluation = null;
1170
+ let ok = false;
1171
+ let reasonCode = 'agent_command_failed';
522
1172
  let retry = {
523
1173
  retried: false,
524
1174
  retry_attempt: getTaskRetryCount(db, assignment.task_id),
525
1175
  retries_remaining: Math.max(0, executionPolicy.max_retries - getTaskRetryCount(db, assignment.task_id)),
526
1176
  retry_delay_ms: 0,
527
1177
  };
1178
+
1179
+ try {
1180
+ const beforeHead = getHeadRevision(assignment.worktree_path);
1181
+ result = spawnSync(command, args, {
1182
+ cwd: assignment.worktree_path,
1183
+ env: buildLaunchEnv(
1184
+ repoRoot,
1185
+ { id: assignment.task_id, title: assignment.title, task_spec: assignment.task_spec },
1186
+ { id: assignment.lease_id },
1187
+ { name: assignment.worktree, path: assignment.worktree_path },
1188
+ ),
1189
+ encoding: 'utf8',
1190
+ timeout: executionPolicy.timeout_ms > 0 ? executionPolicy.timeout_ms : undefined,
1191
+ });
1192
+ const afterHead = getHeadRevision(assignment.worktree_path);
1193
+
1194
+ timedOut = result.error?.code === 'ETIMEDOUT';
1195
+ const commandOk = !result.error && result.status === 0;
1196
+ evaluation = commandOk
1197
+ ? evaluateTaskOutcome(db, repoRoot, { leaseId: assignment.lease_id })
1198
+ : null;
1199
+ if (commandOk && evaluation?.reason_code === 'no_changes_detected' && beforeHead && afterHead && beforeHead !== afterHead) {
1200
+ evaluation = {
1201
+ status: 'accepted',
1202
+ reason_code: null,
1203
+ changed_files: [],
1204
+ claimed_files: [],
1205
+ findings: ['task created a new commit with no remaining uncommitted diff'],
1206
+ };
1207
+ }
1208
+ ok = commandOk && evaluation?.status === 'accepted';
1209
+ reasonCode = timedOut
1210
+ ? 'task_execution_timeout'
1211
+ : ok
1212
+ ? null
1213
+ : 'agent_command_failed';
1214
+ } catch (err) {
1215
+ result = {
1216
+ status: null,
1217
+ stdout: result.stdout || '',
1218
+ stderr: `${result.stderr || ''}${result.stderr ? '\n' : ''}${err.message}`,
1219
+ error: err,
1220
+ };
1221
+ timedOut = false;
1222
+ evaluation = {
1223
+ status: 'rejected',
1224
+ reason_code: 'pipeline_execution_internal_error',
1225
+ findings: [err.message],
1226
+ };
1227
+ ok = false;
1228
+ reasonCode = 'pipeline_execution_internal_error';
1229
+ }
1230
+
528
1231
  if (ok) {
529
1232
  completeLeaseTask(db, assignment.lease_id);
530
1233
  } else {
531
- const failureReason = !commandOk
532
- ? (timedOut
533
- ? `agent command timed out after ${executionPolicy.timeout_ms}ms`
534
- : (result.error?.message || `agent command exited with status ${result.status}`))
535
- : `${evaluation.reason_code}: ${evaluation.findings.join('; ')}`;
1234
+ const failureReason = result.error && !timedOut && result.status === null
1235
+ ? `${reasonCode}: ${result.error.message}`
1236
+ : result.error || result.status !== 0
1237
+ ? (timedOut
1238
+ ? `agent command timed out after ${executionPolicy.timeout_ms}ms`
1239
+ : (result.error?.message || `agent command exited with status ${result.status}`))
1240
+ : `${evaluation.reason_code}: ${evaluation.findings.join('; ')}`;
536
1241
  failLeaseTask(db, assignment.lease_id, failureReason);
537
1242
  retry = scheduleTaskRetry(db, {
538
1243
  pipelineId,
@@ -562,7 +1267,7 @@ function runPipelineIteration(
562
1267
  logAuditEvent(db, {
563
1268
  eventType: 'pipeline_task_executed',
564
1269
  status: ok ? 'allowed' : 'denied',
565
- reasonCode: ok ? null : (timedOut ? 'task_execution_timeout' : 'agent_command_failed'),
1270
+ reasonCode,
566
1271
  worktree: assignment.worktree,
567
1272
  taskId: assignment.task_id,
568
1273
  leaseId: assignment.lease_id,
@@ -593,6 +1298,7 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
593
1298
  const status = getPipelineStatus(db, pipelineId);
594
1299
  const report = await scanAllWorktrees(db, repoRoot);
595
1300
  const aiGate = await runAiMergeGate(db, repoRoot);
1301
+ const changePolicy = loadChangePolicy(repoRoot);
596
1302
  const allLeases = listLeases(db);
597
1303
  const ciGateOk = report.conflicts.length === 0
598
1304
  && report.fileConflicts.length === 0
@@ -629,6 +1335,15 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
629
1335
  }));
630
1336
  const changedFiles = uniq(worktreeChanges.flatMap((entry) => entry.files));
631
1337
  const subsystemTags = uniq(completedTasks.flatMap((task) => task.task_spec?.subsystem_tags || []));
1338
+ const policyState = summarizePipelinePolicyState(db, status, changePolicy, aiGate.boundary_validations || []);
1339
+ const policyOverrideSummary = policyState.overridden_task_types.length > 0
1340
+ ? `Landing currently relies on policy overrides for ${policyState.overridden_task_types.join(', ')}.`
1341
+ : null;
1342
+ const staleClusters = buildStaleClusters(aiGate.dependency_invalidations || [], { pipelineId });
1343
+ const staleCausalWaves = summarizeStaleCausalWaves(staleClusters);
1344
+ const trustAudit = [...policyState.satisfaction_history, ...buildPipelineTrustAudit(db, pipelineId)]
1345
+ .sort((a, b) => String(b.created_at || '').localeCompare(String(a.created_at || '')))
1346
+ .slice(0, 8);
632
1347
  const riskNotes = [];
633
1348
  if (!ciGateOk) riskNotes.push('Repo gate is blocked by conflicts, unmanaged changes, or stale worktrees.');
634
1349
  if (aiGate.status !== 'pass') riskNotes.push(aiGate.summary);
@@ -647,10 +1362,15 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
647
1362
  const reviewerChecklist = [
648
1363
  ciGateOk ? 'Repo gate passed' : 'Resolve repo gate failures before merge',
649
1364
  aiGate.status === 'pass' ? 'AI merge gate passed' : `Review AI merge gate findings: ${aiGate.summary}`,
1365
+ policyState.active
1366
+ ? (policyState.missing_task_types.length > 0
1367
+ ? `Complete policy requirements: ${policyState.missing_task_types.join(', ')}`
1368
+ : `Confirm governed domains remain satisfied: ${policyState.domains.join(', ')}`)
1369
+ : null,
650
1370
  completedTasks.some((task) => task.task_spec?.risk_level === 'high')
651
1371
  ? 'Confirm high-risk tasks have the expected tests and docs'
652
1372
  : 'Review changed files and task outcomes',
653
- ];
1373
+ ].filter(Boolean);
654
1374
  const prTitle = status.title.startsWith('Implement:')
655
1375
  ? status.title.replace(/^Implement:\s*/i, '')
656
1376
  : status.title;
@@ -663,6 +1383,33 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
663
1383
  '## Validation',
664
1384
  `- Repo gate: ${ciGateOk ? 'pass' : 'blocked'}`,
665
1385
  `- AI merge gate: ${aiGate.status}`,
1386
+ ...(policyState.active
1387
+ ? [
1388
+ `- Policy domains: ${policyState.domains.join(', ')}`,
1389
+ `- Policy enforcement: ${policyState.enforcement}`,
1390
+ `- Policy requirements: ${policyState.required_task_types.join(', ') || 'none'}`,
1391
+ `- Policy evidence: ${policyState.satisfied_task_types.map((taskType) => `${taskType}:${(policyState.evidence_by_task_type?.[taskType] || []).map((entry) => entry.task_id).join(',')}`).join(' | ') || 'none'}`,
1392
+ `- Policy evidence objects: ${policyState.evidence_objects.map((entry) => `${entry.evidence_id}:${entry.satisfied_by.agent || entry.satisfied_by.worktree || 'task_completion'}`).join(' | ') || 'none'}`,
1393
+ `- Policy missing: ${policyState.missing_task_types.join(', ') || 'none'}`,
1394
+ `- Policy overrides: ${policyState.overrides.map((entry) => `${entry.id}:${(entry.task_types || []).join(',') || 'all'}:${entry.approved_by || 'unknown'}`).join(' | ') || 'none'}`,
1395
+ ...(policyOverrideSummary ? [`- Policy override effect: ${policyOverrideSummary}`] : []),
1396
+ ]
1397
+ : []),
1398
+ ...(staleClusters.length > 0
1399
+ ? [
1400
+ `- Stale clusters: ${staleClusters.map((cluster) => `${cluster.affected_pipeline_id || cluster.affected_task_ids[0]}:${cluster.invalidation_count}`).join(' | ')}`,
1401
+ ]
1402
+ : []),
1403
+ ...(staleCausalWaves.length > 0
1404
+ ? [
1405
+ `- Stale waves: ${staleCausalWaves.map((wave) => `${wave.summary}:${wave.affected_pipeline_ids.join(',') || 'unknown'}`).join(' | ')}`,
1406
+ ]
1407
+ : []),
1408
+ ...(trustAudit.length > 0
1409
+ ? [
1410
+ `- Trust audit: ${trustAudit.slice(0, 3).map((entry) => `${entry.category}:${entry.summary}`).join(' | ')}`,
1411
+ ]
1412
+ : []),
666
1413
  '',
667
1414
  '## Reviewer Checklist',
668
1415
  ...reviewerChecklist.map((item) => `- ${item}`),
@@ -702,6 +1449,45 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
702
1449
  '## Reviewer Notes',
703
1450
  ...reviewerChecklist.map((item) => `- ${item}`),
704
1451
  '',
1452
+ ...(policyState.active
1453
+ ? [
1454
+ '## Policy Requirements',
1455
+ `- Domains: ${policyState.domains.join(', ')}`,
1456
+ `- Enforcement: ${policyState.enforcement}`,
1457
+ `- Required task types: ${policyState.required_task_types.join(', ') || 'none'}`,
1458
+ `- Satisfied task types: ${policyState.satisfied_task_types.join(', ') || 'none'}`,
1459
+ `- Missing task types: ${policyState.missing_task_types.join(', ') || 'none'}`,
1460
+ `- Overridden task types: ${policyState.overridden_task_types.join(', ') || 'none'}`,
1461
+ ...policyState.requirement_status
1462
+ .filter((requirement) => requirement.evidence.length > 0)
1463
+ .map((requirement) => `- Evidence for ${requirement.task_type}: ${requirement.evidence.map((entry) => entry.artifact_path ? `${entry.task_id} (${entry.artifact_path}, by ${entry.satisfied_by?.agent || entry.satisfied_by?.worktree || 'task_completion'})` : `${entry.task_id} (by ${entry.satisfied_by?.agent || entry.satisfied_by?.worktree || 'task_completion'})`).join(', ')}`),
1464
+ ...policyState.overrides.slice(0, 5).map((entry) => `- Override ${entry.id}: ${(entry.task_types || []).join(', ') || 'all requirements'} by ${entry.approved_by || 'unknown'} (${entry.reason})`),
1465
+ ...policyState.satisfaction_history.slice(0, 5).map((entry) => `- Satisfaction history: ${entry.summary}`),
1466
+ ...policyState.rationale.slice(0, 5).map((item) => `- ${item}`),
1467
+ '',
1468
+ ]
1469
+ : []),
1470
+ ...(staleClusters.length > 0
1471
+ ? [
1472
+ '## Stale Clusters',
1473
+ ...staleClusters.map((cluster) => `- ${cluster.title}: ${cluster.detail} -> ${cluster.next_action}`),
1474
+ '',
1475
+ ]
1476
+ : []),
1477
+ ...(staleCausalWaves.length > 0
1478
+ ? [
1479
+ '## Stale Waves',
1480
+ ...staleCausalWaves.map((wave) => `- ${wave.summary}: affects ${wave.affected_pipeline_ids.join(', ') || 'unknown'} -> ${wave.cluster_count} cluster(s), ${wave.invalidation_count} invalidation(s)`),
1481
+ '',
1482
+ ]
1483
+ : []),
1484
+ ...(trustAudit.length > 0
1485
+ ? [
1486
+ '## Policy & Stale Audit',
1487
+ ...trustAudit.map((entry) => `- ${entry.created_at}: [${entry.category}] ${entry.summary} -> ${entry.next_action}`),
1488
+ '',
1489
+ ]
1490
+ : []),
705
1491
  '## Provenance',
706
1492
  ...provenance.map((entry) => `- ${entry.task_id}: ${entry.title} (${entry.task_type || 'unknown'}, ${entry.worktree || 'unassigned'}, lease ${entry.lease_id || 'none'})`),
707
1493
  ...(provenance.length === 0 ? ['- No completed task provenance yet'] : []),
@@ -736,6 +1522,11 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
736
1522
  risk_notes: riskNotes,
737
1523
  changed_files: changedFiles,
738
1524
  subsystem_tags: subsystemTags,
1525
+ policy_state: policyState,
1526
+ policy_override_summary: policyOverrideSummary,
1527
+ stale_clusters: staleClusters,
1528
+ stale_causal_waves: staleCausalWaves,
1529
+ trust_audit: trustAudit,
739
1530
  },
740
1531
  counts: status.counts,
741
1532
  ci_gate: {
@@ -747,23 +1538,210 @@ export async function buildPipelinePrSummary(db, repoRoot, pipelineId) {
747
1538
  status: aiGate.status,
748
1539
  summary: aiGate.summary,
749
1540
  },
1541
+ policy_state: policyState,
1542
+ stale_clusters: staleClusters,
1543
+ stale_causal_waves: staleCausalWaves,
1544
+ trust_audit: trustAudit,
750
1545
  worktree_changes: worktreeChanges,
751
1546
  markdown,
752
1547
  };
753
1548
  }
754
1549
 
1550
+ export async function buildPipelineLandingSummary(db, repoRoot, pipelineId) {
1551
+ const status = getPipelineStatus(db, pipelineId);
1552
+ const changePolicy = loadChangePolicy(repoRoot);
1553
+ const aiGate = await runAiMergeGate(db, repoRoot);
1554
+ const policyState = summarizePipelinePolicyState(db, status, changePolicy, aiGate.boundary_validations || []);
1555
+ const policyOverrideSummary = policyState.overridden_task_types.length > 0
1556
+ ? `Landing currently relies on policy overrides for ${policyState.overridden_task_types.join(', ')}.`
1557
+ : null;
1558
+ const staleClusters = buildStaleClusters(listDependencyInvalidations(db, { pipelineId }), { pipelineId });
1559
+ const staleCausalWaves = summarizeStaleCausalWaves(staleClusters);
1560
+ const trustAudit = [...policyState.satisfaction_history, ...buildPipelineTrustAudit(db, pipelineId)]
1561
+ .sort((a, b) => String(b.created_at || '').localeCompare(String(a.created_at || '')))
1562
+ .slice(0, 8);
1563
+ let landing;
1564
+ let landingError = null;
1565
+ try {
1566
+ landing = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
1567
+ requireCompleted: false,
1568
+ });
1569
+ } catch (err) {
1570
+ landingError = String(err.message || 'Landing branch is not ready yet.');
1571
+ landing = {
1572
+ branch: null,
1573
+ strategy: 'unavailable',
1574
+ synthetic: false,
1575
+ component_branches: [],
1576
+ stale: false,
1577
+ last_failure: null,
1578
+ };
1579
+ }
1580
+ const readyToQueue = status.counts.failed === 0
1581
+ && status.counts.pending === 0
1582
+ && status.counts.in_progress === 0
1583
+ && status.counts.done > 0
1584
+ && !landingError
1585
+ && !landing.stale
1586
+ && staleClusters.length === 0
1587
+ && !landing.last_failure;
1588
+ const nextAction = staleClusters[0]?.next_action
1589
+ || (landingError
1590
+ ? `switchman pipeline status ${pipelineId}`
1591
+ : landing.last_failure?.command
1592
+ || (landing.stale
1593
+ ? `switchman pipeline land ${pipelineId} --refresh`
1594
+ : `switchman queue add --pipeline ${pipelineId}`));
1595
+ const queueItems = listMergeQueue(db)
1596
+ .filter((item) => item.source_pipeline_id === pipelineId)
1597
+ .sort((a, b) => Number.parseInt(b.id.slice(1), 10) - Number.parseInt(a.id.slice(1), 10));
1598
+ const currentQueueItem = queueItems[0] || null;
1599
+ const queueState = {
1600
+ status: currentQueueItem?.status || 'not_queued',
1601
+ item_id: currentQueueItem?.id || null,
1602
+ target_branch: currentQueueItem?.target_branch || 'main',
1603
+ merged_commit: currentQueueItem?.merged_commit || null,
1604
+ next_action: currentQueueItem?.next_action || (currentQueueItem
1605
+ ? null
1606
+ : (readyToQueue ? `switchman queue add --pipeline ${pipelineId}` : nextAction)),
1607
+ last_error_code: currentQueueItem?.last_error_code || null,
1608
+ last_error_summary: currentQueueItem?.last_error_summary || null,
1609
+ policy_override_summary: policyOverrideSummary,
1610
+ };
1611
+ const recoveryState = landing.last_recovery
1612
+ ? {
1613
+ status: landing.last_recovery.state?.status || 'prepared',
1614
+ recovery_path: landing.last_recovery.recovery_path || null,
1615
+ inspect_command: landing.last_recovery.inspect_command || null,
1616
+ resume_command: landing.last_recovery.resume_command || null,
1617
+ }
1618
+ : null;
1619
+
1620
+ const markdown = [
1621
+ `# Pipeline Landing Summary: ${status.title}`,
1622
+ '',
1623
+ `- Pipeline: \`${pipelineId}\``,
1624
+ `- Ready to queue: ${readyToQueue ? 'yes' : 'no'}`,
1625
+ `- Landing branch: ${landing.branch ? `\`${landing.branch}\`` : 'not resolved yet'}`,
1626
+ `- Strategy: ${landing.strategy}`,
1627
+ `- Synthetic: ${landing.synthetic ? 'yes' : 'no'}`,
1628
+ '',
1629
+ '## Component Branches',
1630
+ ...(landing.component_branches.length > 0
1631
+ ? landing.component_branches.map((branch) => `- ${branch}`)
1632
+ : ['- None inferred yet']),
1633
+ '',
1634
+ '## Landing State',
1635
+ ...(landingError
1636
+ ? [`- ${landingError}`]
1637
+ : landing.last_failure
1638
+ ? [
1639
+ `- Failure: ${landing.last_failure.reason_code || 'landing_branch_materialization_failed'}`,
1640
+ ...(landing.last_failure.failed_branch ? [`- Failed branch: ${landing.last_failure.failed_branch}`] : []),
1641
+ ...(landing.last_failure.conflicting_files?.length ? [`- Conflicts: ${landing.last_failure.conflicting_files.join(', ')}`] : []),
1642
+ ]
1643
+ : landing.stale
1644
+ ? landing.stale_reasons.map((reason) => `- Stale: ${reason.summary}`)
1645
+ : staleClusters.length > 0
1646
+ ? staleClusters.map((cluster) => `- Stale cluster: ${cluster.title}`)
1647
+ : ['- Current and ready for queueing']),
1648
+ '',
1649
+ '## Recovery State',
1650
+ ...(recoveryState
1651
+ ? [
1652
+ `- Status: ${recoveryState.status}`,
1653
+ ...(recoveryState.recovery_path ? [`- Path: ${recoveryState.recovery_path}`] : []),
1654
+ ...(recoveryState.resume_command ? [`- Resume: ${recoveryState.resume_command}`] : []),
1655
+ ]
1656
+ : ['- No active recovery worktree']),
1657
+ '',
1658
+ ...(staleClusters.length > 0
1659
+ ? [
1660
+ '## Stale Clusters',
1661
+ ...staleClusters.map((cluster) => `- ${cluster.title}: ${cluster.detail} -> ${cluster.next_action}`),
1662
+ '',
1663
+ ]
1664
+ : []),
1665
+ ...(staleCausalWaves.length > 0
1666
+ ? [
1667
+ '## Stale Waves',
1668
+ ...staleCausalWaves.map((wave) => `- ${wave.summary}: affects ${wave.affected_pipeline_ids.join(', ') || 'unknown'} -> ${wave.cluster_count} cluster(s), ${wave.invalidation_count} invalidation(s)`),
1669
+ '',
1670
+ ]
1671
+ : []),
1672
+ ...(trustAudit.length > 0
1673
+ ? [
1674
+ '## Policy & Stale Audit',
1675
+ ...trustAudit.map((entry) => `- ${entry.created_at}: [${entry.category}] ${entry.summary} -> ${entry.next_action}`),
1676
+ '',
1677
+ ]
1678
+ : []),
1679
+ '',
1680
+ ...(policyState.active
1681
+ ? [
1682
+ '## Policy Record',
1683
+ `- Domains: ${policyState.domains.join(', ')}`,
1684
+ `- Enforcement: ${policyState.enforcement}`,
1685
+ `- Required task types: ${policyState.required_task_types.join(', ') || 'none'}`,
1686
+ `- Missing task types: ${policyState.missing_task_types.join(', ') || 'none'}`,
1687
+ `- Overridden task types: ${policyState.overridden_task_types.join(', ') || 'none'}`,
1688
+ ...policyState.requirement_status
1689
+ .filter((requirement) => requirement.evidence.length > 0)
1690
+ .map((requirement) => `- Evidence for ${requirement.task_type}: ${requirement.evidence.map((entry) => `${entry.task_id} by ${entry.satisfied_by?.agent || entry.satisfied_by?.worktree || 'task_completion'}`).join(', ')}`),
1691
+ ...policyState.overrides.slice(0, 5).map((entry) => `- Override ${entry.id}: ${(entry.task_types || []).join(', ') || 'all requirements'} by ${entry.approved_by || 'unknown'} (${entry.reason})`),
1692
+ ...(policyOverrideSummary ? [`- Override effect: ${policyOverrideSummary}`] : []),
1693
+ '',
1694
+ ]
1695
+ : []),
1696
+ '',
1697
+ '## Queue State',
1698
+ `- Status: ${queueState.status}`,
1699
+ ...(queueState.item_id ? [`- Item: ${queueState.item_id}`] : []),
1700
+ `- Target branch: ${queueState.target_branch}`,
1701
+ ...(queueState.merged_commit ? [`- Merged commit: ${queueState.merged_commit}`] : []),
1702
+ ...(queueState.last_error_summary ? [`- Queue error: ${queueState.last_error_summary}`] : []),
1703
+ ...(queueState.policy_override_summary ? [`- Policy override: ${queueState.policy_override_summary}`] : []),
1704
+ '',
1705
+ `## Next Action`,
1706
+ `- ${queueState.next_action || nextAction}`,
1707
+ ].join('\n');
1708
+
1709
+ return {
1710
+ pipeline_id: pipelineId,
1711
+ title: status.title,
1712
+ ready_to_queue: readyToQueue,
1713
+ counts: status.counts,
1714
+ policy_state: policyState,
1715
+ landing,
1716
+ recovery_state: recoveryState,
1717
+ stale_clusters: staleClusters,
1718
+ stale_causal_waves: staleCausalWaves,
1719
+ trust_audit: trustAudit,
1720
+ queue_state: queueState,
1721
+ policy_override_summary: policyOverrideSummary,
1722
+ landing_error: landingError,
1723
+ next_action: queueState.next_action || nextAction,
1724
+ markdown,
1725
+ };
1726
+ }
1727
+
755
1728
  export async function exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir = null) {
756
1729
  const summary = await buildPipelinePrSummary(db, repoRoot, pipelineId);
1730
+ const landingSummary = await buildPipelineLandingSummary(db, repoRoot, pipelineId);
757
1731
  const bundleDir = outputDir || join(repoRoot, '.switchman', 'pipelines', pipelineId);
758
1732
  mkdirSync(bundleDir, { recursive: true });
759
1733
 
760
1734
  const summaryJsonPath = join(bundleDir, 'pr-summary.json');
761
1735
  const summaryMarkdownPath = join(bundleDir, 'pr-summary.md');
762
1736
  const prBodyPath = join(bundleDir, 'pr-body.md');
1737
+ const landingSummaryJsonPath = join(bundleDir, 'pipeline-landing-summary.json');
1738
+ const landingSummaryMarkdownPath = join(bundleDir, 'pipeline-landing-summary.md');
763
1739
 
764
1740
  writeFileSync(summaryJsonPath, `${JSON.stringify(summary, null, 2)}\n`);
765
1741
  writeFileSync(summaryMarkdownPath, `${summary.markdown}\n`);
766
1742
  writeFileSync(prBodyPath, `${summary.pr_artifact.body}\n`);
1743
+ writeFileSync(landingSummaryJsonPath, `${JSON.stringify(landingSummary, null, 2)}\n`);
1744
+ writeFileSync(landingSummaryMarkdownPath, `${landingSummary.markdown}\n`);
767
1745
 
768
1746
  logAuditEvent(db, {
769
1747
  eventType: 'pipeline_pr_bundle_exported',
@@ -772,7 +1750,7 @@ export async function exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir
772
1750
  details: JSON.stringify({
773
1751
  pipeline_id: pipelineId,
774
1752
  output_dir: bundleDir,
775
- files: [summaryJsonPath, summaryMarkdownPath, prBodyPath],
1753
+ files: [summaryJsonPath, summaryMarkdownPath, prBodyPath, landingSummaryJsonPath, landingSummaryMarkdownPath],
776
1754
  }),
777
1755
  });
778
1756
 
@@ -783,83 +1761,1303 @@ export async function exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir
783
1761
  summary_json: summaryJsonPath,
784
1762
  summary_markdown: summaryMarkdownPath,
785
1763
  pr_body_markdown: prBodyPath,
1764
+ landing_summary_json: landingSummaryJsonPath,
1765
+ landing_summary_markdown: landingSummaryMarkdownPath,
786
1766
  },
787
1767
  summary,
1768
+ landing_summary: landingSummary,
788
1769
  };
789
1770
  }
790
1771
 
791
- function resolvePipelineHeadBranch(db, repoRoot, pipelineStatus, explicitHeadBranch = null) {
792
- if (explicitHeadBranch) return explicitHeadBranch;
1772
+ function resolvePipelineBranchForTask(worktreesByName, task) {
1773
+ const worktreeName = task.worktree || task.suggested_worktree || null;
1774
+ const branch = worktreeName ? worktreesByName.get(worktreeName)?.branch || null : null;
1775
+ return branch && branch !== 'main' && branch !== 'unknown' ? branch : null;
1776
+ }
793
1777
 
1778
+ function collectPipelineLandingCandidates(db, pipelineStatus) {
794
1779
  const worktreesByName = new Map(listWorktrees(db).map((worktree) => [worktree.name, worktree]));
795
- const resolveBranchForTask = (task) => {
796
- const worktreeName = task.worktree || task.suggested_worktree || null;
797
- const branch = worktreeName ? worktreesByName.get(worktreeName)?.branch || null : null;
798
- return branch && branch !== 'main' && branch !== 'unknown' ? branch : null;
799
- };
1780
+ const orderedBranches = [];
1781
+ const branchToWorktree = new Map();
1782
+
1783
+ for (const task of pipelineStatus.tasks) {
1784
+ const branch = resolvePipelineBranchForTask(worktreesByName, task);
1785
+ if (!branch) continue;
1786
+ if (!branchToWorktree.has(branch) && task.worktree) {
1787
+ branchToWorktree.set(branch, task.worktree);
1788
+ }
1789
+ if (!orderedBranches.includes(branch)) {
1790
+ orderedBranches.push(branch);
1791
+ }
1792
+ }
800
1793
 
801
1794
  const implementationBranches = uniq(
802
1795
  pipelineStatus.tasks
803
1796
  .filter((task) => task.task_spec?.task_type === 'implementation')
804
- .map(resolveBranchForTask)
1797
+ .map((task) => resolvePipelineBranchForTask(worktreesByName, task))
805
1798
  .filter(Boolean),
806
1799
  );
807
- if (implementationBranches.length === 1) {
808
- return implementationBranches[0];
809
- }
1800
+ const candidateBranches = uniq(orderedBranches);
1801
+ const prioritizedBranches = [
1802
+ ...implementationBranches,
1803
+ ...candidateBranches.filter((branch) => !implementationBranches.includes(branch)),
1804
+ ];
810
1805
 
811
- const candidateBranches = uniq(
812
- pipelineStatus.tasks
813
- .map(resolveBranchForTask)
814
- .filter(Boolean),
815
- );
1806
+ return {
1807
+ implementationBranches,
1808
+ candidateBranches,
1809
+ prioritizedBranches,
1810
+ branchToWorktree,
1811
+ worktreesByName,
1812
+ };
1813
+ }
816
1814
 
817
- if (candidateBranches.length === 1) {
818
- return candidateBranches[0];
819
- }
1815
+ function getPipelineLandingBranchName(pipelineId, landingBranch = null) {
1816
+ return landingBranch || `switchman/pipeline-landing/${pipelineId}`;
1817
+ }
1818
+
1819
+ function listPipelineLandingEvents(db, pipelineId, branch) {
1820
+ return listAuditEvents(db, {
1821
+ eventType: 'pipeline_landing_branch_materialized',
1822
+ status: 'allowed',
1823
+ limit: 500,
1824
+ }).flatMap((event) => {
1825
+ try {
1826
+ const details = JSON.parse(event.details || '{}');
1827
+ if (details.pipeline_id !== pipelineId || details.branch !== branch) {
1828
+ return [];
1829
+ }
1830
+ return [{
1831
+ audit_id: event.id,
1832
+ created_at: event.created_at,
1833
+ ...details,
1834
+ }];
1835
+ } catch {
1836
+ return [];
1837
+ }
1838
+ });
1839
+ }
1840
+
1841
+ function getLatestLandingResolvedEvent(db, pipelineId, branch) {
1842
+ return listAuditEvents(db, {
1843
+ eventType: 'pipeline_landing_recovery_resumed',
1844
+ status: 'allowed',
1845
+ limit: 200,
1846
+ }).flatMap((event) => {
1847
+ try {
1848
+ const details = JSON.parse(event.details || '{}');
1849
+ if (details.pipeline_id !== pipelineId || details.branch !== branch) {
1850
+ return [];
1851
+ }
1852
+ return [{
1853
+ audit_id: event.id,
1854
+ created_at: event.created_at,
1855
+ ...details,
1856
+ }];
1857
+ } catch {
1858
+ return [];
1859
+ }
1860
+ })[0] || null;
1861
+ }
1862
+
1863
+ function getLatestLandingRecoveryPrepared(db, pipelineId, branch) {
1864
+ return listAuditEvents(db, {
1865
+ eventType: 'pipeline_landing_recovery_prepared',
1866
+ status: 'allowed',
1867
+ limit: 200,
1868
+ }).flatMap((event) => {
1869
+ try {
1870
+ const details = JSON.parse(event.details || '{}');
1871
+ if (details.pipeline_id !== pipelineId || details.branch !== branch) {
1872
+ return [];
1873
+ }
1874
+ return [{
1875
+ audit_id: event.id,
1876
+ created_at: event.created_at,
1877
+ ...details,
1878
+ }];
1879
+ } catch {
1880
+ return [];
1881
+ }
1882
+ })[0] || null;
1883
+ }
1884
+
1885
+ function getLatestLandingRecoveryCleared(db, pipelineId, branch) {
1886
+ return listAuditEvents(db, {
1887
+ eventType: 'pipeline_landing_recovery_cleared',
1888
+ status: 'allowed',
1889
+ limit: 200,
1890
+ }).flatMap((event) => {
1891
+ try {
1892
+ const details = JSON.parse(event.details || '{}');
1893
+ if (details.pipeline_id !== pipelineId || details.branch !== branch) {
1894
+ return [];
1895
+ }
1896
+ return [{
1897
+ audit_id: event.id,
1898
+ created_at: event.created_at,
1899
+ ...details,
1900
+ }];
1901
+ } catch {
1902
+ return [];
1903
+ }
1904
+ })[0] || null;
1905
+ }
820
1906
 
821
- const currentBranch = getWorktreeBranch(repoRoot);
822
- if (currentBranch && currentBranch !== 'main') {
823
- return currentBranch;
1907
+ function buildRecoveryState(repoRoot, branch, recoveryRecord, latestResolved) {
1908
+ if (!recoveryRecord) return null;
1909
+ const recoveryPath = recoveryRecord.recovery_path || null;
1910
+ const pathExists = recoveryPath ? existsSync(recoveryPath) : false;
1911
+ const gitWorktrees = listGitWorktrees(repoRoot);
1912
+ const normalizedRecoveryPath = recoveryPath
1913
+ ? (pathExists ? realpathSync(recoveryPath) : recoveryPath)
1914
+ : null;
1915
+ const tracked = recoveryPath
1916
+ ? gitWorktrees.some((worktree) => worktree.path === recoveryPath || worktree.path === normalizedRecoveryPath)
1917
+ : false;
1918
+ const branchWorktree = branch
1919
+ ? gitWorktrees.find((worktree) => worktree.branch === branch) || null
1920
+ : null;
1921
+ const resolved = Boolean(latestResolved && latestResolved.audit_id > recoveryRecord.audit_id);
1922
+ let status;
1923
+
1924
+ if (resolved) {
1925
+ if (tracked) {
1926
+ status = 'resolved';
1927
+ } else if (branchWorktree) {
1928
+ status = 'resolved_moved';
1929
+ } else if (pathExists) {
1930
+ status = 'resolved_untracked';
1931
+ } else {
1932
+ status = 'resolved_missing';
1933
+ }
1934
+ } else if (tracked) {
1935
+ status = 'active';
1936
+ } else if (branchWorktree) {
1937
+ status = 'moved';
1938
+ } else if (pathExists) {
1939
+ status = 'untracked';
1940
+ } else {
1941
+ status = 'missing';
824
1942
  }
825
1943
 
826
- return null;
1944
+ return {
1945
+ path: recoveryPath,
1946
+ exists: pathExists,
1947
+ tracked,
1948
+ branch_worktree_path: branchWorktree?.path || null,
1949
+ status,
1950
+ };
827
1951
  }
828
1952
 
829
- export async function publishPipelinePr(
1953
+ function getLatestLandingFailure(db, pipelineId, branch) {
1954
+ return listAuditEvents(db, {
1955
+ eventType: 'pipeline_landing_branch_materialized',
1956
+ status: 'denied',
1957
+ limit: 200,
1958
+ }).flatMap((event) => {
1959
+ try {
1960
+ const details = JSON.parse(event.details || '{}');
1961
+ if (details.pipeline_id !== pipelineId || details.branch !== branch) {
1962
+ return [];
1963
+ }
1964
+ return [{
1965
+ audit_id: event.id,
1966
+ created_at: event.created_at,
1967
+ reason_code: event.reason_code || null,
1968
+ ...details,
1969
+ }];
1970
+ } catch {
1971
+ return [];
1972
+ }
1973
+ })[0] || null;
1974
+ }
1975
+
1976
+ function collectBranchHeadCommits(repoRoot, branches) {
1977
+ return Object.fromEntries(
1978
+ branches.map((branch) => [branch, gitRevParse(repoRoot, branch)]),
1979
+ );
1980
+ }
1981
+
1982
+ export function getPipelineLandingBranchStatus(
830
1983
  db,
831
1984
  repoRoot,
832
1985
  pipelineId,
833
1986
  {
834
1987
  baseBranch = 'main',
835
- headBranch = null,
836
- draft = false,
837
- ghCommand = 'gh',
838
- outputDir = null,
1988
+ landingBranch = null,
1989
+ requireCompleted = true,
839
1990
  } = {},
840
1991
  ) {
841
- const bundle = await exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir);
842
- const status = getPipelineStatus(db, pipelineId);
843
- const resolvedHeadBranch = resolvePipelineHeadBranch(db, repoRoot, status, headBranch);
844
-
845
- if (!resolvedHeadBranch) {
846
- throw new Error(`Could not determine a head branch for pipeline ${pipelineId}. Pass --head <branch>.`);
1992
+ const pipelineStatus = getPipelineStatus(db, pipelineId);
1993
+ if (requireCompleted) {
1994
+ const unfinishedTasks = pipelineStatus.tasks.filter((task) => task.status !== 'done');
1995
+ if (unfinishedTasks.length > 0) {
1996
+ throw new Error(`Pipeline ${pipelineId} is not ready to land. Complete remaining tasks first: ${unfinishedTasks.map((task) => task.id).join(', ')}.`);
1997
+ }
847
1998
  }
848
1999
 
849
- const args = [
850
- 'pr',
851
- 'create',
852
- '--base',
853
- baseBranch,
854
- '--head',
855
- resolvedHeadBranch,
856
- '--title',
857
- bundle.summary.pr_artifact.title,
858
- '--body-file',
859
- bundle.files.pr_body_markdown,
860
- ];
2000
+ const { candidateBranches, prioritizedBranches } = collectPipelineLandingCandidates(db, pipelineStatus);
2001
+ if (candidateBranches.length === 0) {
2002
+ throw new Error(`Pipeline ${pipelineId} has no landed worktree branch to materialize.`);
2003
+ }
861
2004
 
862
- if (draft) {
2005
+ if (candidateBranches.length === 1) {
2006
+ const branch = candidateBranches[0];
2007
+ return {
2008
+ pipeline_id: pipelineId,
2009
+ branch,
2010
+ base_branch: baseBranch,
2011
+ synthetic: false,
2012
+ branch_exists: Boolean(gitRevParse(repoRoot, branch)),
2013
+ branch_head_commit: gitRevParse(repoRoot, branch),
2014
+ component_branches: [branch],
2015
+ component_commits: { [branch]: gitRevParse(repoRoot, branch) },
2016
+ strategy: 'single_branch',
2017
+ stale: false,
2018
+ stale_reasons: [],
2019
+ last_materialized: null,
2020
+ };
2021
+ }
2022
+
2023
+ const resolvedLandingBranch = getPipelineLandingBranchName(pipelineId, landingBranch);
2024
+ const branchExists = gitBranchExists(repoRoot, resolvedLandingBranch);
2025
+ const branchHeadCommit = branchExists ? gitRevParse(repoRoot, resolvedLandingBranch) : null;
2026
+ const baseCommit = gitRevParse(repoRoot, baseBranch);
2027
+ const componentCommits = collectBranchHeadCommits(repoRoot, prioritizedBranches);
2028
+ const latestMaterialized = listPipelineLandingEvents(db, pipelineId, resolvedLandingBranch)[0] || null;
2029
+ const latestResolved = getLatestLandingResolvedEvent(db, pipelineId, resolvedLandingBranch);
2030
+ const latestRecoveryPreparedCandidate = getLatestLandingRecoveryPrepared(db, pipelineId, resolvedLandingBranch);
2031
+ const latestRecoveryCleared = getLatestLandingRecoveryCleared(db, pipelineId, resolvedLandingBranch);
2032
+ const latestRecoveryPrepared = latestRecoveryPreparedCandidate
2033
+ && (!latestRecoveryCleared || latestRecoveryPreparedCandidate.audit_id > latestRecoveryCleared.audit_id)
2034
+ ? latestRecoveryPreparedCandidate
2035
+ : null;
2036
+ const recoveryState = buildRecoveryState(repoRoot, resolvedLandingBranch, latestRecoveryPrepared, latestResolved);
2037
+ const latestEvent = latestResolved && (!latestMaterialized || latestResolved.audit_id > latestMaterialized.audit_id)
2038
+ ? latestResolved
2039
+ : latestMaterialized;
2040
+ const latestFailureCandidate = getLatestLandingFailure(db, pipelineId, resolvedLandingBranch);
2041
+ const latestFailure = latestFailureCandidate && (!latestEvent || latestFailureCandidate.audit_id > latestEvent.audit_id)
2042
+ ? latestFailureCandidate
2043
+ : null;
2044
+ const staleReasons = [];
2045
+
2046
+ if (!latestEvent) {
2047
+ staleReasons.push({
2048
+ code: 'not_materialized',
2049
+ summary: 'Landing branch has not been materialized yet.',
2050
+ });
2051
+ } else {
2052
+ if (!branchExists) {
2053
+ staleReasons.push({
2054
+ code: 'landing_branch_missing',
2055
+ summary: `Landing branch ${resolvedLandingBranch} no longer exists.`,
2056
+ });
2057
+ }
2058
+
2059
+ const recordedComponents = Array.isArray(latestEvent.component_branches)
2060
+ ? latestEvent.component_branches
2061
+ : [];
2062
+ if (recordedComponents.join('\n') !== prioritizedBranches.join('\n')) {
2063
+ staleReasons.push({
2064
+ code: 'component_set_changed',
2065
+ summary: 'The pipeline now resolves to a different set of component branches.',
2066
+ });
2067
+ }
2068
+
2069
+ if (latestEvent.base_commit && baseCommit && latestEvent.base_commit !== baseCommit) {
2070
+ staleReasons.push({
2071
+ code: 'base_branch_moved',
2072
+ summary: `${baseBranch} moved from ${latestEvent.base_commit.slice(0, 12)} to ${baseCommit.slice(0, 12)}.`,
2073
+ });
2074
+ }
2075
+
2076
+ const recordedCommits = latestEvent.component_commits || {};
2077
+ for (const branch of prioritizedBranches) {
2078
+ const previousCommit = recordedCommits[branch] || null;
2079
+ const currentCommit = componentCommits[branch] || null;
2080
+ if (previousCommit && currentCommit && previousCommit !== currentCommit) {
2081
+ staleReasons.push({
2082
+ code: 'component_branch_moved',
2083
+ branch,
2084
+ summary: `${branch} moved from ${previousCommit.slice(0, 12)} to ${currentCommit.slice(0, 12)}.`,
2085
+ });
2086
+ }
2087
+ }
2088
+
2089
+ if (
2090
+ latestEvent.head_commit &&
2091
+ branchHeadCommit &&
2092
+ latestEvent.head_commit !== branchHeadCommit
2093
+ ) {
2094
+ staleReasons.push({
2095
+ code: 'landing_branch_drifted',
2096
+ summary: `Landing branch head changed from ${latestEvent.head_commit.slice(0, 12)} to ${branchHeadCommit.slice(0, 12)} outside Switchman.`,
2097
+ });
2098
+ }
2099
+ }
2100
+
2101
+ return {
2102
+ pipeline_id: pipelineId,
2103
+ branch: resolvedLandingBranch,
2104
+ base_branch: baseBranch,
2105
+ synthetic: true,
2106
+ branch_exists: branchExists,
2107
+ branch_head_commit: branchHeadCommit,
2108
+ component_branches: prioritizedBranches,
2109
+ component_commits: componentCommits,
2110
+ strategy: 'synthetic_integration_branch',
2111
+ stale: staleReasons.some((reason) => reason.code !== 'not_materialized'),
2112
+ stale_reasons: staleReasons.filter((reason) => reason.code !== 'not_materialized'),
2113
+ last_failure: latestFailure ? {
2114
+ audit_id: latestFailure.audit_id,
2115
+ created_at: latestFailure.created_at,
2116
+ reason_code: latestFailure.reason_code || null,
2117
+ failed_branch: latestFailure.failed_branch || null,
2118
+ conflicting_files: Array.isArray(latestFailure.conflicting_files) ? latestFailure.conflicting_files : [],
2119
+ output: latestFailure.output || null,
2120
+ command: latestFailure.command || null,
2121
+ next_action: latestFailure.next_action || null,
2122
+ } : null,
2123
+ last_recovery: latestRecoveryPrepared ? {
2124
+ audit_id: latestRecoveryPrepared.audit_id,
2125
+ created_at: latestRecoveryPrepared.created_at,
2126
+ recovery_path: latestRecoveryPrepared.recovery_path || null,
2127
+ failed_branch: latestRecoveryPrepared.failed_branch || null,
2128
+ conflicting_files: Array.isArray(latestRecoveryPrepared.conflicting_files) ? latestRecoveryPrepared.conflicting_files : [],
2129
+ inspect_command: latestRecoveryPrepared.inspect_command || null,
2130
+ resume_command: latestRecoveryPrepared.resume_command || null,
2131
+ state: recoveryState,
2132
+ } : null,
2133
+ last_materialized: latestEvent ? {
2134
+ audit_id: latestEvent.audit_id,
2135
+ created_at: latestEvent.created_at,
2136
+ head_commit: latestEvent.head_commit || null,
2137
+ base_commit: latestEvent.base_commit || null,
2138
+ component_branches: Array.isArray(latestEvent.component_branches) ? latestEvent.component_branches : [],
2139
+ component_commits: latestEvent.component_commits || {},
2140
+ } : null,
2141
+ };
2142
+ }
2143
+
2144
+ export function resolvePipelineLandingTarget(
2145
+ db,
2146
+ repoRoot,
2147
+ pipelineStatus,
2148
+ {
2149
+ explicitHeadBranch = null,
2150
+ requireCompleted = false,
2151
+ allowCurrentBranchFallback = true,
2152
+ } = {},
2153
+ ) {
2154
+ if (explicitHeadBranch) {
2155
+ return {
2156
+ branch: explicitHeadBranch,
2157
+ worktree: null,
2158
+ strategy: 'explicit',
2159
+ };
2160
+ }
2161
+
2162
+ if (requireCompleted) {
2163
+ const unfinishedTasks = pipelineStatus.tasks.filter((task) => task.status !== 'done');
2164
+ if (unfinishedTasks.length > 0) {
2165
+ throw new Error(`Pipeline ${pipelineStatus.pipeline_id} is not ready to queue. Complete remaining tasks first: ${unfinishedTasks.map((task) => task.id).join(', ')}.`);
2166
+ }
2167
+ }
2168
+
2169
+ const { implementationBranches, candidateBranches, branchToWorktree } = collectPipelineLandingCandidates(db, pipelineStatus);
2170
+ if (implementationBranches.length === 1) {
2171
+ const branch = implementationBranches[0];
2172
+ const worktree = branchToWorktree.get(branch) || null;
2173
+ return { branch, worktree, strategy: 'implementation_branch' };
2174
+ }
2175
+
2176
+ if (candidateBranches.length === 1) {
2177
+ const branch = candidateBranches[0];
2178
+ const worktree = branchToWorktree.get(branch) || null;
2179
+ return { branch, worktree, strategy: 'single_branch' };
2180
+ }
2181
+
2182
+ if (allowCurrentBranchFallback) {
2183
+ const currentBranch = getWorktreeBranch(repoRoot);
2184
+ if (currentBranch && currentBranch !== 'main') {
2185
+ return { branch: currentBranch, worktree: null, strategy: 'current_branch' };
2186
+ }
2187
+ }
2188
+
2189
+ throw new Error(`Pipeline ${pipelineStatus.pipeline_id} spans multiple branches (${candidateBranches.join(', ') || 'none inferred'}). Queue a branch or worktree explicitly.`);
2190
+ }
2191
+
2192
+ export function materializePipelineLandingBranch(
2193
+ db,
2194
+ repoRoot,
2195
+ pipelineId,
2196
+ {
2197
+ baseBranch = 'main',
2198
+ landingBranch = null,
2199
+ requireCompleted = true,
2200
+ refresh = false,
2201
+ } = {},
2202
+ ) {
2203
+ const pipelineStatus = getPipelineStatus(db, pipelineId);
2204
+ const { candidateBranches, prioritizedBranches, branchToWorktree } = collectPipelineLandingCandidates(db, pipelineStatus);
2205
+ const landingStatus = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
2206
+ baseBranch,
2207
+ landingBranch,
2208
+ requireCompleted,
2209
+ });
2210
+
2211
+ if (candidateBranches.length === 1) {
2212
+ const branch = candidateBranches[0];
2213
+ return {
2214
+ pipeline_id: pipelineId,
2215
+ branch,
2216
+ base_branch: baseBranch,
2217
+ worktree: branchToWorktree.get(branch) || null,
2218
+ synthetic: false,
2219
+ component_branches: [branch],
2220
+ strategy: 'single_branch',
2221
+ head_commit: null,
2222
+ };
2223
+ }
2224
+
2225
+ if (landingStatus.last_materialized && !landingStatus.stale) {
2226
+ return {
2227
+ pipeline_id: pipelineId,
2228
+ branch: landingStatus.branch,
2229
+ base_branch: baseBranch,
2230
+ worktree: null,
2231
+ synthetic: true,
2232
+ component_branches: landingStatus.component_branches,
2233
+ component_commits: landingStatus.component_commits,
2234
+ strategy: 'synthetic_integration_branch',
2235
+ head_commit: landingStatus.branch_head_commit,
2236
+ refreshed: false,
2237
+ reused_existing: true,
2238
+ stale: false,
2239
+ stale_reasons: [],
2240
+ last_materialized: landingStatus.last_materialized,
2241
+ };
2242
+ }
2243
+
2244
+ if (landingStatus.stale && !refresh) {
2245
+ const summaries = landingStatus.stale_reasons.map((reason) => reason.summary).join(' ');
2246
+ throw new Error(`Landing branch ${landingStatus.branch} is stale. ${summaries} Run \`switchman pipeline land ${pipelineId} --refresh${landingBranch ? ` --branch ${landingBranch}` : ''}\` to rebuild it.`);
2247
+ }
2248
+
2249
+ const resolvedLandingBranch = getPipelineLandingBranchName(pipelineId, landingBranch);
2250
+ const tempWorktreePath = join(tmpdir(), `switchman-landing-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`);
2251
+ let materialized;
2252
+ const landingOperation = startOperationJournalEntry(db, {
2253
+ scopeType: 'pipeline',
2254
+ scopeId: pipelineId,
2255
+ operationType: 'landing_materialize',
2256
+ details: JSON.stringify({
2257
+ pipeline_id: pipelineId,
2258
+ branch: resolvedLandingBranch,
2259
+ base_branch: baseBranch,
2260
+ component_branches: prioritizedBranches,
2261
+ refresh: Boolean(refresh),
2262
+ }),
2263
+ });
2264
+ const landingTempResource = createTempResource(db, {
2265
+ scopeType: 'pipeline',
2266
+ scopeId: pipelineId,
2267
+ operationId: landingOperation.id,
2268
+ resourceType: 'landing_temp_worktree',
2269
+ path: tempWorktreePath,
2270
+ branch: resolvedLandingBranch,
2271
+ details: stringifyResourceDetails({
2272
+ pipeline_id: pipelineId,
2273
+ branch: resolvedLandingBranch,
2274
+ base_branch: baseBranch,
2275
+ component_branches: prioritizedBranches,
2276
+ operation_type: 'landing_materialize',
2277
+ }),
2278
+ });
2279
+ try {
2280
+ materialized = gitMaterializeIntegrationBranch(repoRoot, {
2281
+ branch: resolvedLandingBranch,
2282
+ baseBranch,
2283
+ mergeBranches: prioritizedBranches,
2284
+ tempWorktreePath,
2285
+ });
2286
+ finishOperationJournalEntry(db, landingOperation.id, {
2287
+ status: 'completed',
2288
+ details: JSON.stringify({
2289
+ pipeline_id: pipelineId,
2290
+ branch: resolvedLandingBranch,
2291
+ base_branch: baseBranch,
2292
+ component_branches: prioritizedBranches,
2293
+ head_commit: materialized.head_commit,
2294
+ refresh: Boolean(refresh),
2295
+ }),
2296
+ });
2297
+ } catch (err) {
2298
+ finishOperationJournalEntry(db, landingOperation.id, {
2299
+ status: 'failed',
2300
+ details: JSON.stringify({
2301
+ pipeline_id: pipelineId,
2302
+ branch: resolvedLandingBranch,
2303
+ base_branch: baseBranch,
2304
+ component_branches: prioritizedBranches,
2305
+ reason_code: err?.code || 'landing_branch_materialization_failed',
2306
+ failed_branch: err?.details?.failed_branch || null,
2307
+ error: String(err?.message || err),
2308
+ }),
2309
+ });
2310
+ const reasonCode = err?.code || 'landing_branch_materialization_failed';
2311
+ const nextAction = reasonCode === 'landing_branch_merge_conflict'
2312
+ ? `open a recovery worktree with switchman pipeline land ${pipelineId} --recover`
2313
+ : reasonCode === 'landing_branch_missing_component'
2314
+ ? `restore the missing branch and rerun switchman pipeline land ${pipelineId} --refresh`
2315
+ : reasonCode === 'landing_branch_missing_base'
2316
+ ? `restore ${baseBranch} and rerun switchman pipeline land ${pipelineId} --refresh`
2317
+ : `inspect the landing failure and rerun switchman pipeline land ${pipelineId} --refresh`;
2318
+ logAuditEvent(db, {
2319
+ eventType: 'pipeline_landing_branch_materialized',
2320
+ status: 'denied',
2321
+ reasonCode,
2322
+ details: JSON.stringify({
2323
+ pipeline_id: pipelineId,
2324
+ branch: resolvedLandingBranch,
2325
+ base_branch: baseBranch,
2326
+ component_branches: prioritizedBranches,
2327
+ failed_branch: err?.details?.failed_branch || null,
2328
+ conflicting_files: err?.details?.conflicting_files || [],
2329
+ output: err?.details?.output || String(err.message || '').slice(0, 1000),
2330
+ command: reasonCode === 'landing_branch_merge_conflict'
2331
+ ? `switchman pipeline land ${pipelineId} --recover`
2332
+ : `switchman pipeline land ${pipelineId} --refresh`,
2333
+ next_action: nextAction,
2334
+ }),
2335
+ });
2336
+ const wrapped = new Error(`${String(err.message || 'Landing branch materialization failed.')}\nnext: switchman explain landing ${pipelineId}`);
2337
+ wrapped.code = reasonCode;
2338
+ throw wrapped;
2339
+ } finally {
2340
+ updateTempResource(db, landingTempResource.id, {
2341
+ status: existsSync(tempWorktreePath) ? 'active' : 'released',
2342
+ details: stringifyResourceDetails({
2343
+ pipeline_id: pipelineId,
2344
+ branch: resolvedLandingBranch,
2345
+ base_branch: baseBranch,
2346
+ component_branches: prioritizedBranches,
2347
+ operation_type: 'landing_materialize',
2348
+ released_by: existsSync(tempWorktreePath) ? null : 'materialize_cleanup',
2349
+ }),
2350
+ });
2351
+ }
2352
+ const baseCommit = gitRevParse(repoRoot, baseBranch);
2353
+ const componentCommits = collectBranchHeadCommits(repoRoot, prioritizedBranches);
2354
+
2355
+ logAuditEvent(db, {
2356
+ eventType: 'pipeline_landing_branch_materialized',
2357
+ status: 'allowed',
2358
+ details: JSON.stringify({
2359
+ pipeline_id: pipelineId,
2360
+ branch: resolvedLandingBranch,
2361
+ base_branch: baseBranch,
2362
+ base_commit: baseCommit,
2363
+ component_branches: prioritizedBranches,
2364
+ component_commits: componentCommits,
2365
+ head_commit: materialized.head_commit,
2366
+ }),
2367
+ });
2368
+
2369
+ return {
2370
+ pipeline_id: pipelineId,
2371
+ branch: resolvedLandingBranch,
2372
+ base_branch: baseBranch,
2373
+ worktree: null,
2374
+ synthetic: true,
2375
+ component_branches: prioritizedBranches,
2376
+ component_commits: componentCommits,
2377
+ strategy: 'synthetic_integration_branch',
2378
+ head_commit: materialized.head_commit,
2379
+ refreshed: refresh || Boolean(landingStatus.last_materialized),
2380
+ reused_existing: false,
2381
+ stale: false,
2382
+ stale_reasons: [],
2383
+ last_materialized: {
2384
+ head_commit: materialized.head_commit,
2385
+ base_commit: baseCommit,
2386
+ component_branches: prioritizedBranches,
2387
+ component_commits: componentCommits,
2388
+ },
2389
+ };
2390
+ }
2391
+
2392
+ export function getPipelineLandingExplainReport(
2393
+ db,
2394
+ repoRoot,
2395
+ pipelineId,
2396
+ options = {},
2397
+ ) {
2398
+ const landing = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
2399
+ requireCompleted: false,
2400
+ ...options,
2401
+ });
2402
+ const nextAction = landing.last_failure?.next_action
2403
+ || (landing.stale
2404
+ ? `switchman pipeline land ${pipelineId} --refresh`
2405
+ : landing.synthetic
2406
+ ? `switchman queue add --pipeline ${pipelineId}`
2407
+ : `switchman queue add ${landing.branch}`);
2408
+ return {
2409
+ pipeline_id: pipelineId,
2410
+ landing,
2411
+ next_action: nextAction,
2412
+ };
2413
+ }
2414
+
2415
+ export function preparePipelineLandingRecovery(
2416
+ db,
2417
+ repoRoot,
2418
+ pipelineId,
2419
+ {
2420
+ baseBranch = 'main',
2421
+ landingBranch = null,
2422
+ recoveryPath = null,
2423
+ replaceExisting = false,
2424
+ } = {},
2425
+ ) {
2426
+ const landing = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
2427
+ baseBranch,
2428
+ landingBranch,
2429
+ requireCompleted: true,
2430
+ });
2431
+ if (!landing.synthetic) {
2432
+ throw new Error(`Pipeline ${pipelineId} does not need a synthetic landing recovery worktree.`);
2433
+ }
2434
+ if (landing.last_failure?.reason_code !== 'landing_branch_merge_conflict') {
2435
+ if (
2436
+ !replaceExisting
2437
+ && landing.last_recovery?.state?.status === 'active'
2438
+ && landing.last_recovery?.recovery_path
2439
+ ) {
2440
+ return {
2441
+ pipeline_id: pipelineId,
2442
+ branch: landing.branch,
2443
+ base_branch: baseBranch,
2444
+ recovery_path: landing.last_recovery.recovery_path,
2445
+ failed_branch: landing.last_recovery.failed_branch || null,
2446
+ conflicting_files: landing.last_recovery.conflicting_files || [],
2447
+ inspect_command: landing.last_recovery.inspect_command || `git -C ${JSON.stringify(landing.last_recovery.recovery_path)} status`,
2448
+ resume_command: landing.last_recovery.resume_command || `switchman queue add --pipeline ${pipelineId}`,
2449
+ reused_existing: true,
2450
+ };
2451
+ }
2452
+ throw new Error(`Pipeline ${pipelineId} does not have a merge-conflict landing failure to recover.`);
2453
+ }
2454
+ if (landing.last_recovery?.state?.status && !replaceExisting) {
2455
+ if (landing.last_recovery.state.status === 'active' && landing.last_recovery.recovery_path) {
2456
+ return {
2457
+ pipeline_id: pipelineId,
2458
+ branch: landing.branch,
2459
+ base_branch: baseBranch,
2460
+ recovery_path: landing.last_recovery.recovery_path,
2461
+ failed_branch: landing.last_recovery.failed_branch || null,
2462
+ conflicting_files: landing.last_recovery.conflicting_files || [],
2463
+ inspect_command: landing.last_recovery.inspect_command || `git -C ${JSON.stringify(landing.last_recovery.recovery_path)} status`,
2464
+ resume_command: landing.last_recovery.resume_command || `switchman queue add --pipeline ${pipelineId}`,
2465
+ reused_existing: true,
2466
+ };
2467
+ }
2468
+ throw new Error(`Recovery worktree already exists for ${pipelineId} at ${landing.last_recovery.recovery_path}. Reuse it or rerun with \`switchman pipeline land ${pipelineId} --recover --replace-recovery\`.`);
2469
+ }
2470
+ if (landing.last_recovery?.state?.path && replaceExisting) {
2471
+ cleanupPipelineLandingRecovery(db, repoRoot, pipelineId, {
2472
+ baseBranch,
2473
+ landingBranch,
2474
+ recoveryPath: landing.last_recovery.state.path,
2475
+ reason: 'replaced',
2476
+ });
2477
+ }
2478
+
2479
+ const resolvedRecoveryPath = recoveryPath || join(
2480
+ tmpdir(),
2481
+ `${basename(repoRoot)}-landing-recover-${pipelineId}-${Date.now()}`,
2482
+ );
2483
+ const recoveryPrepareOperation = startOperationJournalEntry(db, {
2484
+ scopeType: 'pipeline',
2485
+ scopeId: pipelineId,
2486
+ operationType: 'landing_recovery_prepare',
2487
+ details: JSON.stringify({
2488
+ pipeline_id: pipelineId,
2489
+ branch: landing.branch,
2490
+ base_branch: baseBranch,
2491
+ recovery_path: resolvedRecoveryPath,
2492
+ }),
2493
+ });
2494
+ const recoveryResource = createTempResource(db, {
2495
+ scopeType: 'pipeline',
2496
+ scopeId: pipelineId,
2497
+ operationId: recoveryPrepareOperation.id,
2498
+ resourceType: 'landing_recovery_worktree',
2499
+ path: resolvedRecoveryPath,
2500
+ branch: landing.branch,
2501
+ details: stringifyResourceDetails({
2502
+ pipeline_id: pipelineId,
2503
+ branch: landing.branch,
2504
+ base_branch: baseBranch,
2505
+ operation_type: 'landing_recovery_prepare',
2506
+ recovery_path: resolvedRecoveryPath,
2507
+ }),
2508
+ });
2509
+ const prepared = gitPrepareIntegrationRecoveryWorktree(repoRoot, {
2510
+ branch: landing.branch,
2511
+ baseBranch,
2512
+ mergeBranches: landing.component_branches,
2513
+ recoveryPath: resolvedRecoveryPath,
2514
+ });
2515
+ if (prepared.ok) {
2516
+ try {
2517
+ gitRemoveWorktree(repoRoot, resolvedRecoveryPath);
2518
+ } catch {
2519
+ // Best-effort cleanup; repair will reconcile any leftover tracked resource.
2520
+ }
2521
+ updateTempResource(db, recoveryResource.id, {
2522
+ status: existsSync(resolvedRecoveryPath) ? 'active' : 'released',
2523
+ details: stringifyResourceDetails({
2524
+ pipeline_id: pipelineId,
2525
+ branch: landing.branch,
2526
+ base_branch: baseBranch,
2527
+ operation_type: 'landing_recovery_prepare',
2528
+ recovery_path: resolvedRecoveryPath,
2529
+ released_by: existsSync(resolvedRecoveryPath) ? null : 'recovery_prepare_cleanup',
2530
+ reason: 'conflict_already_resolved',
2531
+ }),
2532
+ });
2533
+ finishOperationJournalEntry(db, recoveryPrepareOperation.id, {
2534
+ status: 'failed',
2535
+ details: JSON.stringify({
2536
+ pipeline_id: pipelineId,
2537
+ branch: landing.branch,
2538
+ base_branch: baseBranch,
2539
+ recovery_path: resolvedRecoveryPath,
2540
+ error: 'Recovery preparation no longer needed because the landing merge conflict is already resolved.',
2541
+ }),
2542
+ });
2543
+ throw new Error(`Pipeline ${pipelineId} no longer has an unresolved landing merge conflict to recover.`);
2544
+ }
2545
+
2546
+ const inspectCommand = `git -C ${JSON.stringify(prepared.recovery_path)} status`;
2547
+ const resumeCommand = `switchman queue add --pipeline ${pipelineId}`;
2548
+
2549
+ logAuditEvent(db, {
2550
+ eventType: 'pipeline_landing_recovery_prepared',
2551
+ status: 'allowed',
2552
+ details: JSON.stringify({
2553
+ pipeline_id: pipelineId,
2554
+ branch: landing.branch,
2555
+ base_branch: baseBranch,
2556
+ recovery_path: prepared.recovery_path,
2557
+ failed_branch: prepared.failed_branch,
2558
+ conflicting_files: prepared.conflicting_files,
2559
+ inspect_command: inspectCommand,
2560
+ resume_command: resumeCommand,
2561
+ }),
2562
+ });
2563
+ finishOperationJournalEntry(db, recoveryPrepareOperation.id, {
2564
+ status: 'completed',
2565
+ details: JSON.stringify({
2566
+ pipeline_id: pipelineId,
2567
+ branch: landing.branch,
2568
+ base_branch: baseBranch,
2569
+ recovery_path: prepared.recovery_path,
2570
+ failed_branch: prepared.failed_branch,
2571
+ conflicting_files: prepared.conflicting_files,
2572
+ }),
2573
+ });
2574
+ updateTempResource(db, recoveryResource.id, {
2575
+ status: 'active',
2576
+ details: stringifyResourceDetails({
2577
+ pipeline_id: pipelineId,
2578
+ branch: landing.branch,
2579
+ base_branch: baseBranch,
2580
+ operation_type: 'landing_recovery_prepare',
2581
+ recovery_path: prepared.recovery_path,
2582
+ failed_branch: prepared.failed_branch,
2583
+ conflicting_files: prepared.conflicting_files,
2584
+ }),
2585
+ });
2586
+
2587
+ return {
2588
+ pipeline_id: pipelineId,
2589
+ branch: landing.branch,
2590
+ base_branch: baseBranch,
2591
+ recovery_path: prepared.recovery_path,
2592
+ failed_branch: prepared.failed_branch,
2593
+ conflicting_files: prepared.conflicting_files,
2594
+ inspect_command: inspectCommand,
2595
+ resume_command: resumeCommand,
2596
+ reused_existing: false,
2597
+ };
2598
+ }
2599
+
2600
+ export function resumePipelineLandingRecovery(
2601
+ db,
2602
+ repoRoot,
2603
+ pipelineId,
2604
+ {
2605
+ baseBranch = 'main',
2606
+ landingBranch = null,
2607
+ recoveryPath = null,
2608
+ } = {},
2609
+ ) {
2610
+ const landing = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
2611
+ baseBranch,
2612
+ landingBranch,
2613
+ requireCompleted: true,
2614
+ });
2615
+ if (!landing.synthetic) {
2616
+ throw new Error(`Pipeline ${pipelineId} does not need a synthetic landing recovery worktree.`);
2617
+ }
2618
+
2619
+ const resolvedRecoveryPath = recoveryPath || landing.last_recovery?.recovery_path || null;
2620
+ if (!landing.last_failure && landing.last_materialized && landing.branch_head_commit) {
2621
+ return {
2622
+ pipeline_id: pipelineId,
2623
+ branch: landing.branch,
2624
+ base_branch: baseBranch,
2625
+ recovery_path: resolvedRecoveryPath,
2626
+ head_commit: landing.branch_head_commit,
2627
+ resume_command: `switchman queue add --pipeline ${pipelineId}`,
2628
+ already_resumed: true,
2629
+ };
2630
+ }
2631
+ if (!resolvedRecoveryPath) {
2632
+ throw new Error(`No recovery worktree is recorded for ${pipelineId}. Run \`switchman pipeline land ${pipelineId} --recover\` first.`);
2633
+ }
2634
+ const recoveryResumeOperation = startOperationJournalEntry(db, {
2635
+ scopeType: 'pipeline',
2636
+ scopeId: pipelineId,
2637
+ operationType: 'landing_recovery_resume',
2638
+ details: JSON.stringify({
2639
+ pipeline_id: pipelineId,
2640
+ branch: landing.branch,
2641
+ recovery_path: resolvedRecoveryPath,
2642
+ }),
2643
+ });
2644
+
2645
+ const currentBranch = getWorktreeBranch(resolvedRecoveryPath);
2646
+ if (currentBranch !== landing.branch) {
2647
+ finishOperationJournalEntry(db, recoveryResumeOperation.id, {
2648
+ status: 'failed',
2649
+ details: JSON.stringify({
2650
+ pipeline_id: pipelineId,
2651
+ branch: landing.branch,
2652
+ recovery_path: resolvedRecoveryPath,
2653
+ error: `Recovery worktree is on ${currentBranch || 'no branch'}.`,
2654
+ }),
2655
+ });
2656
+ throw new Error(`Recovery worktree must be on ${landing.branch}, but is on ${currentBranch || 'no branch'}.`);
2657
+ }
2658
+
2659
+ const statusResult = spawnSync('git', ['status', '--porcelain'], {
2660
+ cwd: resolvedRecoveryPath,
2661
+ encoding: 'utf8',
2662
+ });
2663
+ if (statusResult.status !== 0) {
2664
+ finishOperationJournalEntry(db, recoveryResumeOperation.id, {
2665
+ status: 'failed',
2666
+ details: JSON.stringify({
2667
+ pipeline_id: pipelineId,
2668
+ branch: landing.branch,
2669
+ recovery_path: resolvedRecoveryPath,
2670
+ error: `Could not inspect recovery worktree ${resolvedRecoveryPath}.`,
2671
+ }),
2672
+ });
2673
+ throw new Error(`Could not inspect recovery worktree ${resolvedRecoveryPath}.`);
2674
+ }
2675
+ const pendingChanges = String(statusResult.stdout || '').trim();
2676
+ if (pendingChanges) {
2677
+ finishOperationJournalEntry(db, recoveryResumeOperation.id, {
2678
+ status: 'failed',
2679
+ details: JSON.stringify({
2680
+ pipeline_id: pipelineId,
2681
+ branch: landing.branch,
2682
+ recovery_path: resolvedRecoveryPath,
2683
+ error: 'Recovery worktree still has unresolved or uncommitted changes.',
2684
+ }),
2685
+ });
2686
+ throw new Error(`Recovery worktree ${resolvedRecoveryPath} still has unresolved or uncommitted changes. Commit the resolved landing branch first.`);
2687
+ }
2688
+
2689
+ const recoveredHead = gitRevParse(resolvedRecoveryPath, 'HEAD');
2690
+ const branchHead = gitRevParse(repoRoot, landing.branch);
2691
+ if (!recoveredHead || !branchHead || recoveredHead !== branchHead) {
2692
+ finishOperationJournalEntry(db, recoveryResumeOperation.id, {
2693
+ status: 'failed',
2694
+ details: JSON.stringify({
2695
+ pipeline_id: pipelineId,
2696
+ branch: landing.branch,
2697
+ recovery_path: resolvedRecoveryPath,
2698
+ error: 'Recovery worktree head is not aligned with the landing branch head.',
2699
+ }),
2700
+ });
2701
+ throw new Error(`Recovery worktree ${resolvedRecoveryPath} is not aligned with ${landing.branch}. Push or commit the resolved landing branch there first.`);
2702
+ }
2703
+
2704
+ const componentCommits = collectBranchHeadCommits(repoRoot, landing.component_branches);
2705
+ logAuditEvent(db, {
2706
+ eventType: 'pipeline_landing_recovery_resumed',
2707
+ status: 'allowed',
2708
+ details: JSON.stringify({
2709
+ pipeline_id: pipelineId,
2710
+ branch: landing.branch,
2711
+ base_branch: baseBranch,
2712
+ base_commit: gitRevParse(repoRoot, baseBranch),
2713
+ head_commit: branchHead,
2714
+ component_branches: landing.component_branches,
2715
+ component_commits: componentCommits,
2716
+ recovery_path: resolvedRecoveryPath,
2717
+ resume_command: `switchman queue add --pipeline ${pipelineId}`,
2718
+ }),
2719
+ });
2720
+ finishOperationJournalEntry(db, recoveryResumeOperation.id, {
2721
+ status: 'completed',
2722
+ details: JSON.stringify({
2723
+ pipeline_id: pipelineId,
2724
+ branch: landing.branch,
2725
+ recovery_path: resolvedRecoveryPath,
2726
+ head_commit: branchHead,
2727
+ }),
2728
+ });
2729
+ const activeRecoveryResources = listTempResources(db, {
2730
+ scopeType: 'pipeline',
2731
+ scopeId: pipelineId,
2732
+ resourceType: 'landing_recovery_worktree',
2733
+ status: 'active',
2734
+ limit: 20,
2735
+ }).filter((resource) => resource.path === resolvedRecoveryPath);
2736
+ for (const resource of activeRecoveryResources) {
2737
+ updateTempResource(db, resource.id, {
2738
+ status: 'resolved',
2739
+ details: stringifyResourceDetails({
2740
+ pipeline_id: pipelineId,
2741
+ branch: landing.branch,
2742
+ operation_type: 'landing_recovery_resume',
2743
+ recovery_path: resolvedRecoveryPath,
2744
+ head_commit: branchHead,
2745
+ }),
2746
+ });
2747
+ }
2748
+
2749
+ return {
2750
+ pipeline_id: pipelineId,
2751
+ branch: landing.branch,
2752
+ base_branch: baseBranch,
2753
+ recovery_path: resolvedRecoveryPath,
2754
+ head_commit: branchHead,
2755
+ resume_command: `switchman queue add --pipeline ${pipelineId}`,
2756
+ already_resumed: false,
2757
+ };
2758
+ }
2759
+
2760
/**
 * Remove a pipeline's landing-recovery worktree (when git still tracks it and
 * the directory exists) and reconcile the temp-resource rows that point at it.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @param {string} pipelineId - Pipeline whose recovery state is being cleared.
 * @param {object} [options]
 * @param {string} [options.baseBranch='main'] - Base branch for landing status.
 * @param {string|null} [options.landingBranch=null] - Explicit landing branch override.
 * @param {string|null} [options.recoveryPath=null] - Explicit worktree path; falls back to the recorded one.
 * @param {string} [options.reason='manual_cleanup'] - Audit/journal reason tag.
 * @returns {object} Summary: path, whether it existed, was tracked, and was removed.
 * @throws {Error} When no recovery worktree is recorded, or worktree removal fails.
 */
export function cleanupPipelineLandingRecovery(
  db,
  repoRoot,
  pipelineId,
  {
    baseBranch = 'main',
    landingBranch = null,
    recoveryPath = null,
    reason = 'manual_cleanup',
  } = {},
) {
  const landing = getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
    baseBranch,
    landingBranch,
    requireCompleted: true,
  });
  const targetPath = recoveryPath || landing.last_recovery?.recovery_path || null;
  if (!targetPath) {
    throw new Error(`No recovery worktree is recorded for ${pipelineId}.`);
  }
  const recoveryCleanupOperation = startOperationJournalEntry(db, {
    scopeType: 'pipeline',
    scopeId: pipelineId,
    operationType: 'landing_recovery_cleanup',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      branch: landing.branch,
      recovery_path: targetPath,
      reason,
    }),
  });

  const exists = existsSync(targetPath);
  // realpathSync only works on existing paths; compare both the raw and the
  // resolved path against git's worktree list so symlinked locations match.
  const normalizedTargetPath = exists ? realpathSync(targetPath) : targetPath;
  const tracked = listGitWorktrees(repoRoot).some((worktree) =>
    worktree.path === targetPath || worktree.path === normalizedTargetPath,
  );
  if (tracked && exists) {
    try {
      gitRemoveWorktree(repoRoot, targetPath);
    } catch (err) {
      // Close the journal entry as failed (mirroring the resume flow's failure
      // handling) instead of leaving the operation dangling as in-progress.
      finishOperationJournalEntry(db, recoveryCleanupOperation.id, {
        status: 'failed',
        details: JSON.stringify({
          pipeline_id: pipelineId,
          branch: landing.branch,
          recovery_path: targetPath,
          error: String(err.message || err),
        }),
      });
      throw err;
    }
  }

  // Check the filesystem once after removal instead of twice per resource row.
  const stillExists = existsSync(targetPath);
  const removed = tracked && exists;
  const trackedRecoveryResources = listTempResources(db, {
    scopeType: 'pipeline',
    scopeId: pipelineId,
    resourceType: 'landing_recovery_worktree',
    limit: 20,
  }).filter((resource) => resource.path === targetPath && resource.status !== 'released');
  for (const resource of trackedRecoveryResources) {
    // An abandoned resource stays abandoned once the path is gone; otherwise
    // it is active while the directory remains and released once it is gone.
    const nextStatus = resource.status === 'abandoned' && !stillExists
      ? 'abandoned'
      : stillExists
        ? 'active'
        : 'released';
    updateTempResource(db, resource.id, {
      status: nextStatus,
      details: stringifyResourceDetails({
        pipeline_id: pipelineId,
        branch: landing.branch,
        operation_type: 'landing_recovery_cleanup',
        recovery_path: targetPath,
        removed,
        reason,
        prior_status: resource.status,
      }),
    });
  }

  logAuditEvent(db, {
    eventType: 'pipeline_landing_recovery_cleared',
    status: 'allowed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      branch: landing.branch,
      recovery_path: targetPath,
      existed: exists,
      tracked,
      reason,
    }),
  });
  finishOperationJournalEntry(db, recoveryCleanupOperation.id, {
    status: 'completed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      branch: landing.branch,
      recovery_path: targetPath,
      removed,
      reason,
    }),
  });

  return {
    pipeline_id: pipelineId,
    branch: landing.branch,
    recovery_path: targetPath,
    existed: exists,
    tracked,
    removed,
    reason,
  };
}
2859
+
2860
/**
 * Best-effort repair of a pipeline's landing state: clears stale recovery
 * worktree records, then re-materializes the landing branch when it has
 * drifted. Returns a report of what was done and the suggested next command.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @param {string} pipelineId - Pipeline to repair.
 * @param {object} [options]
 * @param {string} [options.baseBranch='main'] - Base branch for landing status.
 * @param {string|null} [options.landingBranch=null] - Explicit landing branch override.
 * @returns {object} Report: repaired flag, actions, notes, landing, next_action.
 */
export function repairPipelineState(
  db,
  repoRoot,
  pipelineId,
  {
    baseBranch = 'main',
    landingBranch = null,
  } = {},
) {
  // Re-reads landing status with identical options after each repair step.
  const loadLanding = () => getPipelineLandingBranchStatus(db, repoRoot, pipelineId, {
    baseBranch,
    landingBranch,
    requireCompleted: false,
  });

  const performedActions = [];
  let landing;
  try {
    landing = loadLanding();
  } catch (err) {
    // No landing state to inspect: report why and bail without repairing.
    return {
      pipeline_id: pipelineId,
      repaired: false,
      actions: performedActions,
      notes: [String(err.message || 'Pipeline repair found no landing state to repair.')],
      next_action: `switchman pipeline status ${pipelineId}`,
    };
  }

  // Recovery records in any of these states point at a worktree that no
  // longer needs to exist, so clear them before touching the landing branch.
  const clearableRecoveryStates = ['missing', 'resolved_missing', 'untracked', 'resolved_untracked', 'moved', 'resolved_moved'];
  const recoveryStatus = landing.last_recovery?.state?.status || null;
  if (clearableRecoveryStates.includes(recoveryStatus) && landing.last_recovery?.recovery_path) {
    const cleared = cleanupPipelineLandingRecovery(db, repoRoot, pipelineId, {
      baseBranch,
      landingBranch,
      recoveryPath: landing.last_recovery.recovery_path,
      reason: `repair_${recoveryStatus}_recovery`,
    });
    performedActions.push({
      kind: 'recovery_state_cleared',
      recovery_path: cleared.recovery_path,
      removed: cleared.removed,
      recovery_status: recoveryStatus,
      branch_worktree_path: landing.last_recovery?.state?.branch_worktree_path || null,
    });
    landing = loadLanding();
  }

  // Refresh only synthetic landing branches that are stale (or exist without
  // a materialization record) and have no outstanding failure to clear first.
  const needsLandingRefresh = landing.synthetic
    && !landing.last_failure
    && (landing.stale || (landing.branch_exists && !landing.last_materialized));
  if (needsLandingRefresh) {
    const refreshed = materializePipelineLandingBranch(db, repoRoot, pipelineId, {
      baseBranch,
      landingBranch,
      requireCompleted: false,
      refresh: true,
    });
    performedActions.push({
      kind: landing.last_materialized ? 'landing_branch_refreshed' : 'landing_branch_reconciled',
      branch: refreshed.branch,
      head_commit: refreshed.head_commit || null,
    });
    landing = loadLanding();
  }

  const notes = performedActions.length === 0 ? ['No repair action was needed.'] : [];
  const nextAction = landing.last_failure?.next_action
    || (landing.synthetic
      ? `switchman queue add --pipeline ${pipelineId}`
      : landing.branch
        ? `switchman queue add ${landing.branch}`
        : `switchman pipeline status ${pipelineId}`);

  return {
    pipeline_id: pipelineId,
    repaired: performedActions.length > 0,
    actions: performedActions,
    notes,
    landing,
    next_action: nextAction,
  };
}
2957
+
2958
/**
 * Pick the branch a pipeline should land from: either a single component
 * branch resolved directly, or a synthetic landing branch materialized when
 * the pipeline spans more than one branch.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @param {string} pipelineId - Pipeline whose landing target is needed.
 * @param {object} [options]
 * @param {string} [options.baseBranch='main'] - Base branch for materialization.
 * @param {string|null} [options.explicitHeadBranch=null] - Caller-pinned head branch.
 * @param {boolean} [options.requireCompleted=false] - Require all tasks done when resolving directly.
 * @param {boolean} [options.allowCurrentBranchFallback=true] - Allow falling back to the checked-out branch.
 * @param {string|null} [options.landingBranch=null] - Explicit synthetic landing branch name.
 * @returns {object} Landing target descriptor (synthetic or direct).
 */
export function preparePipelineLandingTarget(
  db,
  repoRoot,
  pipelineId,
  {
    baseBranch = 'main',
    explicitHeadBranch = null,
    requireCompleted = false,
    allowCurrentBranchFallback = true,
    landingBranch = null,
  } = {},
) {
  // Both fallback paths materialize the synthetic landing branch identically.
  const materializeSyntheticLanding = () => materializePipelineLandingBranch(db, repoRoot, pipelineId, {
    baseBranch,
    landingBranch,
    requireCompleted: true,
    refresh: true,
  });

  const pipelineStatus = getPipelineStatus(db, pipelineId);
  const everyTaskDone = pipelineStatus.tasks.length > 0
    && pipelineStatus.tasks.every((task) => task.status === 'done');
  const { candidateBranches } = collectPipelineLandingCandidates(db, pipelineStatus);

  // A finished multi-branch pipeline always lands through the synthetic
  // branch unless the caller pinned an explicit head branch.
  if (!explicitHeadBranch && everyTaskDone && candidateBranches.length > 1) {
    return materializeSyntheticLanding();
  }

  try {
    const resolved = resolvePipelineLandingTarget(db, repoRoot, pipelineStatus, {
      explicitHeadBranch,
      requireCompleted,
      allowCurrentBranchFallback,
    });
    return {
      pipeline_id: pipelineId,
      ...resolved,
      synthetic: false,
      component_branches: [resolved.branch],
      head_commit: null,
    };
  } catch (err) {
    // Only the multi-branch case falls back to the synthetic landing branch;
    // every other resolution failure propagates unchanged.
    if (!String(err.message || '').includes('spans multiple branches')) {
      throw err;
    }
    return materializeSyntheticLanding();
  }
}
3008
+
3009
+ export async function publishPipelinePr(
3010
+ db,
3011
+ repoRoot,
3012
+ pipelineId,
3013
+ {
3014
+ baseBranch = 'main',
3015
+ headBranch = null,
3016
+ draft = false,
3017
+ ghCommand = 'gh',
3018
+ outputDir = null,
3019
+ } = {},
3020
+ ) {
3021
+ const policyGate = await evaluatePipelinePolicyGate(db, repoRoot, pipelineId);
3022
+ if (!policyGate.ok) {
3023
+ logAuditEvent(db, {
3024
+ eventType: 'pipeline_pr_published',
3025
+ status: 'denied',
3026
+ reasonCode: policyGate.reason_code,
3027
+ details: JSON.stringify({
3028
+ pipeline_id: pipelineId,
3029
+ base_branch: baseBranch,
3030
+ head_branch: headBranch,
3031
+ policy_state: policyGate.policy_state,
3032
+ next_action: policyGate.next_action,
3033
+ }),
3034
+ });
3035
+ throw new Error(`${policyGate.summary} Next: ${policyGate.next_action}`);
3036
+ }
3037
+
3038
+ const bundle = await exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir);
3039
+ const resolvedLandingTarget = preparePipelineLandingTarget(db, repoRoot, pipelineId, {
3040
+ baseBranch,
3041
+ explicitHeadBranch: headBranch,
3042
+ requireCompleted: false,
3043
+ allowCurrentBranchFallback: true,
3044
+ });
3045
+ const resolvedHeadBranch = resolvedLandingTarget.branch;
3046
+
3047
+ const args = [
3048
+ 'pr',
3049
+ 'create',
3050
+ '--base',
3051
+ baseBranch,
3052
+ '--head',
3053
+ resolvedHeadBranch,
3054
+ '--title',
3055
+ bundle.summary.pr_artifact.title,
3056
+ '--body-file',
3057
+ bundle.files.pr_body_markdown,
3058
+ ];
3059
+
3060
+ if (draft) {
863
3061
  args.push('--draft');
864
3062
  }
865
3063
 
@@ -894,16 +3092,149 @@ export async function publishPipelinePr(
894
3092
  pipeline_id: pipelineId,
895
3093
  base_branch: baseBranch,
896
3094
  head_branch: resolvedHeadBranch,
3095
+ landing_strategy: resolvedLandingTarget.strategy,
897
3096
  draft,
898
3097
  bundle,
899
3098
  output,
900
3099
  };
901
3100
  }
902
3101
 
3102
/**
 * Post the pipeline's landing summary as a comment on an existing pull
 * request via the GitHub CLI, auditing the attempt either way.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @param {string} pipelineId - Pipeline whose summary is being posted.
 * @param {object} [options]
 * @param {string|number} options.prNumber - Target pull request number (required).
 * @param {string} [options.ghCommand='gh'] - GitHub CLI executable.
 * @param {string|null} [options.outputDir=null] - Directory for the exported bundle.
 * @param {boolean} [options.updateExisting=false] - Edit the last comment instead of adding one.
 * @returns {Promise<object>} Posted-comment summary including the bundle and gh output.
 * @throws {Error} When prNumber is missing or the gh invocation fails.
 */
export async function commentPipelinePr(
  db,
  repoRoot,
  pipelineId,
  {
    prNumber,
    ghCommand = 'gh',
    outputDir = null,
    updateExisting = false,
  } = {},
) {
  if (!prNumber) {
    throw new Error('A pull request number is required.');
  }

  const bundle = await exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir);
  const ghArgs = ['pr', 'comment', String(prNumber), '--body-file', bundle.files.landing_summary_markdown];
  if (updateExisting) {
    // Edit the most recent comment, creating one when none exists yet.
    ghArgs.push('--edit-last', '--create-if-none');
  }

  const ghResult = spawnSync(ghCommand, ghArgs, { cwd: repoRoot, encoding: 'utf8' });
  const succeeded = !ghResult.error && ghResult.status === 0;
  const combinedOutput = `${ghResult.stdout || ''}${ghResult.stderr || ''}`.trim();

  logAuditEvent(db, {
    eventType: 'pipeline_pr_commented',
    status: succeeded ? 'allowed' : 'denied',
    reasonCode: succeeded ? null : 'pr_comment_failed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      pr_number: String(prNumber),
      gh_command: ghCommand,
      update_existing: updateExisting,
      body_file: bundle.files.landing_summary_markdown,
      exit_code: ghResult.status,
      output: combinedOutput.slice(0, 500),
    }),
  });

  if (!succeeded) {
    throw new Error(ghResult.error?.message || combinedOutput || `gh pr comment failed with status ${ghResult.status}`);
  }

  return {
    pipeline_id: pipelineId,
    pr_number: String(prNumber),
    bundle,
    output: combinedOutput,
    updated_existing: updateExisting,
  };
}
3165
+
3166
/**
 * Regenerate the pipeline's PR bundle and, when a PR number is supplied,
 * push the landing summary to that PR as a (possibly edited) comment.
 *
 * @param {object} db - Open database handle.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @param {string} pipelineId - Pipeline to sync.
 * @param {object} [options]
 * @param {string|number} [options.prNumber] - Pull request to comment on; omit to only export the bundle.
 * @param {string} [options.ghCommand='gh'] - GitHub CLI executable.
 * @param {string|null} [options.outputDir=null] - Directory for the exported bundle.
 * @param {boolean} [options.updateExisting=true] - Edit the last comment instead of adding one.
 * @returns {Promise<object>} Bundle plus comment details (null when no PR number given).
 * @throws {Error} When the gh invocation fails.
 */
export async function syncPipelinePr(
  db,
  repoRoot,
  pipelineId,
  {
    prNumber,
    ghCommand = 'gh',
    outputDir = null,
    updateExisting = true,
  } = {},
) {
  const bundle = await exportPipelinePrBundle(db, repoRoot, pipelineId, outputDir);

  // Without a PR number this is an export-only sync.
  if (!prNumber) {
    return { pipeline_id: pipelineId, bundle, comment: null };
  }

  const ghArgs = ['pr', 'comment', String(prNumber), '--body-file', bundle.files.landing_summary_markdown];
  if (updateExisting) {
    // Edit the most recent comment, creating one when none exists yet.
    ghArgs.push('--edit-last', '--create-if-none');
  }

  const ghResult = spawnSync(ghCommand, ghArgs, { cwd: repoRoot, encoding: 'utf8' });
  const succeeded = !ghResult.error && ghResult.status === 0;
  const combinedOutput = `${ghResult.stdout || ''}${ghResult.stderr || ''}`.trim();

  logAuditEvent(db, {
    eventType: 'pipeline_pr_synced',
    status: succeeded ? 'allowed' : 'denied',
    reasonCode: succeeded ? null : 'pr_sync_failed',
    details: JSON.stringify({
      pipeline_id: pipelineId,
      pr_number: String(prNumber),
      gh_command: ghCommand,
      update_existing: updateExisting,
      body_file: bundle.files.landing_summary_markdown,
      exit_code: ghResult.status,
      output: combinedOutput.slice(0, 500),
    }),
  });

  if (!succeeded) {
    throw new Error(ghResult.error?.message || combinedOutput || `gh pr comment failed with status ${ghResult.status}`);
  }

  return {
    pipeline_id: pipelineId,
    bundle,
    comment: {
      pr_number: String(prNumber),
      output: combinedOutput,
      updated_existing: updateExisting,
    },
  };
}
+
903
3233
  export async function createPipelineFollowupTasks(db, repoRoot, pipelineId) {
904
3234
  const status = getPipelineStatus(db, pipelineId);
905
3235
  const report = await scanAllWorktrees(db, repoRoot);
906
3236
  const aiGate = await runAiMergeGate(db, repoRoot);
3237
+ const changePolicy = loadChangePolicy(repoRoot);
907
3238
  const existingTitles = new Set(status.tasks.map((task) => task.title));
908
3239
  const hasPlannedTestsTask = status.tasks.some((task) =>
909
3240
  task.task_spec?.task_type === 'tests' && !task.title.startsWith('Add missing tests'),
@@ -973,6 +3304,46 @@ export async function createPipelineFollowupTasks(db, repoRoot, pipelineId) {
973
3304
  }
974
3305
  }
975
3306
 
3307
+ const implementationTasks = status.tasks.filter((task) => task.task_spec?.task_type === 'implementation');
3308
+ for (const task of implementationTasks) {
3309
+ const taskDomains = task.task_spec?.subsystem_tags || [];
3310
+ const requiredTaskTypes = new Set(task.task_spec?.validation_rules?.required_completed_task_types || []);
3311
+ for (const domain of taskDomains) {
3312
+ const rule = changePolicy.domain_rules?.[domain];
3313
+ for (const taskType of rule?.required_completed_task_types || []) {
3314
+ requiredTaskTypes.add(taskType);
3315
+ }
3316
+ }
3317
+
3318
+ const completedTypes = new Set(
3319
+ status.tasks
3320
+ .filter((candidate) => candidate.status === 'done')
3321
+ .map((candidate) => candidate.task_spec?.task_type)
3322
+ .filter(Boolean),
3323
+ );
3324
+
3325
+ if (requiredTaskTypes.has('tests') && !completedTypes.has('tests')) {
3326
+ maybeCreateTask(
3327
+ `Add policy-required tests for ${task.title}`,
3328
+ `Policy-triggered follow-up for ${task.id}. Domains: ${taskDomains.join(', ')}.`,
3329
+ );
3330
+ }
3331
+
3332
+ if (requiredTaskTypes.has('docs') && !completedTypes.has('docs')) {
3333
+ maybeCreateTask(
3334
+ `Add policy-required docs for ${task.title}`,
3335
+ `Policy-triggered follow-up for ${task.id}. Domains: ${taskDomains.join(', ')}.`,
3336
+ );
3337
+ }
3338
+
3339
+ if (requiredTaskTypes.has('governance') && !completedTypes.has('governance')) {
3340
+ maybeCreateTask(
3341
+ `Add policy review for ${task.title}`,
3342
+ `Policy-triggered governance follow-up for ${task.id}. Domains: ${taskDomains.join(', ')}.`,
3343
+ );
3344
+ }
3345
+ }
3346
+
976
3347
  logAuditEvent(db, {
977
3348
  eventType: 'pipeline_followups_created',
978
3349
  status: created.length > 0 ? 'allowed' : 'info',