switchman-dev 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/core/db.js CHANGED
@@ -5,12 +5,16 @@
5
5
 
6
6
  import { createHash, createHmac, randomBytes } from 'node:crypto';
7
7
  import { DatabaseSync } from 'node:sqlite';
8
- import { chmodSync, existsSync, mkdirSync, readFileSync, realpathSync, writeFileSync } from 'fs';
9
- import { join, resolve } from 'path';
8
+ import { chmodSync, existsSync, mkdirSync, readFileSync, realpathSync, rmSync, writeFileSync } from 'fs';
9
+ import { join, posix, resolve } from 'path';
10
+ import { matchesPathPatterns } from './ignore.js';
11
+ import { buildModuleDependencyIndexForPath, buildSemanticIndexForPath, classifySubsystemsForPath, listTrackedFiles } from './semantic.js';
10
12
 
11
13
  const SWITCHMAN_DIR = '.switchman';
12
14
  const DB_FILE = 'switchman.db';
13
15
  const AUDIT_KEY_FILE = 'audit.key';
16
+ const MIGRATION_STATE_FILE = 'migration-state.json';
17
+ const CURRENT_SCHEMA_VERSION = 6;
14
18
 
15
19
  // How long (ms) a writer will wait for a lock before giving up.
16
20
  // 5 seconds is generous for a CLI tool with 3-10 concurrent agents.
@@ -18,6 +22,7 @@ const BUSY_TIMEOUT_MS = 10000;
18
22
  const CLAIM_RETRY_DELAY_MS = 200;
19
23
  const CLAIM_RETRY_ATTEMPTS = 20;
20
24
  export const DEFAULT_STALE_LEASE_MINUTES = 15;
25
+ const DB_PRUNE_RETENTION_DAYS = 30;
21
26
 
22
27
  function sleepSync(ms) {
23
28
  Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
@@ -50,6 +55,22 @@ function normalizeWorktreePath(path) {
50
55
  }
51
56
  }
52
57
 
58
+ function normalizeClaimedFilePath(filePath) {
59
+ const rawPath = String(filePath || '').replace(/\\/g, '/').trim();
60
+ const normalized = posix.normalize(rawPath.replace(/^\.\/+/, ''));
61
+ if (
62
+ normalized === '' ||
63
+ normalized === '.' ||
64
+ normalized === '..' ||
65
+ normalized.startsWith('../') ||
66
+ rawPath.startsWith('/') ||
67
+ /^[A-Za-z]:\//.test(rawPath)
68
+ ) {
69
+ throw new Error('Claimed file paths must stay inside the repository.');
70
+ }
71
+ return normalized;
72
+ }
73
+
53
74
  function makeId(prefix) {
54
75
  return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
55
76
  }
@@ -72,6 +93,39 @@ function getTableColumns(db, tableName) {
72
93
  return db.prepare(`PRAGMA table_info(${tableName})`).all().map((column) => column.name);
73
94
  }
74
95
 
96
+ function getSchemaVersion(db) {
97
+ return Number(db.prepare('PRAGMA user_version').get()?.user_version || 0);
98
+ }
99
+
100
+ function setSchemaVersion(db, version) {
101
+ db.exec(`PRAGMA user_version=${Number(version) || 0}`);
102
+ }
103
+
104
+ function getMigrationStatePath(repoRoot) {
105
+ return join(getSwitchmanDir(repoRoot), MIGRATION_STATE_FILE);
106
+ }
107
+
108
+ function readMigrationState(repoRoot) {
109
+ const path = getMigrationStatePath(repoRoot);
110
+ if (!existsSync(path)) return null;
111
+ try {
112
+ return JSON.parse(readFileSync(path, 'utf8'));
113
+ } catch {
114
+ throw new Error(`Switchman migration state is unreadable at ${path}. Remove or repair it before reopening the database.`);
115
+ }
116
+ }
117
+
118
+ function writeMigrationState(repoRoot, state) {
119
+ writeFileSync(getMigrationStatePath(repoRoot), `${JSON.stringify(state, null, 2)}\n`);
120
+ }
121
+
122
+ function clearMigrationState(repoRoot) {
123
+ const path = getMigrationStatePath(repoRoot);
124
+ if (existsSync(path)) {
125
+ rmSync(path, { force: true });
126
+ }
127
+ }
128
+
75
129
  function getAuditSecret(repoRoot) {
76
130
  const keyPath = join(getSwitchmanDir(repoRoot), AUDIT_KEY_FILE);
77
131
  if (!existsSync(keyPath)) {
@@ -120,7 +174,7 @@ function signAuditEntry(secret, entryHash) {
120
174
  return createHmac('sha256', secret).update(entryHash).digest('hex');
121
175
  }
122
176
 
123
- function ensureSchema(db) {
177
+ function applySchemaVersion1(db) {
124
178
  db.exec(`
125
179
  CREATE TABLE IF NOT EXISTS tasks (
126
180
  id TEXT PRIMARY KEY,
@@ -292,6 +346,8 @@ function ensureSchema(db) {
292
346
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
293
347
  updated_at TEXT NOT NULL DEFAULT (datetime('now')),
294
348
  last_attempt_at TEXT,
349
+ backoff_until TEXT,
350
+ escalated_at TEXT,
295
351
  started_at TEXT,
296
352
  finished_at TEXT
297
353
  );
@@ -376,11 +432,183 @@ function ensureSchema(db) {
376
432
  CREATE INDEX IF NOT EXISTS idx_merge_queue_pipeline_id ON merge_queue(source_pipeline_id);
377
433
  CREATE INDEX IF NOT EXISTS idx_merge_queue_events_item ON merge_queue_events(queue_item_id);
378
434
  `);
435
+ }
436
+
437
+ function applySchemaVersion2(db) {
438
+ db.exec(`
439
+ CREATE TABLE IF NOT EXISTS operation_journal (
440
+ id TEXT PRIMARY KEY,
441
+ scope_type TEXT NOT NULL,
442
+ scope_id TEXT NOT NULL,
443
+ operation_type TEXT NOT NULL,
444
+ status TEXT NOT NULL DEFAULT 'running',
445
+ details TEXT,
446
+ started_at TEXT NOT NULL DEFAULT (datetime('now')),
447
+ updated_at TEXT NOT NULL DEFAULT (datetime('now')),
448
+ finished_at TEXT
449
+ );
450
+
451
+ CREATE INDEX IF NOT EXISTS idx_operation_journal_scope
452
+ ON operation_journal(scope_type, scope_id, started_at);
453
+ CREATE INDEX IF NOT EXISTS idx_operation_journal_status
454
+ ON operation_journal(status, started_at);
455
+ `);
379
456
 
380
457
  migrateLegacyAuditLog(db);
381
458
  migrateLegacyActiveTasks(db);
382
459
  }
383
460
 
461
+ function applySchemaVersion3(db) {
462
+ db.exec(`
463
+ CREATE TABLE IF NOT EXISTS temp_resources (
464
+ id TEXT PRIMARY KEY,
465
+ scope_type TEXT NOT NULL,
466
+ scope_id TEXT NOT NULL,
467
+ operation_id TEXT,
468
+ resource_type TEXT NOT NULL,
469
+ path TEXT NOT NULL,
470
+ branch TEXT,
471
+ status TEXT NOT NULL DEFAULT 'active',
472
+ details TEXT,
473
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
474
+ updated_at TEXT NOT NULL DEFAULT (datetime('now')),
475
+ released_at TEXT
476
+ );
477
+
478
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_scope
479
+ ON temp_resources(scope_type, scope_id, created_at);
480
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_status
481
+ ON temp_resources(status, created_at);
482
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_path
483
+ ON temp_resources(path);
484
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_operation
485
+ ON temp_resources(operation_id);
486
+ `);
487
+ }
488
+
489
+ function applySchemaVersion4(db) {
490
+ const mergeQueueColumns = getTableColumns(db, 'merge_queue');
491
+ if (mergeQueueColumns.length > 0 && !mergeQueueColumns.includes('backoff_until')) {
492
+ db.exec(`ALTER TABLE merge_queue ADD COLUMN backoff_until TEXT`);
493
+ }
494
+ if (mergeQueueColumns.length > 0 && !mergeQueueColumns.includes('escalated_at')) {
495
+ db.exec(`ALTER TABLE merge_queue ADD COLUMN escalated_at TEXT`);
496
+ }
497
+
498
+ db.exec(`
499
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_backoff_until ON merge_queue(backoff_until);
500
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_escalated_at ON merge_queue(escalated_at);
501
+ `);
502
+ }
503
+
504
+ function applySchemaVersion5(db) {
505
+ db.exec(`
506
+ CREATE TABLE IF NOT EXISTS policy_overrides (
507
+ id TEXT PRIMARY KEY,
508
+ pipeline_id TEXT NOT NULL,
509
+ requirement_keys TEXT NOT NULL DEFAULT '[]',
510
+ task_types TEXT NOT NULL DEFAULT '[]',
511
+ status TEXT NOT NULL DEFAULT 'active',
512
+ reason TEXT NOT NULL,
513
+ approved_by TEXT,
514
+ details TEXT,
515
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
516
+ revoked_at TEXT,
517
+ revoked_by TEXT,
518
+ revoked_reason TEXT
519
+ );
520
+
521
+ CREATE INDEX IF NOT EXISTS idx_policy_overrides_pipeline
522
+ ON policy_overrides(pipeline_id, created_at);
523
+ CREATE INDEX IF NOT EXISTS idx_policy_overrides_status
524
+ ON policy_overrides(status, created_at);
525
+ `);
526
+ }
527
+
528
+ function applySchemaVersion6(db) {
529
+ db.exec(`
530
+ UPDATE file_claims
531
+ SET released_at = COALESCE(released_at, datetime('now'))
532
+ WHERE released_at IS NULL
533
+ AND id NOT IN (
534
+ SELECT MIN(id)
535
+ FROM file_claims
536
+ WHERE released_at IS NULL
537
+ GROUP BY file_path
538
+ );
539
+
540
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_file_claims_active_path
541
+ ON file_claims(file_path)
542
+ WHERE released_at IS NULL;
543
+ `);
544
+ }
545
+
546
+ function ensureSchemaMigrated(db) {
547
+ const repoRoot = db.__switchmanRepoRoot;
548
+ if (!repoRoot) {
549
+ throw new Error('Database repo root is not configured.');
550
+ }
551
+
552
+ const recordedState = readMigrationState(repoRoot);
553
+ const currentVersion = getSchemaVersion(db);
554
+
555
+ if (currentVersion > CURRENT_SCHEMA_VERSION) {
556
+ throw new Error(`Switchman database schema version ${currentVersion} is newer than this CLI supports (${CURRENT_SCHEMA_VERSION}). Upgrade Switchman before opening this repo.`);
557
+ }
558
+
559
+ if (recordedState?.status === 'running') {
560
+ throw new Error(`Switchman detected an interrupted database migration from version ${recordedState.from_version} to ${recordedState.to_version}. Resolve the migration state in ${getMigrationStatePath(repoRoot)} before reopening the database.`);
561
+ }
562
+
563
+ if (currentVersion === CURRENT_SCHEMA_VERSION) {
564
+ if (recordedState) {
565
+ clearMigrationState(repoRoot);
566
+ }
567
+ return;
568
+ }
569
+
570
+ writeMigrationState(repoRoot, {
571
+ status: 'running',
572
+ from_version: currentVersion,
573
+ to_version: CURRENT_SCHEMA_VERSION,
574
+ started_at: new Date().toISOString(),
575
+ });
576
+
577
+ try {
578
+ withImmediateTransaction(db, () => {
579
+ if (currentVersion < 1) {
580
+ applySchemaVersion1(db);
581
+ }
582
+ if (currentVersion < 2) {
583
+ applySchemaVersion2(db);
584
+ }
585
+ if (currentVersion < 3) {
586
+ applySchemaVersion3(db);
587
+ }
588
+ if (currentVersion < 4) {
589
+ applySchemaVersion4(db);
590
+ }
591
+ if (currentVersion < 5) {
592
+ applySchemaVersion5(db);
593
+ }
594
+ if (currentVersion < 6) {
595
+ applySchemaVersion6(db);
596
+ }
597
+ setSchemaVersion(db, CURRENT_SCHEMA_VERSION);
598
+ });
599
+ clearMigrationState(repoRoot);
600
+ } catch (err) {
601
+ writeMigrationState(repoRoot, {
602
+ status: 'failed',
603
+ from_version: currentVersion,
604
+ to_version: CURRENT_SCHEMA_VERSION,
605
+ failed_at: new Date().toISOString(),
606
+ error: String(err?.message || err),
607
+ });
608
+ throw err;
609
+ }
610
+ }
611
+
384
612
  function normalizeScopeRoot(pattern) {
385
613
  return String(pattern || '')
386
614
  .replace(/\\/g, '/')
@@ -426,6 +654,84 @@ function buildSpecOverlap(sourceSpec = null, affectedSpec = null) {
426
654
  };
427
655
  }
428
656
 
657
+ function taskSpecMatchesObject(taskSpec = null, object = null) {
658
+ if (!taskSpec || !object) return false;
659
+ const allowedPaths = Array.isArray(taskSpec.allowed_paths) ? taskSpec.allowed_paths.filter(Boolean) : [];
660
+ if (allowedPaths.length > 0 && matchesPathPatterns(object.file_path, allowedPaths)) {
661
+ return true;
662
+ }
663
+
664
+ const subsystemTags = Array.isArray(taskSpec.subsystem_tags) ? taskSpec.subsystem_tags.filter(Boolean) : [];
665
+ return subsystemTags.some((tag) => (object.subsystem_tags || []).includes(tag));
666
+ }
667
+
668
+ function taskSpecMatchesFilePath(taskSpec = null, filePath = null) {
669
+ if (!taskSpec || !filePath) return false;
670
+ const allowedPaths = Array.isArray(taskSpec.allowed_paths) ? taskSpec.allowed_paths.filter(Boolean) : [];
671
+ if (allowedPaths.length > 0 && matchesPathPatterns(filePath, allowedPaths)) {
672
+ return true;
673
+ }
674
+
675
+ const subsystemTags = Array.isArray(taskSpec.subsystem_tags) ? taskSpec.subsystem_tags.filter(Boolean) : [];
676
+ const fileSubsystems = classifySubsystemsForPath(filePath);
677
+ return subsystemTags.some((tag) => fileSubsystems.includes(tag));
678
+ }
679
+
680
+ function buildSemanticDependencyOverlap(db, sourceSpec, affectedSpec, changedFiles = []) {
681
+ const repoRoot = db.__switchmanRepoRoot;
682
+ if (!repoRoot || !sourceSpec || !affectedSpec || changedFiles.length === 0) {
683
+ return [];
684
+ }
685
+
686
+ const changedObjects = buildSemanticIndexForPath(repoRoot, changedFiles).objects || [];
687
+ const sourceObjects = changedObjects.filter((object) =>
688
+ changedFiles.includes(object.file_path)
689
+ && taskSpecMatchesObject(sourceSpec, object),
690
+ );
691
+ const trackedSourceFiles = listTrackedFiles(repoRoot, { sourceOnly: true });
692
+ const semanticCandidateFiles = [...new Set([...trackedSourceFiles, ...changedFiles])];
693
+ const affectedObjects = buildSemanticIndexForPath(repoRoot, semanticCandidateFiles).objects
694
+ .filter((object) => taskSpecMatchesObject(affectedSpec, object));
695
+ const affectedKeys = new Set(affectedObjects.map((object) => `${object.kind}:${object.name}`));
696
+ const overlaps = sourceObjects
697
+ .filter((object) => affectedKeys.has(`${object.kind}:${object.name}`))
698
+ .map((object) => ({
699
+ overlap_type: ['interface', 'type'].includes(object.kind) ? 'contract' : 'exported_object',
700
+ kind: object.kind,
701
+ name: object.name,
702
+ file_path: object.file_path,
703
+ area: object.area || null,
704
+ }));
705
+
706
+ const sourceChangedFiles = changedFiles.filter((filePath) => taskSpecMatchesFilePath(sourceSpec, filePath));
707
+ if (sourceChangedFiles.length === 0) return overlaps;
708
+
709
+ const moduleDependencies = buildModuleDependencyIndexForPath(repoRoot, { filePaths: semanticCandidateFiles }).dependencies || [];
710
+ const sharedModuleDependents = moduleDependencies.filter((dependency) =>
711
+ sourceChangedFiles.includes(dependency.imported_path)
712
+ && taskSpecMatchesFilePath(affectedSpec, dependency.file_path)
713
+ && !sourceChangedFiles.includes(dependency.file_path)
714
+ );
715
+ const dependentFiles = [...new Set(sharedModuleDependents.map((item) => item.file_path))];
716
+ const sharedModulePaths = [...new Set(sharedModuleDependents.map((item) => item.imported_path))];
717
+
718
+ if (dependentFiles.length > 0) {
719
+ overlaps.push({
720
+ overlap_type: 'shared_module',
721
+ kind: 'module',
722
+ name: sharedModulePaths[0],
723
+ file_path: sharedModulePaths[0],
724
+ area: sharedModuleDependents[0]?.area || null,
725
+ dependent_files: dependentFiles,
726
+ module_paths: sharedModulePaths,
727
+ subsystem_tags: [...new Set(sharedModuleDependents.flatMap((item) => item.subsystem_tags || []))],
728
+ dependent_areas: [...new Set(sharedModuleDependents.map((item) => item.area).filter(Boolean))],
729
+ });
730
+ }
731
+
732
+ return overlaps;
733
+ }
734
+
429
735
  function buildLeaseScopeReservations(lease, taskSpec) {
430
736
  if (!taskSpec) return [];
431
737
 
@@ -542,8 +848,8 @@ function reserveLeaseScopesTx(db, lease) {
542
848
  const conflicts = findScopeReservationConflicts(reservations, activeReservations);
543
849
  if (conflicts.length > 0) {
544
850
  const summary = conflicts[0].type === 'subsystem'
545
- ? `subsystem:${conflicts[0].subsystem_tag}`
546
- : `${conflicts[0].scope_pattern} overlaps ${conflicts[0].conflicting_scope_pattern}`;
851
+ ? `${conflicts[0].worktree} already owns subsystem:${conflicts[0].subsystem_tag}`
852
+ : `${conflicts[0].worktree} already owns ${conflicts[0].conflicting_scope_pattern} (requested ${conflicts[0].scope_pattern})`;
547
853
  logAuditEventTx(db, {
548
854
  eventType: 'scope_reservation_denied',
549
855
  status: 'denied',
@@ -553,7 +859,7 @@ function reserveLeaseScopesTx(db, lease) {
553
859
  leaseId: lease.id,
554
860
  details: JSON.stringify({ conflicts, summary }),
555
861
  });
556
- throw new Error(`Scope reservation conflict: ${summary}`);
862
+ throw new Error(`Scope ownership conflict: ${summary}`);
557
863
  }
558
864
 
559
865
  const insert = db.prepare(`
@@ -890,7 +1196,7 @@ function resolveDependencyInvalidationsForAffectedTaskTx(db, affectedTaskId, res
890
1196
  `).run(resolvedBy || null, resolvedBy || null, affectedTaskId);
891
1197
  }
892
1198
 
893
- function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
1199
+ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write', context = {}) {
894
1200
  const execution = getLeaseExecutionContext(db, leaseId);
895
1201
  if (!execution?.task || !execution.task_spec) {
896
1202
  return [];
@@ -912,7 +1218,8 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
912
1218
  if ((affectedSpec.pipeline_id || null) === sourcePipelineId) continue;
913
1219
 
914
1220
  const overlap = buildSpecOverlap(sourceSpec, affectedSpec);
915
- if (overlap.shared_subsystems.length === 0 && overlap.shared_scopes.length === 0) continue;
1221
+ const semanticOverlap = buildSemanticDependencyOverlap(db, sourceSpec, affectedSpec, context.changed_files || []);
1222
+ if (overlap.shared_subsystems.length === 0 && overlap.shared_scopes.length === 0 && semanticOverlap.length === 0) continue;
916
1223
 
917
1224
  const affectedWorktree = affectedTask.worktree || null;
918
1225
  for (const subsystemTag of overlap.shared_subsystems) {
@@ -933,6 +1240,8 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
933
1240
  source,
934
1241
  source_task_title: sourceTask.title,
935
1242
  affected_task_title: affectedTask.title,
1243
+ source_task_priority: Number(sourceTask.priority || 0),
1244
+ affected_task_priority: Number(affectedTask.priority || 0),
936
1245
  },
937
1246
  });
938
1247
  }
@@ -955,6 +1264,100 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
955
1264
  source,
956
1265
  source_task_title: sourceTask.title,
957
1266
  affected_task_title: affectedTask.title,
1267
+ source_task_priority: Number(sourceTask.priority || 0),
1268
+ affected_task_priority: Number(affectedTask.priority || 0),
1269
+ },
1270
+ });
1271
+ }
1272
+
1273
+ const semanticContractOverlap = semanticOverlap.filter((item) => item.overlap_type === 'contract');
1274
+ const semanticObjectOverlap = semanticOverlap.filter((item) => item.overlap_type === 'exported_object');
1275
+ const sharedModuleOverlap = semanticOverlap.filter((item) => item.overlap_type === 'shared_module');
1276
+
1277
+ if (semanticContractOverlap.length > 0) {
1278
+ desired.push({
1279
+ source_lease_id: leaseId,
1280
+ source_task_id: sourceTask.id,
1281
+ source_pipeline_id: sourcePipelineId,
1282
+ source_worktree: sourceWorktree,
1283
+ affected_task_id: affectedTask.id,
1284
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1285
+ affected_worktree: affectedWorktree,
1286
+ status: 'stale',
1287
+ reason_type: 'semantic_contract_drift',
1288
+ subsystem_tag: null,
1289
+ source_scope_pattern: null,
1290
+ affected_scope_pattern: null,
1291
+ details: {
1292
+ source,
1293
+ source_task_title: sourceTask.title,
1294
+ affected_task_title: affectedTask.title,
1295
+ source_task_priority: Number(sourceTask.priority || 0),
1296
+ affected_task_priority: Number(affectedTask.priority || 0),
1297
+ contract_names: [...new Set(semanticContractOverlap.map((item) => item.name))],
1298
+ contract_kinds: [...new Set(semanticContractOverlap.map((item) => item.kind))],
1299
+ contract_files: [...new Set(semanticContractOverlap.map((item) => item.file_path))],
1300
+ revalidation_set: 'contract',
1301
+ severity: 'blocked',
1302
+ },
1303
+ });
1304
+ }
1305
+
1306
+ if (semanticObjectOverlap.length > 0) {
1307
+ desired.push({
1308
+ source_lease_id: leaseId,
1309
+ source_task_id: sourceTask.id,
1310
+ source_pipeline_id: sourcePipelineId,
1311
+ source_worktree: sourceWorktree,
1312
+ affected_task_id: affectedTask.id,
1313
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1314
+ affected_worktree: affectedWorktree,
1315
+ status: 'stale',
1316
+ reason_type: 'semantic_object_overlap',
1317
+ subsystem_tag: null,
1318
+ source_scope_pattern: null,
1319
+ affected_scope_pattern: null,
1320
+ details: {
1321
+ source,
1322
+ source_task_title: sourceTask.title,
1323
+ affected_task_title: affectedTask.title,
1324
+ source_task_priority: Number(sourceTask.priority || 0),
1325
+ affected_task_priority: Number(affectedTask.priority || 0),
1326
+ object_names: [...new Set(semanticObjectOverlap.map((item) => item.name))],
1327
+ object_kinds: [...new Set(semanticObjectOverlap.map((item) => item.kind))],
1328
+ object_files: [...new Set(semanticObjectOverlap.map((item) => item.file_path))],
1329
+ revalidation_set: 'semantic_object',
1330
+ severity: 'warn',
1331
+ },
1332
+ });
1333
+ }
1334
+
1335
+ if (sharedModuleOverlap.length > 0) {
1336
+ desired.push({
1337
+ source_lease_id: leaseId,
1338
+ source_task_id: sourceTask.id,
1339
+ source_pipeline_id: sourcePipelineId,
1340
+ source_worktree: sourceWorktree,
1341
+ affected_task_id: affectedTask.id,
1342
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1343
+ affected_worktree: affectedWorktree,
1344
+ status: 'stale',
1345
+ reason_type: 'shared_module_drift',
1346
+ subsystem_tag: null,
1347
+ source_scope_pattern: null,
1348
+ affected_scope_pattern: null,
1349
+ details: {
1350
+ source,
1351
+ source_task_title: sourceTask.title,
1352
+ affected_task_title: affectedTask.title,
1353
+ source_task_priority: Number(sourceTask.priority || 0),
1354
+ affected_task_priority: Number(affectedTask.priority || 0),
1355
+ module_paths: [...new Set(sharedModuleOverlap.flatMap((item) => item.module_paths || [item.file_path]).filter(Boolean))],
1356
+ dependent_files: [...new Set(sharedModuleOverlap.flatMap((item) => item.dependent_files || []))],
1357
+ dependent_areas: [...new Set(sharedModuleOverlap.flatMap((item) => item.dependent_areas || []).filter(Boolean))],
1358
+ dependent_subsystems: [...new Set(sharedModuleOverlap.flatMap((item) => item.subsystem_tags || []).filter(Boolean))],
1359
+ revalidation_set: 'shared_module',
1360
+ severity: 'warn',
958
1361
  },
959
1362
  });
960
1363
  }
@@ -1262,7 +1665,7 @@ export function initDb(repoRoot) {
1262
1665
  db.__switchmanRepoRoot = repoRoot;
1263
1666
  db.__switchmanAuditSecret = getAuditSecret(repoRoot);
1264
1667
  configureDb(db, { initialize: true });
1265
- withBusyRetry(() => ensureSchema(db));
1668
+ withBusyRetry(() => ensureSchemaMigrated(db));
1266
1669
  return db;
1267
1670
  }
1268
1671
 
@@ -1275,7 +1678,7 @@ export function openDb(repoRoot) {
1275
1678
  db.__switchmanRepoRoot = repoRoot;
1276
1679
  db.__switchmanAuditSecret = getAuditSecret(repoRoot);
1277
1680
  configureDb(db);
1278
- withBusyRetry(() => ensureSchema(db));
1681
+ withBusyRetry(() => ensureSchemaMigrated(db));
1279
1682
  return db;
1280
1683
  }
1281
1684
 
@@ -1291,20 +1694,27 @@ export function createTask(db, { id, title, description, priority = 5 }) {
1291
1694
  }
1292
1695
 
1293
1696
  export function startTaskLease(db, taskId, worktree, agent) {
1294
- return withImmediateTransaction(db, () => {
1295
- const task = getTaskTx(db, taskId);
1296
- if (!task || task.status !== 'pending') {
1297
- return null;
1298
- }
1697
+ try {
1698
+ return withImmediateTransaction(db, () => {
1699
+ const task = getTaskTx(db, taskId);
1700
+ if (!task || task.status !== 'pending') {
1701
+ return null;
1702
+ }
1299
1703
 
1300
- db.prepare(`
1301
- UPDATE tasks
1302
- SET status='in_progress', worktree=?, agent=?, updated_at=datetime('now')
1303
- WHERE id=? AND status='pending'
1304
- `).run(worktree, agent || null, taskId);
1704
+ db.prepare(`
1705
+ UPDATE tasks
1706
+ SET status='in_progress', worktree=?, agent=?, updated_at=datetime('now')
1707
+ WHERE id=? AND status='pending'
1708
+ `).run(worktree, agent || null, taskId);
1305
1709
 
1306
- return createLeaseTx(db, { taskId, worktree, agent });
1307
- });
1710
+ return createLeaseTx(db, { taskId, worktree, agent });
1711
+ });
1712
+ } catch (err) {
1713
+ if (String(err?.message || '').startsWith('Scope ownership conflict:')) {
1714
+ return null;
1715
+ }
1716
+ throw err;
1717
+ }
1308
1718
  }
1309
1719
 
1310
1720
  export function assignTask(db, taskId, worktree, agent) {
@@ -1312,14 +1722,52 @@ export function assignTask(db, taskId, worktree, agent) {
1312
1722
  }
1313
1723
 
1314
1724
  export function completeTask(db, taskId) {
1315
- withImmediateTransaction(db, () => {
1725
+ return withImmediateTransaction(db, () => {
1726
+ const task = getTaskTx(db, taskId);
1727
+ if (!task) {
1728
+ throw new Error(`Task ${taskId} does not exist.`);
1729
+ }
1730
+ if (task.status === 'done') {
1731
+ return {
1732
+ ok: false,
1733
+ status: 'already_done',
1734
+ task: getTaskTx(db, taskId),
1735
+ };
1736
+ }
1737
+ if (task.status === 'failed') {
1738
+ return {
1739
+ ok: false,
1740
+ status: 'failed',
1741
+ task: getTaskTx(db, taskId),
1742
+ };
1743
+ }
1744
+ if (task.status !== 'in_progress') {
1745
+ return {
1746
+ ok: false,
1747
+ status: 'not_in_progress',
1748
+ task: getTaskTx(db, taskId),
1749
+ };
1750
+ }
1316
1751
  const activeLease = getActiveLeaseForTaskTx(db, taskId);
1752
+ if (!activeLease) {
1753
+ return {
1754
+ ok: false,
1755
+ status: 'no_active_lease',
1756
+ task: getTaskTx(db, taskId),
1757
+ };
1758
+ }
1317
1759
  finalizeTaskWithLeaseTx(db, taskId, activeLease, {
1318
1760
  taskStatus: 'done',
1319
1761
  leaseStatus: 'completed',
1320
1762
  auditStatus: 'allowed',
1321
1763
  auditEventType: 'task_completed',
1322
1764
  });
1765
+ return {
1766
+ ok: true,
1767
+ status: 'completed',
1768
+ had_active_lease: true,
1769
+ task: getTaskTx(db, taskId),
1770
+ };
1323
1771
  });
1324
1772
  }
1325
1773
 
@@ -1374,7 +1822,13 @@ export function failLeaseTask(db, leaseId, reason) {
1374
1822
  export function retryTask(db, taskId, reason = null) {
1375
1823
  return withImmediateTransaction(db, () => {
1376
1824
  const task = getTaskTx(db, taskId);
1377
- if (!task || !['failed', 'done'].includes(task.status)) {
1825
+ if (!task) {
1826
+ return null;
1827
+ }
1828
+ const activeLease = getActiveLeaseForTaskTx(db, taskId);
1829
+ const retryable = ['failed', 'done'].includes(task.status)
1830
+ || (task.status === 'in_progress' && !activeLease);
1831
+ if (!retryable) {
1378
1832
  return null;
1379
1833
  }
1380
1834
 
@@ -1385,7 +1839,7 @@ export function retryTask(db, taskId, reason = null) {
1385
1839
  agent=NULL,
1386
1840
  completed_at=NULL,
1387
1841
  updated_at=datetime('now')
1388
- WHERE id=? AND status IN ('failed', 'done')
1842
+ WHERE id=? AND status IN ('failed', 'done', 'in_progress')
1389
1843
  `).run(taskId);
1390
1844
 
1391
1845
  logAuditEventTx(db, {
@@ -1422,6 +1876,7 @@ export function enqueueMergeItem(db, {
1422
1876
  targetBranch = 'main',
1423
1877
  maxRetries = 1,
1424
1878
  submittedBy = null,
1879
+ eventDetails = null,
1425
1880
  } = {}) {
1426
1881
  const itemId = id || makeId('mq');
1427
1882
  db.prepare(`
@@ -1450,6 +1905,7 @@ export function enqueueMergeItem(db, {
1450
1905
  source_worktree: sourceWorktree || null,
1451
1906
  source_pipeline_id: sourcePipelineId || null,
1452
1907
  target_branch: targetBranch || 'main',
1908
+ ...(eventDetails || {}),
1453
1909
  }),
1454
1910
  });
1455
1911
 
@@ -1502,10 +1958,350 @@ export function logMergeQueueEvent(db, itemId, {
1502
1958
  `).run(itemId, eventType, status || null, details == null ? null : String(details));
1503
1959
  }
1504
1960
 
1961
/**
 * List rows from the operation_journal table, newest first.
 *
 * @param {object} db - open database handle with a `prepare` method
 * @param {object} [opts]
 * @param {string|null} [opts.scopeType] - filter on scope_type when set
 * @param {string|null} [opts.scopeId] - filter on scope_id when set
 * @param {string|null} [opts.status] - filter on status when set
 * @param {number} [opts.limit=50] - maximum number of rows returned
 * @returns {object[]} journal rows ordered by started_at (then id) descending
 */
export function listOperationJournal(db, {
  scopeType = null,
  scopeId = null,
  status = null,
  limit = 50,
} = {}) {
  // Pair each optional filter with its SQL fragment, then keep only the
  // filters that were actually supplied (truthy values).
  const activeFilters = [
    ['scope_type=?', scopeType],
    ['scope_id=?', scopeId],
    ['status=?', status],
  ].filter(([, value]) => value);

  const where = activeFilters.length
    ? `WHERE ${activeFilters.map(([clause]) => clause).join(' AND ')}`
    : '';
  const bindings = activeFilters.map(([, value]) => value);

  return db.prepare(`
    SELECT *
    FROM operation_journal
    ${where}
    ORDER BY datetime(started_at) DESC, id DESC
    LIMIT ?
  `).all(...bindings, limit);
}
1990
+
1991
/**
 * Insert a new operation_journal row in the 'running' state and return it.
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {string|null} [opts.id] - explicit entry id; when absent one is generated via makeId('op')
 * @param {string} opts.scopeType - scope_type column value
 * @param {string} opts.scopeId - scope_id column value
 * @param {string} opts.operationType - operation_type column value
 * @param {*} [opts.details] - stringified into the details column; null stays NULL
 * @returns {object|undefined} the freshly inserted row
 */
export function startOperationJournalEntry(db, {
  id = null,
  scopeType,
  scopeId,
  operationType,
  details = null,
} = {}) {
  const journalId = id || makeId('op');
  // Details are stored as text; preserve NULL for absent values.
  const serializedDetails = details == null ? null : String(details);

  db.prepare(`
    INSERT INTO operation_journal (id, scope_type, scope_id, operation_type, status, details)
    VALUES (?, ?, ?, ?, 'running', ?)
  `).run(journalId, scopeType, scopeId, operationType, serializedDetails);

  return db.prepare(`
    SELECT *
    FROM operation_journal
    WHERE id=?
  `).get(journalId);
}
2009
+
2010
/**
 * Mark an operation_journal entry as finished and return the updated row.
 * Existing details are kept when no new details are supplied (COALESCE).
 *
 * @param {object} db - open database handle
 * @param {string} entryId - id of the journal entry to finish
 * @param {object} [opts]
 * @param {string} [opts.status='completed'] - terminal status to record
 * @param {*} [opts.details] - stringified replacement details; null keeps existing
 * @returns {object|undefined} the updated row
 */
export function finishOperationJournalEntry(db, entryId, {
  status = 'completed',
  details = null,
} = {}) {
  const serializedDetails = details == null ? null : String(details);
  const fetchEntry = () => db.prepare(`
    SELECT *
    FROM operation_journal
    WHERE id=?
  `).get(entryId);

  db.prepare(`
    UPDATE operation_journal
    SET status=?,
        details=COALESCE(?, details),
        updated_at=datetime('now'),
        finished_at=datetime('now')
    WHERE id=?
  `).run(status, serializedDetails, entryId);

  return fetchEntry();
}
2028
+
2029
/**
 * List rows from the temp_resources table, newest first.
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {string|null} [opts.scopeType] - filter on scope_type when set
 * @param {string|null} [opts.scopeId] - filter on scope_id when set
 * @param {string|null} [opts.operationId] - filter on operation_id when set
 * @param {string|null} [opts.resourceType] - filter on resource_type when set
 * @param {string|null} [opts.status] - filter on status when set
 * @param {number} [opts.limit=100] - maximum number of rows returned
 * @returns {object[]} temp-resource rows ordered by created_at (then id) descending
 */
export function listTempResources(db, {
  scopeType = null,
  scopeId = null,
  operationId = null,
  resourceType = null,
  status = null,
  limit = 100,
} = {}) {
  // Declarative filter table: each entry pairs a SQL fragment with its value;
  // only supplied (truthy) filters make it into the WHERE clause.
  const activeFilters = [
    ['scope_type=?', scopeType],
    ['scope_id=?', scopeId],
    ['operation_id=?', operationId],
    ['resource_type=?', resourceType],
    ['status=?', status],
  ].filter(([, value]) => value);

  const where = activeFilters.length
    ? `WHERE ${activeFilters.map(([clause]) => clause).join(' AND ')}`
    : '';
  const bindings = activeFilters.map(([, value]) => value);

  return db.prepare(`
    SELECT *
    FROM temp_resources
    ${where}
    ORDER BY datetime(created_at) DESC, id DESC
    LIMIT ?
  `).all(...bindings, limit);
}
2068
+
2069
/**
 * Fetch a single policy override by id, decoding its JSON text columns.
 *
 * requirement_keys / task_types decode to arrays ([] when absent or corrupt);
 * details decodes to an object (null when absent or corrupt).
 *
 * @param {object} db - open database handle
 * @param {string} overrideId - policy_overrides primary key
 * @returns {object|null} the decoded row, or null when no row matches
 */
export function getPolicyOverride(db, overrideId) {
  const entry = db.prepare(`
    SELECT *
    FROM policy_overrides
    WHERE id=?
  `).get(overrideId);
  if (!entry) return null;

  // The original inlined three identical try/catch IIFEs; a single helper
  // keeps the fallback behavior (empty/invalid text -> fallback) in one place.
  const parseJsonColumn = (text, fallback) => {
    try {
      return text ? JSON.parse(text) : fallback;
    } catch {
      return fallback;
    }
  };

  return {
    ...entry,
    requirement_keys: parseJsonColumn(entry.requirement_keys, []),
    task_types: parseJsonColumn(entry.task_types, []),
    details: parseJsonColumn(entry.details, null),
  };
}
2101
+
2102
/**
 * List policy overrides, newest first, decoding JSON text columns per row.
 *
 * requirement_keys / task_types decode to arrays ([] when absent or corrupt);
 * details decodes to an object (null when absent or corrupt).
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {string|null} [opts.pipelineId] - filter on pipeline_id when set
 * @param {string|null} [opts.status] - filter on status when set
 * @param {number} [opts.limit=100] - maximum number of rows returned
 * @returns {object[]} decoded rows ordered by created_at (then id) descending
 */
export function listPolicyOverrides(db, {
  pipelineId = null,
  status = null,
  limit = 100,
} = {}) {
  const clauses = [];
  const params = [];
  if (pipelineId) {
    clauses.push('pipeline_id=?');
    params.push(pipelineId);
  }
  if (status) {
    clauses.push('status=?');
    params.push(status);
  }
  const where = clauses.length ? `WHERE ${clauses.join(' AND ')}` : '';

  // The original repeated three identical try/catch IIFEs per row; one helper
  // keeps the fallback behavior (empty/invalid text -> fallback) in one place.
  const parseJsonColumn = (text, fallback) => {
    try {
      return text ? JSON.parse(text) : fallback;
    } catch {
      return fallback;
    }
  };

  return db.prepare(`
    SELECT *
    FROM policy_overrides
    ${where}
    ORDER BY datetime(created_at) DESC, id DESC
    LIMIT ?
  `).all(...params, limit).map((entry) => ({
    ...entry,
    requirement_keys: parseJsonColumn(entry.requirement_keys, []),
    task_types: parseJsonColumn(entry.task_types, []),
    details: parseJsonColumn(entry.details, null),
  }));
}
2149
+
2150
/**
 * Create an active policy override for a pipeline, audit it, and return the
 * decoded row (via getPolicyOverride). Runs inside an immediate transaction.
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {string} opts.pipelineId - pipeline the override applies to (required)
 * @param {string[]} [opts.requirementKeys] - requirement keys to override; trimmed, deduped
 * @param {string[]} [opts.taskTypes] - task types to override; trimmed, deduped
 * @param {string} opts.reason - non-blank justification (required)
 * @param {string|null} [opts.approvedBy] - approver identity
 * @param {*} [opts.details] - extra context, JSON-stringified into the details column
 * @returns {object|null} the decoded override row
 * @throws {Error} when pipelineId or a non-blank reason is missing
 */
export function createPolicyOverride(db, {
  pipelineId,
  requirementKeys = [],
  taskTypes = [],
  reason,
  approvedBy = null,
  details = null,
} = {}) {
  if (!pipelineId) throw new Error('pipelineId is required for a policy override.');
  if (!reason || !String(reason).trim()) throw new Error('A policy override reason is required.');

  // Normalize list inputs the same way: stringify, trim, drop blanks, dedupe.
  const normalizeList = (values) =>
    [...new Set((values || []).map((value) => String(value || '').trim()).filter(Boolean))];

  const overrideId = makeId('po');
  const requirementKeyList = normalizeList(requirementKeys);
  const taskTypeList = normalizeList(taskTypes);
  const trimmedReason = String(reason).trim();

  return withImmediateTransaction(db, () => {
    db.prepare(`
      INSERT INTO policy_overrides (
        id, pipeline_id, requirement_keys, task_types, status, reason, approved_by, details
      )
      VALUES (?, ?, ?, ?, 'active', ?, ?, ?)
    `).run(
      overrideId,
      pipelineId,
      JSON.stringify(requirementKeyList),
      JSON.stringify(taskTypeList),
      trimmedReason,
      approvedBy || null,
      details == null ? null : JSON.stringify(details),
    );
    // Overrides weaken policy, so the audit entry is logged at 'warn'.
    logAuditEventTx(db, {
      eventType: 'policy_override_created',
      status: 'warn',
      reasonCode: 'policy_override',
      details: JSON.stringify({
        override_id: overrideId,
        pipeline_id: pipelineId,
        requirement_keys: requirementKeyList,
        task_types: taskTypeList,
        reason: trimmedReason,
        approved_by: approvedBy || null,
        details: details || null,
      }),
    });
    return getPolicyOverride(db, overrideId);
  });
}
2197
+
2198
/**
 * Revoke an active policy override, audit the revocation, and return the
 * decoded row. Non-active overrides are returned unchanged (idempotent).
 * Runs inside an immediate transaction.
 *
 * @param {object} db - open database handle
 * @param {string} overrideId - id of the override to revoke
 * @param {object} [opts]
 * @param {string|null} [opts.revokedBy] - who revoked it
 * @param {string|null} [opts.reason] - why it was revoked
 * @returns {object} the decoded override row
 * @throws {Error} when the override does not exist
 */
export function revokePolicyOverride(db, overrideId, {
  revokedBy = null,
  reason = null,
} = {}) {
  return withImmediateTransaction(db, () => {
    const current = getPolicyOverride(db, overrideId);
    if (!current) {
      throw new Error(`Policy override ${overrideId} does not exist.`);
    }
    // Already revoked (or otherwise non-active): nothing to do.
    if (current.status !== 'active') {
      return current;
    }

    db.prepare(`
      UPDATE policy_overrides
      SET status='revoked',
          revoked_at=datetime('now'),
          revoked_by=?,
          revoked_reason=?
      WHERE id=?
    `).run(revokedBy || null, reason || null, overrideId);

    logAuditEventTx(db, {
      eventType: 'policy_override_revoked',
      status: 'info',
      reasonCode: 'policy_override_revoked',
      details: JSON.stringify({
        override_id: overrideId,
        pipeline_id: current.pipeline_id,
        revoked_by: revokedBy || null,
        revoked_reason: reason || null,
      }),
    });

    return getPolicyOverride(db, overrideId);
  });
}
2232
+
2233
/**
 * Register a temporary resource (e.g. a scratch path or branch) and return
 * the inserted row.
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {string|null} [opts.id] - explicit id; when absent one is generated via makeId('res')
 * @param {string} opts.scopeType - scope_type column value
 * @param {string} opts.scopeId - scope_id column value
 * @param {string|null} [opts.operationId] - owning operation_journal entry, if any
 * @param {string} opts.resourceType - resource_type column value
 * @param {string} opts.path - filesystem (or logical) path of the resource
 * @param {string|null} [opts.branch] - associated branch, if any
 * @param {*} [opts.details] - stringified into the details column; null stays NULL
 * @param {string} [opts.status='active'] - initial lifecycle status
 * @returns {object|undefined} the freshly inserted row
 */
export function createTempResource(db, {
  id = null,
  scopeType,
  scopeId,
  operationId = null,
  resourceType,
  path,
  branch = null,
  details = null,
  status = 'active',
} = {}) {
  const tempResourceId = id || makeId('res');
  const serializedDetails = details == null ? null : String(details);

  db.prepare(`
    INSERT INTO temp_resources (
      id, scope_type, scope_id, operation_id, resource_type, path, branch, status, details
    )
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
  `).run(
    tempResourceId,
    scopeType,
    scopeId,
    operationId || null,
    resourceType,
    path,
    branch || null,
    status,
    serializedDetails,
  );

  return db.prepare(`
    SELECT *
    FROM temp_resources
    WHERE id=?
  `).get(tempResourceId);
}
2267
+
2268
/**
 * Partially update a temp_resources row and return the updated row.
 * Null arguments keep the existing column value (COALESCE). released_at is
 * cleared when the effective status is 'active', otherwise set once.
 *
 * @param {object} db - open database handle
 * @param {string} resourceId - id of the resource to update
 * @param {object} [opts]
 * @param {string|null} [opts.status] - new status; null keeps current
 * @param {string|null} [opts.path] - new path; null keeps current
 * @param {string|null} [opts.branch] - new branch; null keeps current
 * @param {*} [opts.details] - stringified replacement details; null keeps current
 * @returns {object|undefined} the updated row
 */
export function updateTempResource(db, resourceId, {
  status = null,
  path = null,
  branch = null,
  details = null,
} = {}) {
  const serializedDetails = details == null ? null : String(details);

  db.prepare(`
    UPDATE temp_resources
    SET status=COALESCE(?, status),
        path=COALESCE(?, path),
        branch=COALESCE(?, branch),
        details=COALESCE(?, details),
        updated_at=datetime('now'),
        released_at=CASE
          WHEN COALESCE(?, status) = 'active' THEN NULL
          ELSE COALESCE(released_at, datetime('now'))
        END
    WHERE id=?
  `).run(status, path, branch, serializedDetails, status, resourceId);

  return db.prepare(`
    SELECT *
    FROM temp_resources
    WHERE id=?
  `).get(resourceId);
}
2300
+
1505
2301
  export function startMergeQueueItem(db, itemId) {
1506
2302
  return withImmediateTransaction(db, () => {
1507
2303
  const item = getMergeQueueItem(db, itemId);
1508
- if (!item || !['queued', 'retrying'].includes(item.status)) {
2304
+ if (!item || !['queued', 'retrying', 'held', 'wave_blocked', 'escalated'].includes(item.status)) {
1509
2305
  return null;
1510
2306
  }
1511
2307
 
@@ -1515,7 +2311,7 @@ export function startMergeQueueItem(db, itemId) {
1515
2311
  started_at=COALESCE(started_at, datetime('now')),
1516
2312
  last_attempt_at=datetime('now'),
1517
2313
  updated_at=datetime('now')
1518
- WHERE id=? AND status IN ('queued', 'retrying')
2314
+ WHERE id=? AND status IN ('queued', 'retrying', 'held', 'wave_blocked', 'escalated')
1519
2315
  `).run(itemId);
1520
2316
 
1521
2317
  logMergeQueueEvent(db, itemId, {
@@ -1534,6 +2330,7 @@ export function markMergeQueueState(db, itemId, {
1534
2330
  nextAction = null,
1535
2331
  mergedCommit = null,
1536
2332
  incrementRetry = false,
2333
+ backoffUntil = undefined,
1537
2334
  } = {}) {
1538
2335
  const terminal = ['merged', 'blocked', 'failed', 'canceled'].includes(status);
1539
2336
  db.prepare(`
@@ -1544,6 +2341,15 @@ export function markMergeQueueState(db, itemId, {
1544
2341
  next_action=?,
1545
2342
  merged_commit=COALESCE(?, merged_commit),
1546
2343
  retry_count=retry_count + ?,
2344
+ backoff_until=CASE
2345
+ WHEN ? THEN ?
2346
+ WHEN ? = 'retrying' THEN backoff_until
2347
+ ELSE NULL
2348
+ END,
2349
+ escalated_at=CASE
2350
+ WHEN ? = 'escalated' THEN COALESCE(escalated_at, datetime('now'))
2351
+ ELSE NULL
2352
+ END,
1547
2353
  updated_at=datetime('now'),
1548
2354
  finished_at=CASE WHEN ? THEN datetime('now') ELSE finished_at END
1549
2355
  WHERE id=?
@@ -1554,6 +2360,10 @@ export function markMergeQueueState(db, itemId, {
1554
2360
  nextAction || null,
1555
2361
  mergedCommit || null,
1556
2362
  incrementRetry ? 1 : 0,
2363
+ backoffUntil !== undefined ? 1 : 0,
2364
+ backoffUntil || null,
2365
+ status,
2366
+ status,
1557
2367
  terminal ? 1 : 0,
1558
2368
  itemId,
1559
2369
  );
@@ -1575,7 +2385,7 @@ export function markMergeQueueState(db, itemId, {
1575
2385
 
1576
2386
  export function retryMergeQueueItem(db, itemId) {
1577
2387
  const item = getMergeQueueItem(db, itemId);
1578
- if (!item || !['blocked', 'failed'].includes(item.status)) {
2388
+ if (!item || !['blocked', 'failed', 'held', 'wave_blocked', 'escalated', 'retrying'].includes(item.status)) {
1579
2389
  return null;
1580
2390
  }
1581
2391
 
@@ -1585,6 +2395,8 @@ export function retryMergeQueueItem(db, itemId) {
1585
2395
  last_error_code=NULL,
1586
2396
  last_error_summary=NULL,
1587
2397
  next_action=NULL,
2398
+ backoff_until=NULL,
2399
+ escalated_at=NULL,
1588
2400
  finished_at=NULL,
1589
2401
  updated_at=datetime('now')
1590
2402
  WHERE id=?
@@ -1598,6 +2410,35 @@ export function retryMergeQueueItem(db, itemId) {
1598
2410
  return getMergeQueueItem(db, itemId);
1599
2411
  }
1600
2412
 
2413
/**
 * Escalate a merge-queue item for operator attention. Items that already
 * landed ('merged') or were canceled cannot be escalated. If the item is
 * already escalated with the same summary and next-action text, it is
 * returned unchanged (idempotent).
 *
 * @param {object} db - open database handle
 * @param {string} itemId - merge-queue item id
 * @param {object} [opts]
 * @param {string|null} [opts.summary] - override for the stored error summary
 * @param {string|null} [opts.nextAction] - override for the stored next-action hint
 * @returns {object|null} the (possibly updated) item, or null when not escalatable
 */
export function escalateMergeQueueItem(db, itemId, {
  summary = null,
  nextAction = null,
} = {}) {
  const item = getMergeQueueItem(db, itemId);
  if (!item) return null;
  if (item.status === 'merged' || item.status === 'canceled') return null;

  // Fall back first to whatever is already recorded on the item, then to a
  // generic operator-facing message.
  const summaryText = summary || item.last_error_summary || 'Operator escalation requested before this queue item lands.';
  const nextActionText = nextAction || item.next_action || `Run \`switchman explain queue ${itemId}\` to review the risk, then \`switchman queue retry ${itemId}\` when you are ready to land it again.`;

  const alreadyEscalated = item.status === 'escalated'
    && (item.last_error_summary || null) === summaryText
    && (item.next_action || null) === nextActionText;
  if (alreadyEscalated) {
    return item;
  }

  return markMergeQueueState(db, itemId, {
    status: 'escalated',
    lastErrorCode: 'queue_escalated_manual',
    lastErrorSummary: summaryText,
    nextAction: nextActionText,
    backoffUntil: null,
  });
}
2441
+
1601
2442
  export function removeMergeQueueItem(db, itemId) {
1602
2443
  const item = getMergeQueueItem(db, itemId);
1603
2444
  if (!item) return null;
@@ -1755,7 +2596,7 @@ export function listDependencyInvalidations(db, { status = 'stale', pipelineId =
1755
2596
  }));
1756
2597
  }
1757
2598
 
1758
- export function touchBoundaryValidationState(db, leaseId, source = 'write') {
2599
+ export function touchBoundaryValidationState(db, leaseId, source = 'write', context = {}) {
1759
2600
  return withImmediateTransaction(db, () => {
1760
2601
  const state = computeBoundaryValidationStateTx(db, leaseId, { touched: true, source });
1761
2602
  if (state) {
@@ -1774,7 +2615,7 @@ export function touchBoundaryValidationState(db, leaseId, source = 'write') {
1774
2615
  });
1775
2616
  }
1776
2617
 
1777
- const invalidations = syncDependencyInvalidationsForLeaseTx(db, leaseId, source);
2618
+ const invalidations = syncDependencyInvalidationsForLeaseTx(db, leaseId, source, context);
1778
2619
  if (invalidations.length > 0) {
1779
2620
  logAuditEventTx(db, {
1780
2621
  eventType: 'dependency_invalidations_updated',
@@ -1786,6 +2627,8 @@ export function touchBoundaryValidationState(db, leaseId, source = 'write') {
1786
2627
  source,
1787
2628
  stale_count: invalidations.length,
1788
2629
  affected_task_ids: [...new Set(invalidations.map((item) => item.affected_task_id))],
2630
+ reason_types: [...new Set(invalidations.map((item) => item.reason_type))],
2631
+ revalidation_sets: [...new Set(invalidations.map((item) => item.details?.revalidation_set).filter(Boolean))],
1789
2632
  }),
1790
2633
  });
1791
2634
  }
@@ -1854,7 +2697,7 @@ export function getStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINUT
1854
2697
  FROM leases l
1855
2698
  JOIN tasks t ON l.task_id = t.id
1856
2699
  WHERE l.status='active'
1857
- AND l.heartbeat_at < datetime('now', ?)
2700
+ AND l.heartbeat_at <= datetime('now', ?)
1858
2701
  ORDER BY l.heartbeat_at ASC
1859
2702
  `).all(`-${staleAfterMinutes} minutes`);
1860
2703
  }
@@ -1929,24 +2772,72 @@ export function reapStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINU
1929
2772
  });
1930
2773
  }
1931
2774
 
2775
/**
 * Delete aged-out bookkeeping rows inside one immediate transaction:
 * released file claims and scope reservations past the retention window,
 * non-active leases finished before the window, and snapshots whose
 * worktree row no longer exists.
 *
 * @param {object} db - open database handle
 * @param {object} [opts]
 * @param {number} [opts.retentionDays] - days of history to keep (minimum 1;
 *   non-numeric input falls back to DB_PRUNE_RETENTION_DAYS)
 * @returns {object} per-table deletion counts
 */
export function pruneDatabaseMaintenance(db, { retentionDays = DB_PRUNE_RETENTION_DAYS } = {}) {
  const effectiveDays = Math.max(1, Number.parseInt(retentionDays, 10) || DB_PRUNE_RETENTION_DAYS);
  const retentionWindow = `-${effectiveDays} days`;

  return withImmediateTransaction(db, () => {
    // All age-based deletes share the same datetime('now', ?) window binding.
    const deleteAged = (sql) => db.prepare(sql).run(retentionWindow).changes;

    const prunedClaims = deleteAged(`
      DELETE FROM file_claims
      WHERE released_at IS NOT NULL
        AND released_at <= datetime('now', ?)
    `);
    const prunedReservations = deleteAged(`
      DELETE FROM scope_reservations
      WHERE released_at IS NOT NULL
        AND released_at <= datetime('now', ?)
    `);
    const prunedLeases = deleteAged(`
      DELETE FROM leases
      WHERE status != 'active'
        AND finished_at IS NOT NULL
        AND finished_at <= datetime('now', ?)
    `);
    // Snapshots are keyed by worktree name; drop any whose worktree is gone.
    const prunedSnapshots = db.prepare(`
      DELETE FROM worktree_snapshots
      WHERE worktree NOT IN (
        SELECT name FROM worktrees
      )
    `).run().changes;

    return {
      released_claims_pruned: prunedClaims,
      finished_leases_pruned: prunedLeases,
      released_scope_reservations_pruned: prunedReservations,
      orphaned_snapshots_pruned: prunedSnapshots,
    };
  });
}
2812
+
1932
2813
  // ─── File Claims ──────────────────────────────────────────────────────────────
1933
2814
 
1934
2815
  export function claimFiles(db, taskId, worktree, filePaths, agent) {
1935
2816
  return withImmediateTransaction(db, () => {
1936
2817
  const lease = resolveActiveLeaseTx(db, taskId, worktree, agent);
1937
- const findActiveClaim = db.prepare(`
2818
+ const activeClaims = db.prepare(`
1938
2819
  SELECT *
1939
2820
  FROM file_claims
1940
- WHERE file_path=? AND released_at IS NULL
1941
- LIMIT 1
1942
- `);
2821
+ WHERE released_at IS NULL
2822
+ ORDER BY id ASC
2823
+ `).all();
2824
+ const activeClaimByPath = new Map();
2825
+ for (const claim of activeClaims) {
2826
+ const normalizedPath = normalizeClaimedFilePath(claim.file_path);
2827
+ if (!activeClaimByPath.has(normalizedPath)) {
2828
+ activeClaimByPath.set(normalizedPath, claim);
2829
+ }
2830
+ }
2831
+
2832
+ const normalizeAndDeduplicate = [...new Set(filePaths.map((filePath) => normalizeClaimedFilePath(filePath)))];
2833
+ const findClaimOwner = (normalizedPath) => activeClaimByPath.get(normalizedPath) || null;
1943
2834
  const insert = db.prepare(`
1944
2835
  INSERT INTO file_claims (task_id, lease_id, file_path, worktree, agent)
1945
2836
  VALUES (?, ?, ?, ?, ?)
1946
2837
  `);
1947
2838
 
1948
- for (const fp of filePaths) {
1949
- const existing = findActiveClaim.get(fp);
2839
+ for (const normalizedPath of normalizeAndDeduplicate) {
2840
+ const existing = findClaimOwner(normalizedPath);
1950
2841
  if (existing) {
1951
2842
  const sameLease = existing.lease_id === lease.id;
1952
2843
  const sameLegacyOwner = existing.lease_id == null && existing.task_id === taskId && existing.worktree === worktree;
@@ -1958,6 +2849,7 @@ export function claimFiles(db, taskId, worktree, filePaths, agent) {
1958
2849
  SET lease_id=?, agent=COALESCE(?, agent)
1959
2850
  WHERE id=?
1960
2851
  `).run(lease.id, agent || null, existing.id);
2852
+ activeClaimByPath.set(normalizedPath, { ...existing, lease_id: lease.id, agent: agent || existing.agent });
1961
2853
  }
1962
2854
  continue;
1963
2855
  }
@@ -1965,14 +2857,32 @@ export function claimFiles(db, taskId, worktree, filePaths, agent) {
1965
2857
  throw new Error('One or more files are already actively claimed by another task.');
1966
2858
  }
1967
2859
 
1968
- insert.run(taskId, lease.id, fp, worktree, agent || null);
2860
+ try {
2861
+ insert.run(taskId, lease.id, normalizedPath, worktree, agent || null);
2862
+ } catch (err) {
2863
+ const message = String(err?.message || '').toLowerCase();
2864
+ const isActiveClaimConstraint =
2865
+ message.includes('idx_file_claims_active_path')
2866
+ || (message.includes('unique') && message.includes('file_claims'));
2867
+ if (isActiveClaimConstraint) {
2868
+ throw new Error('One or more files are already actively claimed by another task.');
2869
+ }
2870
+ throw err;
2871
+ }
2872
+ activeClaimByPath.set(normalizedPath, {
2873
+ task_id: taskId,
2874
+ lease_id: lease.id,
2875
+ file_path: normalizedPath,
2876
+ worktree,
2877
+ agent: agent || null,
2878
+ });
1969
2879
  logAuditEventTx(db, {
1970
2880
  eventType: 'file_claimed',
1971
2881
  status: 'allowed',
1972
2882
  worktree,
1973
2883
  taskId,
1974
2884
  leaseId: lease.id,
1975
- filePath: fp,
2885
+ filePath: normalizedPath,
1976
2886
  });
1977
2887
  }
1978
2888
 
@@ -2030,19 +2940,27 @@ export function getCompletedFileClaims(db, worktree = null) {
2030
2940
  }
2031
2941
 
2032
2942
  export function checkFileConflicts(db, filePaths, excludeWorktree) {
2943
+ const normalizedPaths = [...new Set(filePaths.map((filePath) => normalizeClaimedFilePath(filePath)))];
2033
2944
  const conflicts = [];
2034
- const stmt = db.prepare(`
2945
+ const claims = db.prepare(`
2035
2946
  SELECT fc.*, t.title as task_title, l.id as lease_id, l.status as lease_status
2036
2947
  FROM file_claims fc
2037
2948
  JOIN tasks t ON fc.task_id = t.id
2038
2949
  LEFT JOIN leases l ON fc.lease_id = l.id
2039
- WHERE fc.file_path=?
2040
- AND fc.released_at IS NULL
2950
+ WHERE fc.released_at IS NULL
2041
2951
  AND fc.worktree != ?
2042
2952
  AND t.status NOT IN ('done','failed')
2043
- `);
2044
- for (const fp of filePaths) {
2045
- const existing = stmt.get(fp, excludeWorktree || '');
2953
+ `).all(excludeWorktree || '');
2954
+ const claimByNormalizedPath = new Map();
2955
+ for (const claim of claims) {
2956
+ const normalizedPath = normalizeClaimedFilePath(claim.file_path);
2957
+ if (!claimByNormalizedPath.has(normalizedPath)) {
2958
+ claimByNormalizedPath.set(normalizedPath, claim);
2959
+ }
2960
+ }
2961
+
2962
+ for (const fp of normalizedPaths) {
2963
+ const existing = claimByNormalizedPath.get(fp);
2046
2964
  if (existing) conflicts.push({ file: fp, claimedBy: existing });
2047
2965
  }
2048
2966
  return conflicts;