switchman-dev 0.1.5 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/core/db.js CHANGED
@@ -5,12 +5,16 @@
5
5
 
6
6
  import { createHash, createHmac, randomBytes } from 'node:crypto';
7
7
  import { DatabaseSync } from 'node:sqlite';
8
- import { chmodSync, existsSync, mkdirSync, readFileSync, realpathSync, writeFileSync } from 'fs';
9
- import { join, resolve } from 'path';
8
+ import { chmodSync, existsSync, mkdirSync, readFileSync, realpathSync, rmSync, writeFileSync } from 'fs';
9
+ import { join, posix, resolve } from 'path';
10
+ import { matchesPathPatterns } from './ignore.js';
11
+ import { buildModuleDependencyIndexForPath, buildSemanticIndexForPath, classifySubsystemsForPath, listTrackedFiles } from './semantic.js';
10
12
 
11
13
  const SWITCHMAN_DIR = '.switchman';
12
14
  const DB_FILE = 'switchman.db';
13
15
  const AUDIT_KEY_FILE = 'audit.key';
16
+ const MIGRATION_STATE_FILE = 'migration-state.json';
17
+ const CURRENT_SCHEMA_VERSION = 5;
14
18
 
15
19
  // How long (ms) a writer will wait for a lock before giving up.
16
20
  // 5 seconds is generous for a CLI tool with 3-10 concurrent agents.
@@ -50,6 +54,22 @@ function normalizeWorktreePath(path) {
50
54
  }
51
55
  }
52
56
 
57
+ function normalizeClaimedFilePath(filePath) {
58
+ const rawPath = String(filePath || '').replace(/\\/g, '/').trim();
59
+ const normalized = posix.normalize(rawPath.replace(/^\.\/+/, ''));
60
+ if (
61
+ normalized === '' ||
62
+ normalized === '.' ||
63
+ normalized === '..' ||
64
+ normalized.startsWith('../') ||
65
+ rawPath.startsWith('/') ||
66
+ /^[A-Za-z]:\//.test(rawPath)
67
+ ) {
68
+ throw new Error('Claimed file paths must stay inside the repository.');
69
+ }
70
+ return normalized;
71
+ }
72
+
53
73
  function makeId(prefix) {
54
74
  return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
55
75
  }
@@ -72,6 +92,39 @@ function getTableColumns(db, tableName) {
72
92
  return db.prepare(`PRAGMA table_info(${tableName})`).all().map((column) => column.name);
73
93
  }
74
94
 
95
+ function getSchemaVersion(db) {
96
+ return Number(db.prepare('PRAGMA user_version').get()?.user_version || 0);
97
+ }
98
+
99
+ function setSchemaVersion(db, version) {
100
+ db.exec(`PRAGMA user_version=${Number(version) || 0}`);
101
+ }
102
+
103
+ function getMigrationStatePath(repoRoot) {
104
+ return join(getSwitchmanDir(repoRoot), MIGRATION_STATE_FILE);
105
+ }
106
+
107
+ function readMigrationState(repoRoot) {
108
+ const path = getMigrationStatePath(repoRoot);
109
+ if (!existsSync(path)) return null;
110
+ try {
111
+ return JSON.parse(readFileSync(path, 'utf8'));
112
+ } catch {
113
+ throw new Error(`Switchman migration state is unreadable at ${path}. Remove or repair it before reopening the database.`);
114
+ }
115
+ }
116
+
117
+ function writeMigrationState(repoRoot, state) {
118
+ writeFileSync(getMigrationStatePath(repoRoot), `${JSON.stringify(state, null, 2)}\n`);
119
+ }
120
+
121
+ function clearMigrationState(repoRoot) {
122
+ const path = getMigrationStatePath(repoRoot);
123
+ if (existsSync(path)) {
124
+ rmSync(path, { force: true });
125
+ }
126
+ }
127
+
75
128
  function getAuditSecret(repoRoot) {
76
129
  const keyPath = join(getSwitchmanDir(repoRoot), AUDIT_KEY_FILE);
77
130
  if (!existsSync(keyPath)) {
@@ -120,7 +173,7 @@ function signAuditEntry(secret, entryHash) {
120
173
  return createHmac('sha256', secret).update(entryHash).digest('hex');
121
174
  }
122
175
 
123
- function ensureSchema(db) {
176
+ function applySchemaVersion1(db) {
124
177
  db.exec(`
125
178
  CREATE TABLE IF NOT EXISTS tasks (
126
179
  id TEXT PRIMARY KEY,
@@ -292,6 +345,8 @@ function ensureSchema(db) {
292
345
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
293
346
  updated_at TEXT NOT NULL DEFAULT (datetime('now')),
294
347
  last_attempt_at TEXT,
348
+ backoff_until TEXT,
349
+ escalated_at TEXT,
295
350
  started_at TEXT,
296
351
  finished_at TEXT
297
352
  );
@@ -376,11 +431,162 @@ function ensureSchema(db) {
376
431
  CREATE INDEX IF NOT EXISTS idx_merge_queue_pipeline_id ON merge_queue(source_pipeline_id);
377
432
  CREATE INDEX IF NOT EXISTS idx_merge_queue_events_item ON merge_queue_events(queue_item_id);
378
433
  `);
434
+ }
435
+
436
+ function applySchemaVersion2(db) {
437
+ db.exec(`
438
+ CREATE TABLE IF NOT EXISTS operation_journal (
439
+ id TEXT PRIMARY KEY,
440
+ scope_type TEXT NOT NULL,
441
+ scope_id TEXT NOT NULL,
442
+ operation_type TEXT NOT NULL,
443
+ status TEXT NOT NULL DEFAULT 'running',
444
+ details TEXT,
445
+ started_at TEXT NOT NULL DEFAULT (datetime('now')),
446
+ updated_at TEXT NOT NULL DEFAULT (datetime('now')),
447
+ finished_at TEXT
448
+ );
449
+
450
+ CREATE INDEX IF NOT EXISTS idx_operation_journal_scope
451
+ ON operation_journal(scope_type, scope_id, started_at);
452
+ CREATE INDEX IF NOT EXISTS idx_operation_journal_status
453
+ ON operation_journal(status, started_at);
454
+ `);
379
455
 
380
456
  migrateLegacyAuditLog(db);
381
457
  migrateLegacyActiveTasks(db);
382
458
  }
383
459
 
460
+ function applySchemaVersion3(db) {
461
+ db.exec(`
462
+ CREATE TABLE IF NOT EXISTS temp_resources (
463
+ id TEXT PRIMARY KEY,
464
+ scope_type TEXT NOT NULL,
465
+ scope_id TEXT NOT NULL,
466
+ operation_id TEXT,
467
+ resource_type TEXT NOT NULL,
468
+ path TEXT NOT NULL,
469
+ branch TEXT,
470
+ status TEXT NOT NULL DEFAULT 'active',
471
+ details TEXT,
472
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
473
+ updated_at TEXT NOT NULL DEFAULT (datetime('now')),
474
+ released_at TEXT
475
+ );
476
+
477
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_scope
478
+ ON temp_resources(scope_type, scope_id, created_at);
479
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_status
480
+ ON temp_resources(status, created_at);
481
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_path
482
+ ON temp_resources(path);
483
+ CREATE INDEX IF NOT EXISTS idx_temp_resources_operation
484
+ ON temp_resources(operation_id);
485
+ `);
486
+ }
487
+
488
+ function applySchemaVersion4(db) {
489
+ const mergeQueueColumns = getTableColumns(db, 'merge_queue');
490
+ if (mergeQueueColumns.length > 0 && !mergeQueueColumns.includes('backoff_until')) {
491
+ db.exec(`ALTER TABLE merge_queue ADD COLUMN backoff_until TEXT`);
492
+ }
493
+ if (mergeQueueColumns.length > 0 && !mergeQueueColumns.includes('escalated_at')) {
494
+ db.exec(`ALTER TABLE merge_queue ADD COLUMN escalated_at TEXT`);
495
+ }
496
+
497
+ db.exec(`
498
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_backoff_until ON merge_queue(backoff_until);
499
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_escalated_at ON merge_queue(escalated_at);
500
+ `);
501
+ }
502
+
503
+ function applySchemaVersion5(db) {
504
+ db.exec(`
505
+ CREATE TABLE IF NOT EXISTS policy_overrides (
506
+ id TEXT PRIMARY KEY,
507
+ pipeline_id TEXT NOT NULL,
508
+ requirement_keys TEXT NOT NULL DEFAULT '[]',
509
+ task_types TEXT NOT NULL DEFAULT '[]',
510
+ status TEXT NOT NULL DEFAULT 'active',
511
+ reason TEXT NOT NULL,
512
+ approved_by TEXT,
513
+ details TEXT,
514
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
515
+ revoked_at TEXT,
516
+ revoked_by TEXT,
517
+ revoked_reason TEXT
518
+ );
519
+
520
+ CREATE INDEX IF NOT EXISTS idx_policy_overrides_pipeline
521
+ ON policy_overrides(pipeline_id, created_at);
522
+ CREATE INDEX IF NOT EXISTS idx_policy_overrides_status
523
+ ON policy_overrides(status, created_at);
524
+ `);
525
+ }
526
+
527
+ function ensureSchemaMigrated(db) {
528
+ const repoRoot = db.__switchmanRepoRoot;
529
+ if (!repoRoot) {
530
+ throw new Error('Database repo root is not configured.');
531
+ }
532
+
533
+ const recordedState = readMigrationState(repoRoot);
534
+ const currentVersion = getSchemaVersion(db);
535
+
536
+ if (currentVersion > CURRENT_SCHEMA_VERSION) {
537
+ throw new Error(`Switchman database schema version ${currentVersion} is newer than this CLI supports (${CURRENT_SCHEMA_VERSION}). Upgrade Switchman before opening this repo.`);
538
+ }
539
+
540
+ if (recordedState?.status === 'running') {
541
+ throw new Error(`Switchman detected an interrupted database migration from version ${recordedState.from_version} to ${recordedState.to_version}. Resolve the migration state in ${getMigrationStatePath(repoRoot)} before reopening the database.`);
542
+ }
543
+
544
+ if (currentVersion === CURRENT_SCHEMA_VERSION) {
545
+ if (recordedState) {
546
+ clearMigrationState(repoRoot);
547
+ }
548
+ return;
549
+ }
550
+
551
+ writeMigrationState(repoRoot, {
552
+ status: 'running',
553
+ from_version: currentVersion,
554
+ to_version: CURRENT_SCHEMA_VERSION,
555
+ started_at: new Date().toISOString(),
556
+ });
557
+
558
+ try {
559
+ withImmediateTransaction(db, () => {
560
+ if (currentVersion < 1) {
561
+ applySchemaVersion1(db);
562
+ }
563
+ if (currentVersion < 2) {
564
+ applySchemaVersion2(db);
565
+ }
566
+ if (currentVersion < 3) {
567
+ applySchemaVersion3(db);
568
+ }
569
+ if (currentVersion < 4) {
570
+ applySchemaVersion4(db);
571
+ }
572
+ if (currentVersion < 5) {
573
+ applySchemaVersion5(db);
574
+ }
575
+ setSchemaVersion(db, CURRENT_SCHEMA_VERSION);
576
+ });
577
+ clearMigrationState(repoRoot);
578
+ } catch (err) {
579
+ writeMigrationState(repoRoot, {
580
+ status: 'failed',
581
+ from_version: currentVersion,
582
+ to_version: CURRENT_SCHEMA_VERSION,
583
+ failed_at: new Date().toISOString(),
584
+ error: String(err?.message || err),
585
+ });
586
+ throw err;
587
+ }
588
+ }
589
+
384
590
  function normalizeScopeRoot(pattern) {
385
591
  return String(pattern || '')
386
592
  .replace(/\\/g, '/')
@@ -426,6 +632,84 @@ function buildSpecOverlap(sourceSpec = null, affectedSpec = null) {
426
632
  };
427
633
  }
428
634
 
635
+ function taskSpecMatchesObject(taskSpec = null, object = null) {
636
+ if (!taskSpec || !object) return false;
637
+ const allowedPaths = Array.isArray(taskSpec.allowed_paths) ? taskSpec.allowed_paths.filter(Boolean) : [];
638
+ if (allowedPaths.length > 0 && matchesPathPatterns(object.file_path, allowedPaths)) {
639
+ return true;
640
+ }
641
+
642
+ const subsystemTags = Array.isArray(taskSpec.subsystem_tags) ? taskSpec.subsystem_tags.filter(Boolean) : [];
643
+ return subsystemTags.some((tag) => (object.subsystem_tags || []).includes(tag));
644
+ }
645
+
646
+ function taskSpecMatchesFilePath(taskSpec = null, filePath = null) {
647
+ if (!taskSpec || !filePath) return false;
648
+ const allowedPaths = Array.isArray(taskSpec.allowed_paths) ? taskSpec.allowed_paths.filter(Boolean) : [];
649
+ if (allowedPaths.length > 0 && matchesPathPatterns(filePath, allowedPaths)) {
650
+ return true;
651
+ }
652
+
653
+ const subsystemTags = Array.isArray(taskSpec.subsystem_tags) ? taskSpec.subsystem_tags.filter(Boolean) : [];
654
+ const fileSubsystems = classifySubsystemsForPath(filePath);
655
+ return subsystemTags.some((tag) => fileSubsystems.includes(tag));
656
+ }
657
+
658
+ function buildSemanticDependencyOverlap(db, sourceSpec, affectedSpec, changedFiles = []) {
659
+ const repoRoot = db.__switchmanRepoRoot;
660
+ if (!repoRoot || !sourceSpec || !affectedSpec || changedFiles.length === 0) {
661
+ return [];
662
+ }
663
+
664
+ const changedObjects = buildSemanticIndexForPath(repoRoot, changedFiles).objects || [];
665
+ const sourceObjects = changedObjects.filter((object) =>
666
+ changedFiles.includes(object.file_path)
667
+ && taskSpecMatchesObject(sourceSpec, object),
668
+ );
669
+ const trackedSourceFiles = listTrackedFiles(repoRoot, { sourceOnly: true });
670
+ const semanticCandidateFiles = [...new Set([...trackedSourceFiles, ...changedFiles])];
671
+ const affectedObjects = buildSemanticIndexForPath(repoRoot, semanticCandidateFiles).objects
672
+ .filter((object) => taskSpecMatchesObject(affectedSpec, object));
673
+ const affectedKeys = new Set(affectedObjects.map((object) => `${object.kind}:${object.name}`));
674
+ const overlaps = sourceObjects
675
+ .filter((object) => affectedKeys.has(`${object.kind}:${object.name}`))
676
+ .map((object) => ({
677
+ overlap_type: ['interface', 'type'].includes(object.kind) ? 'contract' : 'exported_object',
678
+ kind: object.kind,
679
+ name: object.name,
680
+ file_path: object.file_path,
681
+ area: object.area || null,
682
+ }));
683
+
684
+ const sourceChangedFiles = changedFiles.filter((filePath) => taskSpecMatchesFilePath(sourceSpec, filePath));
685
+ if (sourceChangedFiles.length === 0) return overlaps;
686
+
687
+ const moduleDependencies = buildModuleDependencyIndexForPath(repoRoot, { filePaths: semanticCandidateFiles }).dependencies || [];
688
+ const sharedModuleDependents = moduleDependencies.filter((dependency) =>
689
+ sourceChangedFiles.includes(dependency.imported_path)
690
+ && taskSpecMatchesFilePath(affectedSpec, dependency.file_path)
691
+ && !sourceChangedFiles.includes(dependency.file_path)
692
+ );
693
+ const dependentFiles = [...new Set(sharedModuleDependents.map((item) => item.file_path))];
694
+ const sharedModulePaths = [...new Set(sharedModuleDependents.map((item) => item.imported_path))];
695
+
696
+ if (dependentFiles.length > 0) {
697
+ overlaps.push({
698
+ overlap_type: 'shared_module',
699
+ kind: 'module',
700
+ name: sharedModulePaths[0],
701
+ file_path: sharedModulePaths[0],
702
+ area: sharedModuleDependents[0]?.area || null,
703
+ dependent_files: dependentFiles,
704
+ module_paths: sharedModulePaths,
705
+ subsystem_tags: [...new Set(sharedModuleDependents.flatMap((item) => item.subsystem_tags || []))],
706
+ dependent_areas: [...new Set(sharedModuleDependents.map((item) => item.area).filter(Boolean))],
707
+ });
708
+ }
709
+
710
+ return overlaps;
711
+ }
712
+
429
713
  function buildLeaseScopeReservations(lease, taskSpec) {
430
714
  if (!taskSpec) return [];
431
715
 
@@ -890,7 +1174,7 @@ function resolveDependencyInvalidationsForAffectedTaskTx(db, affectedTaskId, res
890
1174
  `).run(resolvedBy || null, resolvedBy || null, affectedTaskId);
891
1175
  }
892
1176
 
893
- function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
1177
+ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write', context = {}) {
894
1178
  const execution = getLeaseExecutionContext(db, leaseId);
895
1179
  if (!execution?.task || !execution.task_spec) {
896
1180
  return [];
@@ -912,7 +1196,8 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
912
1196
  if ((affectedSpec.pipeline_id || null) === sourcePipelineId) continue;
913
1197
 
914
1198
  const overlap = buildSpecOverlap(sourceSpec, affectedSpec);
915
- if (overlap.shared_subsystems.length === 0 && overlap.shared_scopes.length === 0) continue;
1199
+ const semanticOverlap = buildSemanticDependencyOverlap(db, sourceSpec, affectedSpec, context.changed_files || []);
1200
+ if (overlap.shared_subsystems.length === 0 && overlap.shared_scopes.length === 0 && semanticOverlap.length === 0) continue;
916
1201
 
917
1202
  const affectedWorktree = affectedTask.worktree || null;
918
1203
  for (const subsystemTag of overlap.shared_subsystems) {
@@ -933,6 +1218,8 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
933
1218
  source,
934
1219
  source_task_title: sourceTask.title,
935
1220
  affected_task_title: affectedTask.title,
1221
+ source_task_priority: Number(sourceTask.priority || 0),
1222
+ affected_task_priority: Number(affectedTask.priority || 0),
936
1223
  },
937
1224
  });
938
1225
  }
@@ -955,6 +1242,100 @@ function syncDependencyInvalidationsForLeaseTx(db, leaseId, source = 'write') {
955
1242
  source,
956
1243
  source_task_title: sourceTask.title,
957
1244
  affected_task_title: affectedTask.title,
1245
+ source_task_priority: Number(sourceTask.priority || 0),
1246
+ affected_task_priority: Number(affectedTask.priority || 0),
1247
+ },
1248
+ });
1249
+ }
1250
+
1251
+ const semanticContractOverlap = semanticOverlap.filter((item) => item.overlap_type === 'contract');
1252
+ const semanticObjectOverlap = semanticOverlap.filter((item) => item.overlap_type === 'exported_object');
1253
+ const sharedModuleOverlap = semanticOverlap.filter((item) => item.overlap_type === 'shared_module');
1254
+
1255
+ if (semanticContractOverlap.length > 0) {
1256
+ desired.push({
1257
+ source_lease_id: leaseId,
1258
+ source_task_id: sourceTask.id,
1259
+ source_pipeline_id: sourcePipelineId,
1260
+ source_worktree: sourceWorktree,
1261
+ affected_task_id: affectedTask.id,
1262
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1263
+ affected_worktree: affectedWorktree,
1264
+ status: 'stale',
1265
+ reason_type: 'semantic_contract_drift',
1266
+ subsystem_tag: null,
1267
+ source_scope_pattern: null,
1268
+ affected_scope_pattern: null,
1269
+ details: {
1270
+ source,
1271
+ source_task_title: sourceTask.title,
1272
+ affected_task_title: affectedTask.title,
1273
+ source_task_priority: Number(sourceTask.priority || 0),
1274
+ affected_task_priority: Number(affectedTask.priority || 0),
1275
+ contract_names: [...new Set(semanticContractOverlap.map((item) => item.name))],
1276
+ contract_kinds: [...new Set(semanticContractOverlap.map((item) => item.kind))],
1277
+ contract_files: [...new Set(semanticContractOverlap.map((item) => item.file_path))],
1278
+ revalidation_set: 'contract',
1279
+ severity: 'blocked',
1280
+ },
1281
+ });
1282
+ }
1283
+
1284
+ if (semanticObjectOverlap.length > 0) {
1285
+ desired.push({
1286
+ source_lease_id: leaseId,
1287
+ source_task_id: sourceTask.id,
1288
+ source_pipeline_id: sourcePipelineId,
1289
+ source_worktree: sourceWorktree,
1290
+ affected_task_id: affectedTask.id,
1291
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1292
+ affected_worktree: affectedWorktree,
1293
+ status: 'stale',
1294
+ reason_type: 'semantic_object_overlap',
1295
+ subsystem_tag: null,
1296
+ source_scope_pattern: null,
1297
+ affected_scope_pattern: null,
1298
+ details: {
1299
+ source,
1300
+ source_task_title: sourceTask.title,
1301
+ affected_task_title: affectedTask.title,
1302
+ source_task_priority: Number(sourceTask.priority || 0),
1303
+ affected_task_priority: Number(affectedTask.priority || 0),
1304
+ object_names: [...new Set(semanticObjectOverlap.map((item) => item.name))],
1305
+ object_kinds: [...new Set(semanticObjectOverlap.map((item) => item.kind))],
1306
+ object_files: [...new Set(semanticObjectOverlap.map((item) => item.file_path))],
1307
+ revalidation_set: 'semantic_object',
1308
+ severity: 'warn',
1309
+ },
1310
+ });
1311
+ }
1312
+
1313
+ if (sharedModuleOverlap.length > 0) {
1314
+ desired.push({
1315
+ source_lease_id: leaseId,
1316
+ source_task_id: sourceTask.id,
1317
+ source_pipeline_id: sourcePipelineId,
1318
+ source_worktree: sourceWorktree,
1319
+ affected_task_id: affectedTask.id,
1320
+ affected_pipeline_id: affectedSpec.pipeline_id || null,
1321
+ affected_worktree: affectedWorktree,
1322
+ status: 'stale',
1323
+ reason_type: 'shared_module_drift',
1324
+ subsystem_tag: null,
1325
+ source_scope_pattern: null,
1326
+ affected_scope_pattern: null,
1327
+ details: {
1328
+ source,
1329
+ source_task_title: sourceTask.title,
1330
+ affected_task_title: affectedTask.title,
1331
+ source_task_priority: Number(sourceTask.priority || 0),
1332
+ affected_task_priority: Number(affectedTask.priority || 0),
1333
+ module_paths: [...new Set(sharedModuleOverlap.flatMap((item) => item.module_paths || [item.file_path]).filter(Boolean))],
1334
+ dependent_files: [...new Set(sharedModuleOverlap.flatMap((item) => item.dependent_files || []))],
1335
+ dependent_areas: [...new Set(sharedModuleOverlap.flatMap((item) => item.dependent_areas || []).filter(Boolean))],
1336
+ dependent_subsystems: [...new Set(sharedModuleOverlap.flatMap((item) => item.subsystem_tags || []).filter(Boolean))],
1337
+ revalidation_set: 'shared_module',
1338
+ severity: 'warn',
958
1339
  },
959
1340
  });
960
1341
  }
@@ -1262,7 +1643,7 @@ export function initDb(repoRoot) {
1262
1643
  db.__switchmanRepoRoot = repoRoot;
1263
1644
  db.__switchmanAuditSecret = getAuditSecret(repoRoot);
1264
1645
  configureDb(db, { initialize: true });
1265
- withBusyRetry(() => ensureSchema(db));
1646
+ withBusyRetry(() => ensureSchemaMigrated(db));
1266
1647
  return db;
1267
1648
  }
1268
1649
 
@@ -1275,7 +1656,7 @@ export function openDb(repoRoot) {
1275
1656
  db.__switchmanRepoRoot = repoRoot;
1276
1657
  db.__switchmanAuditSecret = getAuditSecret(repoRoot);
1277
1658
  configureDb(db);
1278
- withBusyRetry(() => ensureSchema(db));
1659
+ withBusyRetry(() => ensureSchemaMigrated(db));
1279
1660
  return db;
1280
1661
  }
1281
1662
 
@@ -1422,6 +1803,7 @@ export function enqueueMergeItem(db, {
1422
1803
  targetBranch = 'main',
1423
1804
  maxRetries = 1,
1424
1805
  submittedBy = null,
1806
+ eventDetails = null,
1425
1807
  } = {}) {
1426
1808
  const itemId = id || makeId('mq');
1427
1809
  db.prepare(`
@@ -1450,6 +1832,7 @@ export function enqueueMergeItem(db, {
1450
1832
  source_worktree: sourceWorktree || null,
1451
1833
  source_pipeline_id: sourcePipelineId || null,
1452
1834
  target_branch: targetBranch || 'main',
1835
+ ...(eventDetails || {}),
1453
1836
  }),
1454
1837
  });
1455
1838
 
@@ -1502,10 +1885,350 @@ export function logMergeQueueEvent(db, itemId, {
1502
1885
  `).run(itemId, eventType, status || null, details == null ? null : String(details));
1503
1886
  }
1504
1887
 
1888
+ export function listOperationJournal(db, {
1889
+ scopeType = null,
1890
+ scopeId = null,
1891
+ status = null,
1892
+ limit = 50,
1893
+ } = {}) {
1894
+ const clauses = [];
1895
+ const params = [];
1896
+ if (scopeType) {
1897
+ clauses.push('scope_type=?');
1898
+ params.push(scopeType);
1899
+ }
1900
+ if (scopeId) {
1901
+ clauses.push('scope_id=?');
1902
+ params.push(scopeId);
1903
+ }
1904
+ if (status) {
1905
+ clauses.push('status=?');
1906
+ params.push(status);
1907
+ }
1908
+ const where = clauses.length ? `WHERE ${clauses.join(' AND ')}` : '';
1909
+ return db.prepare(`
1910
+ SELECT *
1911
+ FROM operation_journal
1912
+ ${where}
1913
+ ORDER BY datetime(started_at) DESC, id DESC
1914
+ LIMIT ?
1915
+ `).all(...params, limit);
1916
+ }
1917
+
1918
+ export function startOperationJournalEntry(db, {
1919
+ id = null,
1920
+ scopeType,
1921
+ scopeId,
1922
+ operationType,
1923
+ details = null,
1924
+ } = {}) {
1925
+ const entryId = id || makeId('op');
1926
+ db.prepare(`
1927
+ INSERT INTO operation_journal (id, scope_type, scope_id, operation_type, status, details)
1928
+ VALUES (?, ?, ?, ?, 'running', ?)
1929
+ `).run(entryId, scopeType, scopeId, operationType, details == null ? null : String(details));
1930
+ return db.prepare(`
1931
+ SELECT *
1932
+ FROM operation_journal
1933
+ WHERE id=?
1934
+ `).get(entryId);
1935
+ }
1936
+
1937
+ export function finishOperationJournalEntry(db, entryId, {
1938
+ status = 'completed',
1939
+ details = null,
1940
+ } = {}) {
1941
+ db.prepare(`
1942
+ UPDATE operation_journal
1943
+ SET status=?,
1944
+ details=COALESCE(?, details),
1945
+ updated_at=datetime('now'),
1946
+ finished_at=datetime('now')
1947
+ WHERE id=?
1948
+ `).run(status, details == null ? null : String(details), entryId);
1949
+ return db.prepare(`
1950
+ SELECT *
1951
+ FROM operation_journal
1952
+ WHERE id=?
1953
+ `).get(entryId);
1954
+ }
1955
+
1956
+ export function listTempResources(db, {
1957
+ scopeType = null,
1958
+ scopeId = null,
1959
+ operationId = null,
1960
+ resourceType = null,
1961
+ status = null,
1962
+ limit = 100,
1963
+ } = {}) {
1964
+ const clauses = [];
1965
+ const params = [];
1966
+ if (scopeType) {
1967
+ clauses.push('scope_type=?');
1968
+ params.push(scopeType);
1969
+ }
1970
+ if (scopeId) {
1971
+ clauses.push('scope_id=?');
1972
+ params.push(scopeId);
1973
+ }
1974
+ if (operationId) {
1975
+ clauses.push('operation_id=?');
1976
+ params.push(operationId);
1977
+ }
1978
+ if (resourceType) {
1979
+ clauses.push('resource_type=?');
1980
+ params.push(resourceType);
1981
+ }
1982
+ if (status) {
1983
+ clauses.push('status=?');
1984
+ params.push(status);
1985
+ }
1986
+ const where = clauses.length ? `WHERE ${clauses.join(' AND ')}` : '';
1987
+ return db.prepare(`
1988
+ SELECT *
1989
+ FROM temp_resources
1990
+ ${where}
1991
+ ORDER BY datetime(created_at) DESC, id DESC
1992
+ LIMIT ?
1993
+ `).all(...params, limit);
1994
+ }
1995
+
1996
+ export function getPolicyOverride(db, overrideId) {
1997
+ const entry = db.prepare(`
1998
+ SELECT *
1999
+ FROM policy_overrides
2000
+ WHERE id=?
2001
+ `).get(overrideId);
2002
+ if (!entry) return null;
2003
+ return {
2004
+ ...entry,
2005
+ requirement_keys: (() => {
2006
+ try {
2007
+ return JSON.parse(entry.requirement_keys || '[]');
2008
+ } catch {
2009
+ return [];
2010
+ }
2011
+ })(),
2012
+ task_types: (() => {
2013
+ try {
2014
+ return JSON.parse(entry.task_types || '[]');
2015
+ } catch {
2016
+ return [];
2017
+ }
2018
+ })(),
2019
+ details: (() => {
2020
+ try {
2021
+ return entry.details ? JSON.parse(entry.details) : null;
2022
+ } catch {
2023
+ return null;
2024
+ }
2025
+ })(),
2026
+ };
2027
+ }
2028
+
2029
+ export function listPolicyOverrides(db, {
2030
+ pipelineId = null,
2031
+ status = null,
2032
+ limit = 100,
2033
+ } = {}) {
2034
+ const clauses = [];
2035
+ const params = [];
2036
+ if (pipelineId) {
2037
+ clauses.push('pipeline_id=?');
2038
+ params.push(pipelineId);
2039
+ }
2040
+ if (status) {
2041
+ clauses.push('status=?');
2042
+ params.push(status);
2043
+ }
2044
+ const where = clauses.length ? `WHERE ${clauses.join(' AND ')}` : '';
2045
+ return db.prepare(`
2046
+ SELECT *
2047
+ FROM policy_overrides
2048
+ ${where}
2049
+ ORDER BY datetime(created_at) DESC, id DESC
2050
+ LIMIT ?
2051
+ `).all(...params, limit).map((entry) => ({
2052
+ ...entry,
2053
+ requirement_keys: (() => {
2054
+ try {
2055
+ return JSON.parse(entry.requirement_keys || '[]');
2056
+ } catch {
2057
+ return [];
2058
+ }
2059
+ })(),
2060
+ task_types: (() => {
2061
+ try {
2062
+ return JSON.parse(entry.task_types || '[]');
2063
+ } catch {
2064
+ return [];
2065
+ }
2066
+ })(),
2067
+ details: (() => {
2068
+ try {
2069
+ return entry.details ? JSON.parse(entry.details) : null;
2070
+ } catch {
2071
+ return null;
2072
+ }
2073
+ })(),
2074
+ }));
2075
+ }
2076
+
2077
+ export function createPolicyOverride(db, {
2078
+ pipelineId,
2079
+ requirementKeys = [],
2080
+ taskTypes = [],
2081
+ reason,
2082
+ approvedBy = null,
2083
+ details = null,
2084
+ } = {}) {
2085
+ if (!pipelineId) throw new Error('pipelineId is required for a policy override.');
2086
+ if (!reason || !String(reason).trim()) throw new Error('A policy override reason is required.');
2087
+
2088
+ const overrideId = makeId('po');
2089
+ const normalizedRequirementKeys = [...new Set((requirementKeys || []).map((value) => String(value || '').trim()).filter(Boolean))];
2090
+ const normalizedTaskTypes = [...new Set((taskTypes || []).map((value) => String(value || '').trim()).filter(Boolean))];
2091
+
2092
+ return withImmediateTransaction(db, () => {
2093
+ db.prepare(`
2094
+ INSERT INTO policy_overrides (
2095
+ id, pipeline_id, requirement_keys, task_types, status, reason, approved_by, details
2096
+ )
2097
+ VALUES (?, ?, ?, ?, 'active', ?, ?, ?)
2098
+ `).run(
2099
+ overrideId,
2100
+ pipelineId,
2101
+ JSON.stringify(normalizedRequirementKeys),
2102
+ JSON.stringify(normalizedTaskTypes),
2103
+ String(reason).trim(),
2104
+ approvedBy || null,
2105
+ details == null ? null : JSON.stringify(details),
2106
+ );
2107
+ logAuditEventTx(db, {
2108
+ eventType: 'policy_override_created',
2109
+ status: 'warn',
2110
+ reasonCode: 'policy_override',
2111
+ details: JSON.stringify({
2112
+ override_id: overrideId,
2113
+ pipeline_id: pipelineId,
2114
+ requirement_keys: normalizedRequirementKeys,
2115
+ task_types: normalizedTaskTypes,
2116
+ reason: String(reason).trim(),
2117
+ approved_by: approvedBy || null,
2118
+ details: details || null,
2119
+ }),
2120
+ });
2121
+ return getPolicyOverride(db, overrideId);
2122
+ });
2123
+ }
2124
+
2125
+ export function revokePolicyOverride(db, overrideId, {
2126
+ revokedBy = null,
2127
+ reason = null,
2128
+ } = {}) {
2129
+ return withImmediateTransaction(db, () => {
2130
+ const existing = getPolicyOverride(db, overrideId);
2131
+ if (!existing) {
2132
+ throw new Error(`Policy override ${overrideId} does not exist.`);
2133
+ }
2134
+ if (existing.status !== 'active') {
2135
+ return existing;
2136
+ }
2137
+ db.prepare(`
2138
+ UPDATE policy_overrides
2139
+ SET status='revoked',
2140
+ revoked_at=datetime('now'),
2141
+ revoked_by=?,
2142
+ revoked_reason=?
2143
+ WHERE id=?
2144
+ `).run(revokedBy || null, reason || null, overrideId);
2145
+ logAuditEventTx(db, {
2146
+ eventType: 'policy_override_revoked',
2147
+ status: 'info',
2148
+ reasonCode: 'policy_override_revoked',
2149
+ details: JSON.stringify({
2150
+ override_id: overrideId,
2151
+ pipeline_id: existing.pipeline_id,
2152
+ revoked_by: revokedBy || null,
2153
+ revoked_reason: reason || null,
2154
+ }),
2155
+ });
2156
+ return getPolicyOverride(db, overrideId);
2157
+ });
2158
+ }
2159
+
2160
+ export function createTempResource(db, {
2161
+ id = null,
2162
+ scopeType,
2163
+ scopeId,
2164
+ operationId = null,
2165
+ resourceType,
2166
+ path,
2167
+ branch = null,
2168
+ details = null,
2169
+ status = 'active',
2170
+ } = {}) {
2171
+ const resourceId = id || makeId('res');
2172
+ db.prepare(`
2173
+ INSERT INTO temp_resources (
2174
+ id, scope_type, scope_id, operation_id, resource_type, path, branch, status, details
2175
+ )
2176
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
2177
+ `).run(
2178
+ resourceId,
2179
+ scopeType,
2180
+ scopeId,
2181
+ operationId || null,
2182
+ resourceType,
2183
+ path,
2184
+ branch || null,
2185
+ status,
2186
+ details == null ? null : String(details),
2187
+ );
2188
+ return db.prepare(`
2189
+ SELECT *
2190
+ FROM temp_resources
2191
+ WHERE id=?
2192
+ `).get(resourceId);
2193
+ }
2194
+
2195
+ export function updateTempResource(db, resourceId, {
2196
+ status = null,
2197
+ path = null,
2198
+ branch = null,
2199
+ details = null,
2200
+ } = {}) {
2201
+ db.prepare(`
2202
+ UPDATE temp_resources
2203
+ SET status=COALESCE(?, status),
2204
+ path=COALESCE(?, path),
2205
+ branch=COALESCE(?, branch),
2206
+ details=COALESCE(?, details),
2207
+ updated_at=datetime('now'),
2208
+ released_at=CASE
2209
+ WHEN COALESCE(?, status) = 'active' THEN NULL
2210
+ ELSE COALESCE(released_at, datetime('now'))
2211
+ END
2212
+ WHERE id=?
2213
+ `).run(
2214
+ status,
2215
+ path,
2216
+ branch,
2217
+ details == null ? null : String(details),
2218
+ status,
2219
+ resourceId,
2220
+ );
2221
+ return db.prepare(`
2222
+ SELECT *
2223
+ FROM temp_resources
2224
+ WHERE id=?
2225
+ `).get(resourceId);
2226
+ }
2227
+
1505
2228
  export function startMergeQueueItem(db, itemId) {
1506
2229
  return withImmediateTransaction(db, () => {
1507
2230
  const item = getMergeQueueItem(db, itemId);
1508
- if (!item || !['queued', 'retrying'].includes(item.status)) {
2231
+ if (!item || !['queued', 'retrying', 'held', 'wave_blocked', 'escalated'].includes(item.status)) {
1509
2232
  return null;
1510
2233
  }
1511
2234
 
@@ -1515,7 +2238,7 @@ export function startMergeQueueItem(db, itemId) {
1515
2238
  started_at=COALESCE(started_at, datetime('now')),
1516
2239
  last_attempt_at=datetime('now'),
1517
2240
  updated_at=datetime('now')
1518
- WHERE id=? AND status IN ('queued', 'retrying')
2241
+ WHERE id=? AND status IN ('queued', 'retrying', 'held', 'wave_blocked', 'escalated')
1519
2242
  `).run(itemId);
1520
2243
 
1521
2244
  logMergeQueueEvent(db, itemId, {
@@ -1534,6 +2257,7 @@ export function markMergeQueueState(db, itemId, {
1534
2257
  nextAction = null,
1535
2258
  mergedCommit = null,
1536
2259
  incrementRetry = false,
2260
+ backoffUntil = undefined,
1537
2261
  } = {}) {
1538
2262
  const terminal = ['merged', 'blocked', 'failed', 'canceled'].includes(status);
1539
2263
  db.prepare(`
@@ -1544,6 +2268,15 @@ export function markMergeQueueState(db, itemId, {
1544
2268
  next_action=?,
1545
2269
  merged_commit=COALESCE(?, merged_commit),
1546
2270
  retry_count=retry_count + ?,
2271
+ backoff_until=CASE
2272
+ WHEN ? THEN ?
2273
+ WHEN ? = 'retrying' THEN backoff_until
2274
+ ELSE NULL
2275
+ END,
2276
+ escalated_at=CASE
2277
+ WHEN ? = 'escalated' THEN COALESCE(escalated_at, datetime('now'))
2278
+ ELSE NULL
2279
+ END,
1547
2280
  updated_at=datetime('now'),
1548
2281
  finished_at=CASE WHEN ? THEN datetime('now') ELSE finished_at END
1549
2282
  WHERE id=?
@@ -1554,6 +2287,10 @@ export function markMergeQueueState(db, itemId, {
1554
2287
  nextAction || null,
1555
2288
  mergedCommit || null,
1556
2289
  incrementRetry ? 1 : 0,
2290
+ backoffUntil !== undefined ? 1 : 0,
2291
+ backoffUntil || null,
2292
+ status,
2293
+ status,
1557
2294
  terminal ? 1 : 0,
1558
2295
  itemId,
1559
2296
  );
@@ -1575,7 +2312,7 @@ export function markMergeQueueState(db, itemId, {
1575
2312
 
1576
2313
  export function retryMergeQueueItem(db, itemId) {
1577
2314
  const item = getMergeQueueItem(db, itemId);
1578
- if (!item || !['blocked', 'failed'].includes(item.status)) {
2315
+ if (!item || !['blocked', 'failed', 'held', 'wave_blocked', 'escalated', 'retrying'].includes(item.status)) {
1579
2316
  return null;
1580
2317
  }
1581
2318
 
@@ -1585,6 +2322,8 @@ export function retryMergeQueueItem(db, itemId) {
1585
2322
  last_error_code=NULL,
1586
2323
  last_error_summary=NULL,
1587
2324
  next_action=NULL,
2325
+ backoff_until=NULL,
2326
+ escalated_at=NULL,
1588
2327
  finished_at=NULL,
1589
2328
  updated_at=datetime('now')
1590
2329
  WHERE id=?
@@ -1598,6 +2337,35 @@ export function retryMergeQueueItem(db, itemId) {
1598
2337
  return getMergeQueueItem(db, itemId);
1599
2338
  }
1600
2339
 
2340
export function escalateMergeQueueItem(db, itemId, {
  summary = null,
  nextAction = null,
} = {}) {
  // Fetch the queue item; items that already reached a terminal outcome
  // (merged or canceled) can no longer be escalated.
  const queueItem = getMergeQueueItem(db, itemId);
  if (!queueItem) return null;
  if (queueItem.status === 'merged' || queueItem.status === 'canceled') return null;

  // Resolve the operator-facing messaging: explicit argument wins, then the
  // item's existing messaging, then a generic default.
  const escalationSummary = summary
    || queueItem.last_error_summary
    || 'Operator escalation requested before this queue item lands.';
  const escalationNextAction = nextAction
    || queueItem.next_action
    || `Run \`switchman explain queue ${itemId}\` to review the risk, then \`switchman queue retry ${itemId}\` when you are ready to land it again.`;

  // Idempotence: re-escalating with identical messaging is a no-op, so we
  // avoid rewriting the row (and re-stamping timestamps) on repeat calls.
  const unchanged = queueItem.status === 'escalated'
    && (queueItem.last_error_summary || null) === escalationSummary
    && (queueItem.next_action || null) === escalationNextAction;
  if (unchanged) return queueItem;

  // Delegate the state transition; clearing backoffUntil makes the item
  // immediately visible to operators rather than sleeping out a retry delay.
  return markMergeQueueState(db, itemId, {
    status: 'escalated',
    lastErrorCode: 'queue_escalated_manual',
    lastErrorSummary: escalationSummary,
    nextAction: escalationNextAction,
    backoffUntil: null,
  });
}
2368
+
1601
2369
  export function removeMergeQueueItem(db, itemId) {
1602
2370
  const item = getMergeQueueItem(db, itemId);
1603
2371
  if (!item) return null;
@@ -1755,7 +2523,7 @@ export function listDependencyInvalidations(db, { status = 'stale', pipelineId =
1755
2523
  }));
1756
2524
  }
1757
2525
 
1758
- export function touchBoundaryValidationState(db, leaseId, source = 'write') {
2526
+ export function touchBoundaryValidationState(db, leaseId, source = 'write', context = {}) {
1759
2527
  return withImmediateTransaction(db, () => {
1760
2528
  const state = computeBoundaryValidationStateTx(db, leaseId, { touched: true, source });
1761
2529
  if (state) {
@@ -1774,7 +2542,7 @@ export function touchBoundaryValidationState(db, leaseId, source = 'write') {
1774
2542
  });
1775
2543
  }
1776
2544
 
1777
- const invalidations = syncDependencyInvalidationsForLeaseTx(db, leaseId, source);
2545
+ const invalidations = syncDependencyInvalidationsForLeaseTx(db, leaseId, source, context);
1778
2546
  if (invalidations.length > 0) {
1779
2547
  logAuditEventTx(db, {
1780
2548
  eventType: 'dependency_invalidations_updated',
@@ -1786,6 +2554,8 @@ export function touchBoundaryValidationState(db, leaseId, source = 'write') {
1786
2554
  source,
1787
2555
  stale_count: invalidations.length,
1788
2556
  affected_task_ids: [...new Set(invalidations.map((item) => item.affected_task_id))],
2557
+ reason_types: [...new Set(invalidations.map((item) => item.reason_type))],
2558
+ revalidation_sets: [...new Set(invalidations.map((item) => item.details?.revalidation_set).filter(Boolean))],
1789
2559
  }),
1790
2560
  });
1791
2561
  }
@@ -1934,19 +2704,29 @@ export function reapStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINU
1934
2704
  export function claimFiles(db, taskId, worktree, filePaths, agent) {
1935
2705
  return withImmediateTransaction(db, () => {
1936
2706
  const lease = resolveActiveLeaseTx(db, taskId, worktree, agent);
1937
- const findActiveClaim = db.prepare(`
2707
+ const activeClaims = db.prepare(`
1938
2708
  SELECT *
1939
2709
  FROM file_claims
1940
- WHERE file_path=? AND released_at IS NULL
1941
- LIMIT 1
1942
- `);
2710
+ WHERE released_at IS NULL
2711
+ ORDER BY id ASC
2712
+ `).all();
2713
+ const activeClaimByPath = new Map();
2714
+ for (const claim of activeClaims) {
2715
+ const normalizedPath = normalizeClaimedFilePath(claim.file_path);
2716
+ if (!activeClaimByPath.has(normalizedPath)) {
2717
+ activeClaimByPath.set(normalizedPath, claim);
2718
+ }
2719
+ }
2720
+
2721
+ const normalizeAndDeduplicate = [...new Set(filePaths.map((filePath) => normalizeClaimedFilePath(filePath)))];
2722
+ const findClaimOwner = (normalizedPath) => activeClaimByPath.get(normalizedPath) || null;
1943
2723
  const insert = db.prepare(`
1944
2724
  INSERT INTO file_claims (task_id, lease_id, file_path, worktree, agent)
1945
2725
  VALUES (?, ?, ?, ?, ?)
1946
2726
  `);
1947
2727
 
1948
- for (const fp of filePaths) {
1949
- const existing = findActiveClaim.get(fp);
2728
+ for (const normalizedPath of normalizeAndDeduplicate) {
2729
+ const existing = findClaimOwner(normalizedPath);
1950
2730
  if (existing) {
1951
2731
  const sameLease = existing.lease_id === lease.id;
1952
2732
  const sameLegacyOwner = existing.lease_id == null && existing.task_id === taskId && existing.worktree === worktree;
@@ -1958,6 +2738,7 @@ export function claimFiles(db, taskId, worktree, filePaths, agent) {
1958
2738
  SET lease_id=?, agent=COALESCE(?, agent)
1959
2739
  WHERE id=?
1960
2740
  `).run(lease.id, agent || null, existing.id);
2741
+ activeClaimByPath.set(normalizedPath, { ...existing, lease_id: lease.id, agent: agent || existing.agent });
1961
2742
  }
1962
2743
  continue;
1963
2744
  }
@@ -1965,14 +2746,21 @@ export function claimFiles(db, taskId, worktree, filePaths, agent) {
1965
2746
  throw new Error('One or more files are already actively claimed by another task.');
1966
2747
  }
1967
2748
 
1968
- insert.run(taskId, lease.id, fp, worktree, agent || null);
2749
+ insert.run(taskId, lease.id, normalizedPath, worktree, agent || null);
2750
+ activeClaimByPath.set(normalizedPath, {
2751
+ task_id: taskId,
2752
+ lease_id: lease.id,
2753
+ file_path: normalizedPath,
2754
+ worktree,
2755
+ agent: agent || null,
2756
+ });
1969
2757
  logAuditEventTx(db, {
1970
2758
  eventType: 'file_claimed',
1971
2759
  status: 'allowed',
1972
2760
  worktree,
1973
2761
  taskId,
1974
2762
  leaseId: lease.id,
1975
- filePath: fp,
2763
+ filePath: normalizedPath,
1976
2764
  });
1977
2765
  }
1978
2766
 
@@ -2030,19 +2818,27 @@ export function getCompletedFileClaims(db, worktree = null) {
2030
2818
  }
2031
2819
 
2032
2820
  export function checkFileConflicts(db, filePaths, excludeWorktree) {
2821
+ const normalizedPaths = [...new Set(filePaths.map((filePath) => normalizeClaimedFilePath(filePath)))];
2033
2822
  const conflicts = [];
2034
- const stmt = db.prepare(`
2823
+ const claims = db.prepare(`
2035
2824
  SELECT fc.*, t.title as task_title, l.id as lease_id, l.status as lease_status
2036
2825
  FROM file_claims fc
2037
2826
  JOIN tasks t ON fc.task_id = t.id
2038
2827
  LEFT JOIN leases l ON fc.lease_id = l.id
2039
- WHERE fc.file_path=?
2040
- AND fc.released_at IS NULL
2828
+ WHERE fc.released_at IS NULL
2041
2829
  AND fc.worktree != ?
2042
2830
  AND t.status NOT IN ('done','failed')
2043
- `);
2044
- for (const fp of filePaths) {
2045
- const existing = stmt.get(fp, excludeWorktree || '');
2831
+ `).all(excludeWorktree || '');
2832
+ const claimByNormalizedPath = new Map();
2833
+ for (const claim of claims) {
2834
+ const normalizedPath = normalizeClaimedFilePath(claim.file_path);
2835
+ if (!claimByNormalizedPath.has(normalizedPath)) {
2836
+ claimByNormalizedPath.set(normalizedPath, claim);
2837
+ }
2838
+ }
2839
+
2840
+ for (const fp of normalizedPaths) {
2841
+ const existing = claimByNormalizedPath.get(fp);
2046
2842
  if (existing) conflicts.push({ file: fp, claimedBy: existing });
2047
2843
  }
2048
2844
  return conflicts;