switchman-dev 0.1.3 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/core/db.js CHANGED
@@ -273,6 +273,38 @@ function ensureSchema(db) {
273
273
  area TEXT,
274
274
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
275
275
  );
276
+
277
+ CREATE TABLE IF NOT EXISTS merge_queue (
278
+ id TEXT PRIMARY KEY,
279
+ source_type TEXT NOT NULL,
280
+ source_ref TEXT NOT NULL,
281
+ source_worktree TEXT,
282
+ source_pipeline_id TEXT,
283
+ target_branch TEXT NOT NULL DEFAULT 'main',
284
+ status TEXT NOT NULL DEFAULT 'queued',
285
+ retry_count INTEGER NOT NULL DEFAULT 0,
286
+ max_retries INTEGER NOT NULL DEFAULT 1,
287
+ last_error_code TEXT,
288
+ last_error_summary TEXT,
289
+ next_action TEXT,
290
+ merged_commit TEXT,
291
+ submitted_by TEXT,
292
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
293
+ updated_at TEXT NOT NULL DEFAULT (datetime('now')),
294
+ last_attempt_at TEXT,
295
+ started_at TEXT,
296
+ finished_at TEXT
297
+ );
298
+
299
+ CREATE TABLE IF NOT EXISTS merge_queue_events (
300
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
301
+ queue_item_id TEXT NOT NULL,
302
+ event_type TEXT NOT NULL,
303
+ status TEXT,
304
+ details TEXT,
305
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
306
+ FOREIGN KEY(queue_item_id) REFERENCES merge_queue(id) ON DELETE CASCADE
307
+ );
276
308
  `);
277
309
 
278
310
  const fileClaimColumns = getTableColumns(db, 'file_claims');
@@ -302,6 +334,11 @@ function ensureSchema(db) {
302
334
  db.exec(`ALTER TABLE audit_log ADD COLUMN signature TEXT`);
303
335
  }
304
336
 
337
+ const mergeQueueColumns = getTableColumns(db, 'merge_queue');
338
+ if (mergeQueueColumns.length > 0 && !mergeQueueColumns.includes('last_attempt_at')) {
339
+ db.exec(`ALTER TABLE merge_queue ADD COLUMN last_attempt_at TEXT`);
340
+ }
341
+
305
342
  db.exec(`
306
343
  CREATE INDEX IF NOT EXISTS idx_tasks_status ON tasks(status);
307
344
  CREATE INDEX IF NOT EXISTS idx_leases_task ON leases(task_id);
@@ -334,6 +371,10 @@ function ensureSchema(db) {
334
371
  CREATE INDEX IF NOT EXISTS idx_dependency_invalidations_status ON dependency_invalidations(status);
335
372
  CREATE INDEX IF NOT EXISTS idx_code_objects_file_path ON code_objects(file_path);
336
373
  CREATE INDEX IF NOT EXISTS idx_code_objects_name ON code_objects(name);
374
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_status ON merge_queue(status);
375
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_created_at ON merge_queue(created_at);
376
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_pipeline_id ON merge_queue(source_pipeline_id);
377
+ CREATE INDEX IF NOT EXISTS idx_merge_queue_events_item ON merge_queue_events(queue_item_id);
337
378
  `);
338
379
 
339
380
  migrateLegacyAuditLog(db);
@@ -1372,6 +1413,198 @@ export function getTask(db, taskId) {
1372
1413
  return db.prepare(`SELECT * FROM tasks WHERE id=?`).get(taskId);
1373
1414
  }
1374
1415
 
1416
/**
 * Insert a new item into the merge queue and record an enqueue event.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {object} [options]
 * @param {string|null} [options.id] - Explicit queue id; generated via makeId('mq') when absent.
 * @param {string} options.sourceType - Kind of merge source (e.g. branch, worktree).
 * @param {string} options.sourceRef - Git ref to merge.
 * @param {string|null} [options.sourceWorktree]
 * @param {string|null} [options.sourcePipelineId]
 * @param {string} [options.targetBranch='main']
 * @param {number} [options.maxRetries=1] - Clamped to a non-negative integer.
 * @param {string|null} [options.submittedBy]
 * @returns {object|undefined} The freshly inserted merge_queue row.
 */
export function enqueueMergeItem(db, {
  id = null,
  sourceType,
  sourceRef,
  sourceWorktree = null,
  sourcePipelineId = null,
  targetBranch = 'main',
  maxRetries = 1,
  submittedBy = null,
} = {}) {
  const queueId = id || makeId('mq');
  const resolvedBranch = targetBranch || 'main';
  // Coerce to a non-negative integer; bad input falls back to 0.
  const retryBudget = Math.max(0, Number.parseInt(maxRetries, 10) || 0);

  const insert = db.prepare(`
    INSERT INTO merge_queue (
      id, source_type, source_ref, source_worktree, source_pipeline_id,
      target_branch, status, retry_count, max_retries, submitted_by
    )
    VALUES (?, ?, ?, ?, ?, ?, 'queued', 0, ?, ?)
  `);
  insert.run(
    queueId,
    sourceType,
    sourceRef,
    sourceWorktree || null,
    sourcePipelineId || null,
    resolvedBranch,
    retryBudget,
    submittedBy || null,
  );

  // Audit trail: capture the enqueue parameters alongside the row.
  logMergeQueueEvent(db, queueId, {
    eventType: 'merge_queue_enqueued',
    status: 'queued',
    details: JSON.stringify({
      source_type: sourceType,
      source_ref: sourceRef,
      source_worktree: sourceWorktree || null,
      source_pipeline_id: sourcePipelineId || null,
      target_branch: resolvedBranch,
    }),
  });

  return getMergeQueueItem(db, queueId);
}
1458
+
1459
/**
 * List merge-queue rows in FIFO order (oldest first, id as tiebreaker).
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {object} [options]
 * @param {string|null} [options.status] - When truthy, only rows with this status.
 * @returns {object[]} Matching merge_queue rows.
 */
export function listMergeQueue(db, { status = null } = {}) {
  const filtered = Boolean(status);
  const sql = filtered
    ? `
      SELECT *
      FROM merge_queue
      WHERE status=?
      ORDER BY datetime(created_at) ASC, id ASC
    `
    : `
      SELECT *
      FROM merge_queue
      ORDER BY datetime(created_at) ASC, id ASC
    `;
  const stmt = db.prepare(sql);
  return filtered ? stmt.all(status) : stmt.all();
}
1475
+
1476
/**
 * Fetch a single merge-queue row by id.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue primary key.
 * @returns {object|undefined} The row, or undefined when not found.
 */
export function getMergeQueueItem(db, itemId) {
  const stmt = db.prepare(`
    SELECT *
    FROM merge_queue
    WHERE id=?
  `);
  return stmt.get(itemId);
}
1483
+
1484
/**
 * List the most recent events for one merge-queue item, newest first.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id whose events to fetch.
 * @param {object} [options]
 * @param {number} [options.limit=10] - Maximum rows returned; coerced to a positive integer.
 * @returns {object[]} merge_queue_events rows.
 */
export function listMergeQueueEvents(db, itemId, { limit = 10 } = {}) {
  // better-sqlite3 rejects non-integer bind values; normalize the limit the
  // same way this module normalizes other integer inputs (cf. maxRetries in
  // enqueueMergeItem) instead of passing caller input straight through.
  const rowLimit = Math.max(1, Number.parseInt(limit, 10) || 10);
  return db.prepare(`
    SELECT *
    FROM merge_queue_events
    WHERE queue_item_id=?
    ORDER BY id DESC
    LIMIT ?
  `).all(itemId, rowLimit);
}
1493
+
1494
/**
 * Append an event row to merge_queue_events for the given queue item.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id the event belongs to.
 * @param {object} [options]
 * @param {string} options.eventType - Event type tag.
 * @param {string|null} [options.status] - Queue status at event time.
 * @param {*} [options.details] - Arbitrary payload; stringified unless null/undefined.
 */
export function logMergeQueueEvent(db, itemId, {
  eventType,
  status = null,
  details = null,
} = {}) {
  // details is stored as TEXT; only null/undefined map to SQL NULL.
  const serializedDetails = details == null ? null : String(details);
  const insert = db.prepare(`
    INSERT INTO merge_queue_events (queue_item_id, event_type, status, details)
    VALUES (?, ?, ?, ?)
  `);
  insert.run(itemId, eventType, status || null, serializedDetails);
}
1504
+
1505
/**
 * Transition a queued/retrying merge-queue item to 'validating' and stamp the
 * attempt timestamps, all inside an immediate transaction.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id to start.
 * @returns {object|null} Updated row, or null when the item is missing or not startable.
 */
export function startMergeQueueItem(db, itemId) {
  return withImmediateTransaction(db, () => {
    const current = getMergeQueueItem(db, itemId);
    const startable = current && ['queued', 'retrying'].includes(current.status);
    if (!startable) {
      return null;
    }

    // Status guard on the UPDATE mirrors the check above so the transition is
    // atomic even if the row changed between read and write.
    db.prepare(`
      UPDATE merge_queue
      SET status='validating',
          started_at=COALESCE(started_at, datetime('now')),
          last_attempt_at=datetime('now'),
          updated_at=datetime('now')
      WHERE id=? AND status IN ('queued', 'retrying')
    `).run(itemId);

    logMergeQueueEvent(db, itemId, {
      eventType: 'merge_queue_started',
      status: 'validating',
    });

    return getMergeQueueItem(db, itemId);
  });
}
1529
+
1530
/**
 * Record a state transition for a merge-queue item and log the change.
 * Terminal states ('merged', 'blocked', 'failed', 'canceled') also stamp
 * finished_at; merged_commit is only ever overwritten by a non-null value.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id to update.
 * @param {object} [options]
 * @param {string} options.status - New status value.
 * @param {string|null} [options.lastErrorCode]
 * @param {string|null} [options.lastErrorSummary]
 * @param {string|null} [options.nextAction]
 * @param {string|null} [options.mergedCommit]
 * @param {boolean} [options.incrementRetry=false] - Bump retry_count by one.
 * @returns {object|undefined} The updated row.
 */
export function markMergeQueueState(db, itemId, {
  status,
  lastErrorCode = null,
  lastErrorSummary = null,
  nextAction = null,
  mergedCommit = null,
  incrementRetry = false,
} = {}) {
  const TERMINAL_STATUSES = ['merged', 'blocked', 'failed', 'canceled'];
  const isTerminal = TERMINAL_STATUSES.includes(status);
  const retryDelta = incrementRetry ? 1 : 0;

  db.prepare(`
    UPDATE merge_queue
    SET status=?,
        last_error_code=?,
        last_error_summary=?,
        next_action=?,
        merged_commit=COALESCE(?, merged_commit),
        retry_count=retry_count + ?,
        updated_at=datetime('now'),
        finished_at=CASE WHEN ? THEN datetime('now') ELSE finished_at END
    WHERE id=?
  `).run(
    status,
    lastErrorCode || null,
    lastErrorSummary || null,
    nextAction || null,
    mergedCommit || null,
    retryDelta,
    isTerminal ? 1 : 0,
    itemId,
  );

  logMergeQueueEvent(db, itemId, {
    eventType: 'merge_queue_state_changed',
    status,
    details: JSON.stringify({
      last_error_code: lastErrorCode || null,
      last_error_summary: lastErrorSummary || null,
      next_action: nextAction || null,
      merged_commit: mergedCommit || null,
      increment_retry: incrementRetry,
    }),
  });

  return getMergeQueueItem(db, itemId);
}
1575
+
1576
/**
 * Requeue a blocked or failed merge-queue item for another attempt, clearing
 * its error bookkeeping and logging a retry event.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id to retry.
 * @returns {object|null} Updated row, or null when the item is missing,
 *   not in a retryable state, or was transitioned concurrently.
 */
export function retryMergeQueueItem(db, itemId) {
  const item = getMergeQueueItem(db, itemId);
  if (!item || !['blocked', 'failed'].includes(item.status)) {
    return null;
  }

  // Guard the UPDATE on the same status set checked above so a concurrent
  // writer that moved the item to another state (e.g. 'merged') between the
  // read and this write cannot be clobbered back to 'retrying'. This mirrors
  // the guarded UPDATE pattern used by startMergeQueueItem.
  const result = db.prepare(`
    UPDATE merge_queue
    SET status='retrying',
        last_error_code=NULL,
        last_error_summary=NULL,
        next_action=NULL,
        finished_at=NULL,
        updated_at=datetime('now')
    WHERE id=? AND status IN ('blocked', 'failed')
  `).run(itemId);

  if (result.changes === 0) {
    // Lost the race: the row left a retryable state before we wrote.
    return null;
  }

  logMergeQueueEvent(db, itemId, {
    eventType: 'merge_queue_retried',
    status: 'retrying',
  });

  return getMergeQueueItem(db, itemId);
}
1600
+
1601
/**
 * Delete a merge-queue item (events cascade via FK) and return the row that
 * was removed.
 *
 * @param {object} db - better-sqlite3-style database handle.
 * @param {string} itemId - merge_queue id to delete.
 * @returns {object|null} The deleted row, or null when it did not exist.
 */
export function removeMergeQueueItem(db, itemId) {
  const existing = getMergeQueueItem(db, itemId);
  if (!existing) {
    return null;
  }
  db.prepare(`DELETE FROM merge_queue WHERE id=?`).run(itemId);
  return existing;
}
1607
+
1375
1608
  export function upsertTaskSpec(db, taskId, spec) {
1376
1609
  db.prepare(`
1377
1610
  INSERT INTO task_specs (task_id, spec_json, updated_at)
@@ -1626,7 +1859,7 @@ export function getStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINUT
1626
1859
  `).all(`-${staleAfterMinutes} minutes`);
1627
1860
  }
1628
1861
 
1629
- export function reapStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINUTES) {
1862
+ export function reapStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINUTES, { requeueTask = true } = {}) {
1630
1863
  return withImmediateTransaction(db, () => {
1631
1864
  const staleLeases = getStaleLeases(db, staleAfterMinutes);
1632
1865
  if (!staleLeases.length) {
@@ -1655,11 +1888,28 @@ export function reapStaleLeases(db, staleAfterMinutes = DEFAULT_STALE_LEASE_MINU
1655
1888
  )
1656
1889
  `);
1657
1890
 
1891
+ const failTaskForStaleLease = db.prepare(`
1892
+ UPDATE tasks
1893
+ SET status='failed',
1894
+ description=COALESCE(description,'') || '\nFAILED: lease_expired: stale lease reaped',
1895
+ updated_at=datetime('now')
1896
+ WHERE id=? AND status='in_progress'
1897
+ AND NOT EXISTS (
1898
+ SELECT 1 FROM leases
1899
+ WHERE task_id=?
1900
+ AND status='active'
1901
+ )
1902
+ `);
1903
+
1658
1904
  for (const lease of staleLeases) {
1659
1905
  expireLease.run(lease.id);
1660
1906
  releaseClaimsForLeaseTx(db, lease.id);
1661
1907
  releaseScopeReservationsForLeaseTx(db, lease.id);
1662
- resetTask.run(lease.task_id, lease.task_id);
1908
+ if (requeueTask) {
1909
+ resetTask.run(lease.task_id, lease.task_id);
1910
+ } else {
1911
+ failTaskForStaleLease.run(lease.task_id, lease.task_id);
1912
+ }
1663
1913
  touchWorktreeLeaseState(db, lease.worktree, lease.agent, 'idle');
1664
1914
  logAuditEventTx(db, {
1665
1915
  eventType: 'lease_expired',
package/src/core/git.js CHANGED
@@ -3,7 +3,7 @@
3
3
  * Worktree discovery and conflict detection via git merge-tree
4
4
  */
5
5
 
6
- import { execSync, spawnSync } from 'child_process';
6
+ import { execFileSync, execSync, spawnSync } from 'child_process';
7
7
  import { existsSync, realpathSync } from 'fs';
8
8
  import { join, relative, resolve, basename } from 'path';
9
9
  import { filterIgnoredPaths } from './ignore.js';
@@ -266,6 +266,79 @@ export function getWorktreeBranch(worktreePath) {
266
266
  }
267
267
  }
268
268
 
269
/**
 * Check whether a local branch exists in the repository.
 *
 * @param {string} repoRoot - Working directory for the git invocation.
 * @param {string} branch - Short branch name (without refs/heads/).
 * @returns {boolean} True only when `git show-ref --verify` exits 0.
 */
export function gitBranchExists(repoRoot, branch) {
  const ref = `refs/heads/${branch}`;
  // spawnSync never throws here; spawn failures surface as a non-zero/null status.
  const probe = spawnSync('git', ['show-ref', '--verify', '--quiet', ref], {
    cwd: repoRoot,
    encoding: 'utf8',
  });
  return probe.status === 0;
}
276
+
277
/**
 * Resolve a ref to a commit hash via `git rev-parse`.
 *
 * @param {string} repoRoot - Working directory for the git invocation.
 * @param {string} ref - Any ref/rev expression git accepts.
 * @returns {string|null} Trimmed rev-parse output, or null on any failure.
 */
export function gitRevParse(repoRoot, ref) {
  const execOptions = {
    cwd: repoRoot,
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe'],
  };
  try {
    const output = execFileSync('git', ['rev-parse', ref], execOptions);
    return output.trim();
  } catch {
    // Unknown ref, not a repo, or git missing — callers get null either way.
    return null;
  }
}
288
+
289
/**
 * Return the currently checked-out branch name, or null when detached,
 * outside a repository, or git fails.
 *
 * @param {string} repoRoot - Working directory for the git invocation.
 * @returns {string|null}
 */
export function gitGetCurrentBranch(repoRoot) {
  try {
    const raw = execFileSync('git', ['branch', '--show-current'], {
      cwd: repoRoot,
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
    });
    const branch = raw.trim();
    // Detached HEAD prints an empty string — normalize to null.
    return branch === '' ? null : branch;
  } catch {
    return null;
  }
}
300
+
301
/**
 * Check out a ref in the given repository. Throws on any git failure
 * (callers rely on the exception to detect a failed checkout).
 *
 * @param {string} repoRoot - Working directory for the git invocation.
 * @param {string} ref - Branch, tag, or commit to check out.
 */
export function gitCheckout(repoRoot, ref) {
  const checkoutArgs = ['checkout', ref];
  const execOptions = {
    cwd: repoRoot,
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe'],
  };
  execFileSync('git', checkoutArgs, execOptions);
}
308
+
309
/**
 * Rebase topicBranch onto baseBranch, then best-effort restore whichever
 * branch was checked out before the call.
 *
 * @param {string} repoRoot - Working directory for the git invocations.
 * @param {string} baseBranch - Branch to rebase onto.
 * @param {string} topicBranch - Branch being rebased (checked out first).
 * @throws Propagates checkout/rebase failures to the caller.
 */
export function gitRebaseOnto(repoRoot, baseBranch, topicBranch) {
  const originalBranch = gitGetCurrentBranch(repoRoot);
  try {
    gitCheckout(repoRoot, topicBranch);
    execFileSync('git', ['rebase', baseBranch], {
      cwd: repoRoot,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'pipe'],
    });
  } finally {
    // Restore the previous checkout; swallow restore failures so the
    // original rebase error (if any) is the one that propagates.
    const shouldRestore = originalBranch && originalBranch !== topicBranch;
    if (shouldRestore) {
      try {
        gitCheckout(repoRoot, originalBranch);
      } catch {
        /* best-effort restore */
      }
    }
  }
}
324
+
325
/**
 * Fast-forward-merge topicBranch into baseBranch and return the resulting
 * HEAD commit. The previously checked-out branch is restored best-effort.
 *
 * @param {string} repoRoot - Working directory for the git invocations.
 * @param {string} baseBranch - Branch to merge into (checked out first).
 * @param {string} topicBranch - Branch to merge; must be fast-forwardable.
 * @returns {string|null} New HEAD hash (resolved while still on baseBranch).
 * @throws Propagates checkout/merge failures to the caller.
 */
export function gitMergeBranchInto(repoRoot, baseBranch, topicBranch) {
  const originalBranch = gitGetCurrentBranch(repoRoot);
  try {
    gitCheckout(repoRoot, baseBranch);
    execFileSync('git', ['merge', '--ff-only', topicBranch], {
      cwd: repoRoot,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'pipe'],
    });
    // Note: the return value is computed before the finally-block restores
    // the previous checkout, so HEAD is read while still on baseBranch.
    return gitRevParse(repoRoot, 'HEAD');
  } finally {
    const shouldRestore = originalBranch && originalBranch !== baseBranch;
    if (shouldRestore) {
      try {
        gitCheckout(repoRoot, originalBranch);
      } catch {
        /* best-effort restore */
      }
    }
  }
}
341
+
269
342
  /**
270
343
  * Create a new git worktree
271
344
  */
@@ -1,6 +1,8 @@
1
1
  export const DEFAULT_SCAN_IGNORE_PATTERNS = [
2
2
  'node_modules/**',
3
3
  '.git/**',
4
+ '.mcp.json',
5
+ '.cursor/mcp.json',
4
6
  '.switchman/**',
5
7
  'dist/**',
6
8
  'build/**',
package/src/core/mcp.js CHANGED
@@ -1,19 +1,23 @@
1
- import { existsSync, readFileSync, writeFileSync } from 'fs';
1
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
2
+ import { homedir } from 'os';
2
3
  import { join } from 'path';
3
4
 
4
- export function getSwitchmanMcpConfig() {
5
/**
 * Describe the switchman MCP server entry in the shape expected under an
 * `mcpServers` map. Returns a fresh object on every call so callers may
 * mutate or spread it safely.
 *
 * @returns {{switchman: {command: string, args: string[]}}}
 */
export function getSwitchmanMcpServers() {
  const switchman = {
    command: 'switchman-mcp',
    args: [],
  };
  return { switchman };
}
14
13
 
15
- export function upsertProjectMcpConfig(targetDir) {
16
- const configPath = join(targetDir, '.mcp.json');
14
/**
 * Full MCP config document wrapping the switchman server entries under the
 * standard `mcpServers` key.
 *
 * @returns {{mcpServers: object}}
 */
export function getSwitchmanMcpConfig() {
  const mcpServers = getSwitchmanMcpServers();
  return { mcpServers };
}
19
+
20
+ function upsertMcpConfigFile(configPath) {
17
21
  let config = {};
18
22
  let created = true;
19
23
 
@@ -21,13 +25,15 @@ export function upsertProjectMcpConfig(targetDir) {
21
25
  created = false;
22
26
  const raw = readFileSync(configPath, 'utf8').trim();
23
27
  config = raw ? JSON.parse(raw) : {};
28
+ } else {
29
+ mkdirSync(join(configPath, '..'), { recursive: true });
24
30
  }
25
31
 
26
32
  const nextConfig = {
27
33
  ...config,
28
34
  mcpServers: {
29
35
  ...(config.mcpServers || {}),
30
- ...getSwitchmanMcpConfig().mcpServers,
36
+ ...getSwitchmanMcpServers(),
31
37
  },
32
38
  };
33
39
 
@@ -45,3 +51,26 @@ export function upsertProjectMcpConfig(targetDir) {
45
51
  changed,
46
52
  };
47
53
  }
54
+
55
+ export function upsertCursorProjectMcpConfig(targetDir) {
56
+ return upsertMcpConfigFile(join(targetDir, '.cursor', 'mcp.json'));
57
+ }
58
+
59
+ export function upsertAllProjectMcpConfigs(targetDir) {
60
+ return [
61
+ upsertProjectMcpConfig(targetDir),
62
+ upsertCursorProjectMcpConfig(targetDir),
63
+ ];
64
+ }
65
+
66
+ export function upsertProjectMcpConfig(targetDir) {
67
+ return upsertMcpConfigFile(join(targetDir, '.mcp.json'));
68
+ }
69
+
70
+ export function getWindsurfMcpConfigPath(homeDir = homedir()) {
71
+ return join(homeDir, '.codeium', 'mcp_config.json');
72
+ }
73
+
74
+ export function upsertWindsurfMcpConfig(homeDir = homedir()) {
75
+ return upsertMcpConfigFile(getWindsurfMcpConfigPath(homeDir));
76
+ }
@@ -1,4 +1,4 @@
1
- import { getActiveFileClaims, getTask, getTaskSpec, getWorktree } from './db.js';
1
+ import { getActiveFileClaims, getLeaseExecutionContext, getTask, getTaskSpec, getWorktree, touchBoundaryValidationState } from './db.js';
2
2
  import { getWorktreeChangedFiles } from './git.js';
3
3
  import { matchesPathPatterns } from './ignore.js';
4
4
 
@@ -20,19 +20,48 @@ function fileMatchesKeyword(filePath, keyword) {
20
20
  return normalizedKeyword.length >= 3 && normalizedPath.includes(normalizedKeyword);
21
21
  }
22
22
 
23
- export function evaluateTaskOutcome(db, repoRoot, { taskId }) {
23
/**
 * Resolve the task/spec/worktree context for an outcome evaluation, either
 * from a lease id (preferred — uses the lease's execution context) or from a
 * bare task id. Always returns the same four-key shape; missing pieces are null.
 *
 * @param {object} db - Database handle.
 * @param {object} [options]
 * @param {string|null} [options.taskId]
 * @param {string|null} [options.leaseId]
 * @returns {{task: object|null, taskSpec: object|null, worktree: object|null, leaseId: string|null}}
 */
function resolveExecution(db, { taskId = null, leaseId = null } = {}) {
  const empty = (lease) => ({ task: null, taskSpec: null, worktree: null, leaseId: lease });

  if (leaseId) {
    const execution = getLeaseExecutionContext(db, leaseId);
    if (!execution?.task) {
      return empty(leaseId);
    }
    return {
      task: execution.task,
      taskSpec: execution.task_spec,
      worktree: execution.worktree,
      // Prefer the lease id recorded on the context over the caller's value.
      leaseId: execution.lease?.id || leaseId,
    };
  }

  if (!taskId) {
    return empty(null);
  }

  const task = getTask(db, taskId);
  const taskSpec = task ? getTaskSpec(db, taskId) : null;
  const worktree = task?.worktree ? getWorktree(db, task.worktree) : null;
  return { task, taskSpec, worktree, leaseId: null };
}
49
+
50
+ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = null } = {}) {
51
+ const execution = resolveExecution(db, { taskId, leaseId });
52
+ const task = execution.task;
53
+ const taskSpec = execution.taskSpec;
54
+
26
55
  if (!task || !task.worktree) {
27
56
  return {
28
57
  status: 'failed',
29
- reason_code: 'task_not_assigned',
58
+ reason_code: taskId || leaseId ? 'task_not_assigned' : 'task_identity_required',
30
59
  changed_files: [],
31
- findings: ['task has no assigned worktree'],
60
+ findings: [taskId || leaseId ? 'task has no assigned worktree' : 'task outcome requires a taskId or leaseId'],
32
61
  };
33
62
  }
34
63
 
35
- const worktree = getWorktree(db, task.worktree);
64
+ const worktree = execution.worktree;
36
65
  if (!worktree) {
37
66
  return {
38
67
  status: 'failed',
@@ -44,7 +73,7 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId }) {
44
73
 
45
74
  const changedFiles = getWorktreeChangedFiles(worktree.path, repoRoot);
46
75
  const activeClaims = getActiveFileClaims(db)
47
- .filter((claim) => claim.task_id === taskId && claim.worktree === task.worktree)
76
+ .filter((claim) => claim.task_id === task.id && claim.worktree === task.worktree)
48
77
  .map((claim) => claim.file_path);
49
78
  const changedOutsideClaims = changedFiles.filter((filePath) => !activeClaims.includes(filePath));
50
79
  const changedInsideClaims = changedFiles.filter((filePath) => activeClaims.includes(filePath));
@@ -128,9 +157,7 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId }) {
128
157
  const matchedObjectiveKeywords = objectiveKeywords.filter((keyword) =>
129
158
  changedFiles.some((filePath) => fileMatchesKeyword(filePath, keyword)),
130
159
  );
131
- const minimumKeywordMatches = taskSpec?.risk_level === 'high'
132
- ? Math.min(2, objectiveKeywords.length)
133
- : Math.min(1, objectiveKeywords.length);
160
+ const minimumKeywordMatches = Math.min(1, objectiveKeywords.length);
134
161
 
135
162
  if (objectiveKeywords.length > 0 && matchedObjectiveKeywords.length < minimumKeywordMatches) {
136
163
  findings.push(`changed files do not clearly satisfy task objective keywords: ${objectiveKeywords.join(', ')}`);
@@ -138,16 +165,26 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId }) {
138
165
  status: 'needs_followup',
139
166
  reason_code: 'objective_not_evidenced',
140
167
  changed_files: changedFiles,
168
+ task_id: task.id,
169
+ lease_id: execution.leaseId,
141
170
  findings,
142
171
  };
143
172
  }
144
173
 
145
- return {
174
+ const result = {
146
175
  status: 'accepted',
147
176
  reason_code: null,
148
177
  changed_files: changedFiles,
178
+ task_id: task.id,
179
+ lease_id: execution.leaseId,
149
180
  task_spec: taskSpec,
150
181
  claimed_files: activeClaims,
151
182
  findings: changedInsideClaims.length > 0 ? ['changes stayed within claimed scope'] : [],
152
183
  };
184
+
185
+ if (execution.leaseId) {
186
+ touchBoundaryValidationState(db, execution.leaseId, 'task_outcome_accepted');
187
+ }
188
+
189
+ return result;
153
190
  }
@@ -0,0 +1,49 @@
1
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
2
+ import { dirname, join } from 'path';
3
+
4
/**
 * Default lease policy. Frozen because it is a shared module-level constant:
 * every consumer in this module spreads it into a fresh object, and freezing
 * prevents accidental mutation of the defaults by any caller.
 */
export const DEFAULT_LEASE_POLICY = Object.freeze({
  heartbeat_interval_seconds: 60,
  stale_after_minutes: 15,
  reap_on_status_check: false,
  requeue_task_on_reap: true,
});
10
+
11
/**
 * Location of the repo-local lease policy file.
 *
 * @param {string} repoRoot - Repository root directory.
 * @returns {string} `<repoRoot>/.switchman/lease-policy.json`
 */
export function getLeasePolicyPath(repoRoot) {
  const switchmanDir = join(repoRoot, '.switchman');
  return join(switchmanDir, 'lease-policy.json');
}
14
+
15
/**
 * Load the repository's lease policy, merged over DEFAULT_LEASE_POLICY.
 * A missing or unparseable file yields the defaults. Numeric fields must be
 * finite AND positive — consistent with writeLeasePolicy's Math.max(1, ...)
 * clamp — otherwise the default value is used; boolean fields must be real
 * booleans.
 *
 * @param {string} repoRoot - Repository root directory.
 * @returns {object} A fresh policy object (never the shared default instance).
 */
export function loadLeasePolicy(repoRoot) {
  const policyPath = getLeasePolicyPath(repoRoot);
  if (!existsSync(policyPath)) {
    return { ...DEFAULT_LEASE_POLICY };
  }

  // A hand-edited 0 or negative interval would previously pass the
  // Number.isFinite check and produce nonsensical heartbeat/reap timing.
  const positiveOr = (value, fallback) =>
    (Number.isFinite(value) && value > 0 ? value : fallback);
  const boolOr = (value, fallback) =>
    (typeof value === 'boolean' ? value : fallback);

  try {
    const parsed = JSON.parse(readFileSync(policyPath, 'utf8'));
    return {
      ...DEFAULT_LEASE_POLICY,
      ...parsed,
      heartbeat_interval_seconds: positiveOr(parsed?.heartbeat_interval_seconds, DEFAULT_LEASE_POLICY.heartbeat_interval_seconds),
      stale_after_minutes: positiveOr(parsed?.stale_after_minutes, DEFAULT_LEASE_POLICY.stale_after_minutes),
      reap_on_status_check: boolOr(parsed?.reap_on_status_check, DEFAULT_LEASE_POLICY.reap_on_status_check),
      requeue_task_on_reap: boolOr(parsed?.requeue_task_on_reap, DEFAULT_LEASE_POLICY.requeue_task_on_reap),
    };
  } catch {
    // Corrupt JSON behaves the same as no policy file at all.
    return { ...DEFAULT_LEASE_POLICY };
  }
}
35
+
36
/**
 * Normalize and persist a lease policy to the repo's policy file, creating
 * the `.switchman` directory as needed. Numeric fields are clamped to
 * positive integers; booleans fall back to defaults when not real booleans.
 *
 * @param {string} repoRoot - Repository root directory.
 * @param {object} [policy] - Partial policy overrides.
 * @returns {string} Absolute path of the written policy file.
 */
export function writeLeasePolicy(repoRoot, policy = {}) {
  const policyPath = getLeasePolicyPath(repoRoot);
  mkdirSync(dirname(policyPath), { recursive: true });

  // Integer clamp: parse failures (NaN) and 0 both fall back to the default,
  // then Math.max guarantees a floor of 1.
  const clampedHeartbeat = Math.max(1, Number.parseInt(policy.heartbeat_interval_seconds, 10) || DEFAULT_LEASE_POLICY.heartbeat_interval_seconds);
  const clampedStale = Math.max(1, Number.parseInt(policy.stale_after_minutes, 10) || DEFAULT_LEASE_POLICY.stale_after_minutes);

  const payload = {
    ...DEFAULT_LEASE_POLICY,
    ...policy,
    heartbeat_interval_seconds: clampedHeartbeat,
    stale_after_minutes: clampedStale,
    reap_on_status_check: typeof policy.reap_on_status_check === 'boolean' ? policy.reap_on_status_check : DEFAULT_LEASE_POLICY.reap_on_status_check,
    requeue_task_on_reap: typeof policy.requeue_task_on_reap === 'boolean' ? policy.requeue_task_on_reap : DEFAULT_LEASE_POLICY.requeue_task_on_reap,
  };

  writeFileSync(policyPath, `${JSON.stringify(payload, null, 2)}\n`);
  return policyPath;
}