switchman-dev 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,365 @@
1
+ /**
2
+ * switchman licence module
3
+ * Handles Pro licence validation, credential storage, and caching.
4
+ *
5
+ * Credentials file: ~/.switchman/credentials.json
6
+ * Cache file: ~/.switchman/licence-cache.json
7
+ *
8
+ * The CLI calls checkLicence() before any Pro-gated feature.
9
+ * It returns { valid, plan, email, cached } and never throws —
10
+ * if anything goes wrong it returns { valid: false }.
11
+ */
12
+
13
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
14
+ import { homedir } from 'os';
15
+ import { join } from 'path';
16
+
17
// ─── Config ───────────────────────────────────────────────────────────────────
//
// Defaults point at the hosted Switchman Pro backend.
// Override with environment variables if self-hosting:
//   SWITCHMAN_SUPABASE_URL=https://your-project.supabase.co
//   SWITCHMAN_SUPABASE_ANON=your-anon-key

// Supabase project base URL. `??` means only an unset variable falls back
// to the hosted default (an empty-string override is respected).
const SUPABASE_URL = process.env.SWITCHMAN_SUPABASE_URL
  ?? 'https://afilbolhlkiingnsupgr.supabase.co';

// Default API key for the hosted backend — per the env-var name this is the
// public "anon" key; authorisation is presumably enforced server-side.
const SUPABASE_ANON = process.env.SWITCHMAN_SUPABASE_ANON
  ?? 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImFmaWxib2xobGtpaW5nbnN1cGdyIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NzM1OTIzOTIsImV4cCI6MjA4OTE2ODM5Mn0.8TBfHfRB0vEyKPMWBd6i1DNwx1nS9UqprIAsJf35n88';
// Edge function that validates the caller's licence (see checkLicence()).
const VALIDATE_URL = `${SUPABASE_URL}/functions/v1/validate-licence`;
// Supabase auth endpoint, used for token refresh and the OAuth authorize flow.
const AUTH_URL = `${SUPABASE_URL}/auth/v1`;
// Upgrade page shown to free-tier users.
const PRO_PAGE_URL = 'https://switchman.dev/pro';

// Free-tier limit and per-plan data-retention windows (days).
const FREE_AGENT_LIMIT = 3;
const FREE_RETENTION_DAYS = 7;
const PRO_RETENTION_DAYS = 90;
// How long a successful validation result is trusted without re-checking.
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
// How long a previously-valid cached result is honoured when offline.
const OFFLINE_GRACE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
38
+
39
+ // ─── Paths ────────────────────────────────────────────────────────────────────
40
+
41
// Absolute path of the per-user Switchman config directory (~/.switchman).
function getSwitchmanConfigDir() {
  const home = homedir();
  return join(home, '.switchman');
}
44
+
45
// Location of the persisted login credentials file.
function getCredentialsPath() {
  const configDir = getSwitchmanConfigDir();
  return join(configDir, 'credentials.json');
}
48
+
49
// Location of the cached licence-validation result.
function getLicenceCachePath() {
  const configDir = getSwitchmanConfigDir();
  return join(configDir, 'licence-cache.json');
}
52
+
53
// Create ~/.switchman on first use, owner-access-only (0700).
function ensureConfigDir() {
  const dir = getSwitchmanConfigDir();
  if (existsSync(dir)) return;
  mkdirSync(dir, { recursive: true, mode: 0o700 });
}
57
+
58
+ // ─── Credentials ─────────────────────────────────────────────────────────────
59
+
60
/**
 * Load stored credentials from ~/.switchman/credentials.json.
 * @returns {object|null} Parsed credentials, or null when the file is
 *   absent, unreadable, or not valid JSON — never throws.
 */
export function readCredentials() {
  try {
    const path = getCredentialsPath();
    if (!existsSync(path)) return null;
    const raw = readFileSync(path, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
69
+
70
/**
 * Persist credentials to disk with owner-only file permissions (0600).
 * @param {object} creds - Credential payload to serialise as pretty JSON.
 */
export function writeCredentials(creds) {
  ensureConfigDir();
  const serialised = JSON.stringify(creds, null, 2);
  writeFileSync(getCredentialsPath(), serialised, { mode: 0o600 });
}
74
+
75
/**
 * Log out: overwrite the credentials file with an empty JSON object.
 * Best-effort — filesystem errors are deliberately swallowed.
 */
export function clearCredentials() {
  try {
    const path = getCredentialsPath();
    if (!existsSync(path)) return;
    writeFileSync(path, JSON.stringify({}), { mode: 0o600 });
  } catch {
    // best-effort: nothing to do if the file cannot be written
  }
}
83
+
84
+ // ─── Cache ────────────────────────────────────────────────────────────────────
85
+
86
/**
 * Read the cached licence-validation result.
 * @returns {object|null} Parsed cache entry, or null when the file is
 *   absent, unreadable, or corrupt — never throws.
 */
function readLicenceCache() {
  try {
    const path = getLicenceCachePath();
    if (!existsSync(path)) return null;
    const raw = readFileSync(path, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
95
+
96
/**
 * Persist a validation result plus a `cached_at` timestamp (epoch ms).
 * Best-effort — filesystem errors are swallowed.
 * @param {object} result - Result object from a live validation.
 */
function writeLicenceCache(result) {
  try {
    ensureConfigDir();
    const payload = { ...result, cached_at: Date.now() };
    writeFileSync(getLicenceCachePath(), JSON.stringify(payload, null, 2), { mode: 0o600 });
  } catch {
    // best-effort: ignore filesystem errors
  }
}
105
+
106
/**
 * Invalidate the cached licence result by truncating the cache file to `{}`.
 * Best-effort — filesystem errors are swallowed.
 */
function clearLicenceCache() {
  try {
    const path = getLicenceCachePath();
    // Pass mode 0o600 for consistency with every other write in this file,
    // so a re-created cache file is never group/world readable.
    if (existsSync(path)) writeFileSync(path, '{}', { mode: 0o600 });
  } catch { /* no-op */ }
}
112
+
113
+ // ─── Validation ──────────────────────────────────────────────────────────────
114
+
115
/**
 * Check whether the current user has a valid Pro licence.
 *
 * Order of checks:
 *   1. No stored access token → { valid: false, reason: 'not_logged_in' }.
 *   2. A cache entry younger than CACHE_TTL_MS → returned as-is (cached: true).
 *   3. Live validation against the backend; a 401 triggers one token refresh
 *      and retry.
 *   4. On network/HTTP failure, a cache entry younger than OFFLINE_GRACE_MS
 *      is honoured (offline: true); otherwise { valid: false, reason: 'offline' }.
 *
 * @param {boolean} [hasRetried=false] - Internal guard set on the recursive
 *   retry after a token refresh, so a server that keeps answering 401 cannot
 *   cause unbounded recursion. Callers should omit it.
 * @returns {Promise<object>} { valid, plan?, email?, reason?, cached, offline }.
 *   Never throws.
 */
export async function checkLicence(hasRetried = false) {
  const creds = readCredentials();
  if (!creds?.access_token) {
    return { valid: false, reason: 'not_logged_in' };
  }

  // Check the 24-hour cache first
  const cache = readLicenceCache();
  if (cache?.valid && cache.cached_at) {
    const age = Date.now() - cache.cached_at;
    if (age < CACHE_TTL_MS) {
      return { ...cache, cached: true, offline: false };
    }
  }

  // Try live validation
  try {
    const res = await fetch(VALIDATE_URL, {
      headers: {
        'Authorization': `Bearer ${creds.access_token}`,
        'apikey': SUPABASE_ANON,
      },
    });

    if (!res.ok) {
      // If the token is expired, refresh once and retry. The hasRetried guard
      // fixes the unbounded recursion the original had when the backend kept
      // returning 401 even after a successful refresh.
      if (res.status === 401 && creds.refresh_token && !hasRetried) {
        const refreshed = await refreshToken(creds.refresh_token);
        if (refreshed) {
          return checkLicence(true); // single retry with the new token
        }
      }
      // Fall through to the offline grace check below
      throw new Error(`HTTP ${res.status}`);
    }

    const data = await res.json();
    const result = {
      valid: data.valid === true,
      plan: data.plan ?? null,
      email: data.email ?? null,
      current_period_end: data.current_period_end ?? null,
      reason: data.valid ? null : (data.reason ?? 'no_licence'),
    };

    writeLicenceCache(result);
    return { ...result, cached: false, offline: false };

  } catch {
    // Network/HTTP error — fall back to the offline grace period
    if (cache?.valid && cache.cached_at) {
      const age = Date.now() - cache.cached_at;
      if (age < OFFLINE_GRACE_MS) {
        return { ...cache, cached: true, offline: true };
      }
    }
    return { valid: false, reason: 'offline', cached: false, offline: true };
  }
}
179
+
180
+ export async function getRetentionDaysForCurrentPlan() {
181
+ const licence = await checkLicence();
182
+ return licence.valid ? PRO_RETENTION_DAYS : FREE_RETENTION_DAYS;
183
+ }
184
+
185
+ // ─── Token refresh ────────────────────────────────────────────────────────────
186
+
187
/**
 * Exchange a refresh token for a new access token via the Supabase auth API.
 * On success the stored credentials are updated and the licence cache is
 * cleared so the next checkLicence() revalidates with the fresh token.
 * @param {string} currentRefreshToken - The refresh token to redeem.
 * @returns {Promise<true|null>} true on success, null on any failure.
 */
async function refreshToken(currentRefreshToken) {
  try {
    const res = await fetch(`${AUTH_URL}/token?grant_type=refresh_token`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'apikey': SUPABASE_ANON,
      },
      body: JSON.stringify({ refresh_token: currentRefreshToken }),
    });
    if (!res.ok) return null;

    const data = await res.json();
    if (!data.access_token) return null;

    // Merge into existing credentials; the server may rotate the refresh
    // token, so keep the old one only when no replacement is returned.
    const existing = readCredentials() || {};
    writeCredentials({
      ...existing,
      access_token: data.access_token,
      refresh_token: data.refresh_token ?? currentRefreshToken,
      expires_at: Date.now() + (data.expires_in ?? 3600) * 1000,
    });
    clearLicenceCache();
    return true;
  } catch {
    return null;
  }
}
217
+
218
+ // ─── GitHub Device Flow login ─────────────────────────────────────────────────
219
+
220
/**
 * Run the GitHub OAuth device flow via Supabase.
 * Opens the browser, polls for the token, saves credentials.
 * Returns { success, email } or { success: false, error }
 *
 * Flow: start a throwaway HTTP server on localhost:7429, open the Supabase
 * GitHub authorize URL with redirect_to pointing back at /callback, then
 * exchange the returned code for a session. Resolves (never rejects) with
 * { success: true, email } or { success: false, error }.
 */
export async function loginWithGitHub() {
  // Use Supabase's PKCE/implicit flow by opening the browser
  // with a special device-style URL that redirects to a local callback
  const { default: open } = await import('open');

  // We use a simple approach: direct the user to the Pro page sign-in
  // which sets the session, then we poll Supabase for the session
  // using a one-time code approach via the CLI callback server

  // Start a tiny local HTTP server to catch the OAuth callback
  const { createServer } = await import('http');

  return new Promise((resolve) => {
    let server;
    // Abort the whole flow if no callback arrives in time.
    const timeout = setTimeout(() => {
      server?.close();
      resolve({ success: false, error: 'timeout' });
    }, 5 * 60 * 1000); // 5 minute timeout

    server = createServer(async (req, res) => {
      // NOTE(review): port 7429 is hard-coded here, in listen() below, and in
      // redirect_to — if the port is ever changed, all three must agree.
      const url = new URL(req.url, 'http://localhost:7429');

      if (url.pathname === '/callback') {
        const code = url.searchParams.get('code');
        const error = url.searchParams.get('error');

        // Respond to the browser immediately with a success/failure page,
        // before the token exchange happens.
        res.writeHead(200, { 'Content-Type': 'text/html' });
        res.end(`
          <!DOCTYPE html>
          <html>
          <head><style>
            body { background: #0b1020; color: #e6eef8; font-family: monospace;
                   display: flex; align-items: center; justify-content: center;
                   min-height: 100vh; margin: 0; }
            .box { text-align: center; }
            .ok { color: #4ade80; font-size: 48px; }
            h2 { font-size: 24px; margin: 16px 0 8px; }
            p { color: #5f7189; }
          </style></head>
          <body>
            <div class="box">
              <div class="ok">${error ? '✕' : '✓'}</div>
              <h2>${error ? 'Sign in failed' : 'Signed in successfully'}</h2>
              <p>${error ? 'You can close this tab.' : 'You can close this tab and return to your terminal.'}</p>
            </div>
          </body>
          </html>
        `);

        // One-shot server: stop listening after the first /callback hit.
        // (Stopping the listener does not cancel the in-flight async work
        // below — the promise still resolves from this handler.)
        clearTimeout(timeout);
        server.close();

        if (error || !code) {
          resolve({ success: false, error: error || 'no_code' });
          return;
        }

        // Exchange the code for a session via Supabase
        try {
          // NOTE(review): first attempt uses grant_type=pkce with an
          // `auth_code` body — confirm against the Supabase auth API; the
          // authorization_code fallback below covers the other shape.
          const tokenRes = await fetch(`${AUTH_URL}/token?grant_type=pkce`, {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
              'apikey': SUPABASE_ANON,
            },
            body: JSON.stringify({ auth_code: code }),
          });

          if (!tokenRes.ok) {
            // Try the standard exchange endpoint
            const exchangeRes = await fetch(`${AUTH_URL}/token?grant_type=authorization_code`, {
              method: 'POST',
              headers: {
                'Content-Type': 'application/json',
                'apikey': SUPABASE_ANON,
              },
              body: JSON.stringify({ code }),
            });

            if (!exchangeRes.ok) {
              resolve({ success: false, error: 'token_exchange_failed' });
              return;
            }

            const session = await exchangeRes.json();
            saveSession(session);
            resolve({ success: true, email: session.user?.email });
            return;
          }

          const session = await tokenRes.json();
          saveSession(session);
          resolve({ success: true, email: session.user?.email });
        } catch (err) {
          resolve({ success: false, error: err.message });
        }
      }
    });

    server.listen(7429, 'localhost', () => {
      // Build the Supabase GitHub OAuth URL with our local callback
      const params = new URLSearchParams({
        provider: 'github',
        redirect_to: 'http://localhost:7429/callback',
        scopes: 'read:user user:email',
      });

      const loginUrl = `${AUTH_URL}/authorize?${params}`;
      console.log('');
      console.log('  Opening GitHub sign-in in your browser...');
      console.log(`  If it doesn\'t open, visit: ${loginUrl}`);
      console.log('');

      // Best-effort browser launch; fall back to printing the URL.
      open(loginUrl).catch(() => {
        console.log(`  Could not open browser automatically.`);
        console.log(`  Please visit: ${loginUrl}`);
      });
    });

    // e.g. EADDRINUSE when port 7429 is already taken.
    server.on('error', (err) => {
      clearTimeout(timeout);
      resolve({ success: false, error: err.message });
    });
  });
}
350
+
351
/**
 * Persist a Supabase session object as CLI credentials, then drop the
 * licence cache so the next checkLicence() revalidates with the new token.
 * A session without an access_token is silently ignored.
 * @param {object} session - Session payload from the auth token endpoint.
 */
function saveSession(session) {
  const accessToken = session?.access_token;
  if (!accessToken) return;

  const expiresInSeconds = session.expires_in ?? 3600;
  writeCredentials({
    access_token: accessToken,
    refresh_token: session.refresh_token ?? null,
    expires_at: Date.now() + expiresInSeconds * 1000,
    email: session.user?.email ?? null,
    user_id: session.user?.id ?? null,
  });
  clearLicenceCache();
}
362
+
363
+ // ─── Helpers for CLI commands ─────────────────────────────────────────────────
364
+
365
// Re-export plan limits and the upgrade URL for CLI command modules.
export { FREE_AGENT_LIMIT, FREE_RETENTION_DAYS, PRO_PAGE_URL, PRO_RETENTION_DAYS };
package/src/core/mcp.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
2
2
  import { homedir } from 'os';
3
- import { join } from 'path';
3
+ import { dirname, join } from 'path';
4
4
 
5
5
  export function getSwitchmanMcpServers() {
6
6
  return {
@@ -26,7 +26,7 @@ function upsertMcpConfigFile(configPath) {
26
26
  const raw = readFileSync(configPath, 'utf8').trim();
27
27
  config = raw ? JSON.parse(raw) : {};
28
28
  } else {
29
- mkdirSync(join(configPath, '..'), { recursive: true });
29
+ mkdirSync(dirname(configPath), { recursive: true });
30
30
  }
31
31
 
32
32
  const nextConfig = {
@@ -52,6 +52,45 @@ function upsertMcpConfigFile(configPath) {
52
52
  };
53
53
  }
54
54
 
55
/**
 * Ensure project-local MCP config files are listed in the repo's private
 * git exclude file (.git/info/exclude), so they never show up as untracked.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @returns {{path: string, changed: boolean, managed: boolean}}
 *   managed is false only when the exclude file could not be read/created.
 */
export function ensureProjectLocalMcpGitExcludes(repoRoot) {
  const excludePath = join(repoRoot, '.git', 'info', 'exclude');
  const requiredEntries = ['.mcp.json', '.cursor/mcp.json'];

  let existing = '';
  try {
    if (!existsSync(excludePath)) {
      // First touch: make sure .git/info exists so the write below succeeds.
      mkdirSync(dirname(excludePath), { recursive: true });
    } else {
      existing = readFileSync(excludePath, 'utf8');
    }
  } catch {
    return { path: excludePath, changed: false, managed: false };
  }

  // Compare trimmed lines so surrounding whitespace never causes duplicates.
  const presentLines = new Set(existing.split('\n').map((line) => line.trim()));
  const missing = requiredEntries.filter((entry) => !presentLines.has(entry));
  if (missing.length === 0) {
    return { path: excludePath, changed: false, managed: true };
  }

  // Append only the missing entries, inserting a newline separator when the
  // existing content does not already end with one.
  const separator = existing.length > 0 && !existing.endsWith('\n') ? '\n' : '';
  writeFileSync(excludePath, `${existing}${separator}${missing.join('\n')}\n`);
  return { path: excludePath, changed: true, managed: true };
}
93
+
55
94
  export function upsertCursorProjectMcpConfig(targetDir) {
56
95
  return upsertMcpConfigFile(join(targetDir, '.cursor', 'mcp.json'));
57
96
  }
@@ -189,15 +189,17 @@ function summarizeOverall(pairAnalyses, worktreeAnalyses, boundaryValidations, d
189
189
  summary: `AI merge gate blocked: ${blockedPairs.length} risky pair(s), ${blockedValidations.length} boundary validation issue(s), and ${blockedInvalidations.length} stale dependency issue(s) need resolution.`,
190
190
  };
191
191
  }
192
- if (warnedPairs.length > 0 || riskyWorktrees.length > 0 || warnedValidations.length > 0 || warnedInvalidations.length > 0) {
192
+ if (warnedPairs.length > 0 || warnedValidations.length > 0 || warnedInvalidations.length > 0) {
193
193
  return {
194
194
  status: 'warn',
195
- summary: `AI merge gate warns: ${warnedPairs.length} pair(s), ${riskyWorktrees.length} worktree(s), ${warnedValidations.length} boundary validation issue(s), or ${warnedInvalidations.length} stale dependency issue(s) need review.`,
195
+ summary: `AI merge gate warns: ${warnedPairs.length} pair(s), ${warnedValidations.length} boundary validation issue(s), or ${warnedInvalidations.length} stale dependency issue(s) need review.`,
196
196
  };
197
197
  }
198
198
  return {
199
199
  status: 'pass',
200
- summary: 'AI merge gate passed: no elevated semantic merge risks detected.',
200
+ summary: riskyWorktrees.length > 0
201
+ ? `AI merge gate passed: no cross-worktree merge risks detected. ${riskyWorktrees.length} worktree(s) still have local risk signals worth reviewing if you are about to merge them.`
202
+ : 'AI merge gate passed: no elevated semantic merge risks detected.',
201
203
  };
202
204
  }
203
205
 
@@ -225,10 +227,24 @@ function evaluateDependencyInvalidations(db) {
225
227
  return listDependencyInvalidations(db, { status: 'stale' })
226
228
  .map((state) => {
227
229
  const affectedTask = getTask(db, state.affected_task_id);
228
- const severity = affectedTask?.status === 'done' ? 'blocked' : 'warn';
230
+ const details = state.details || {};
231
+ const severity = details.severity || (affectedTask?.status === 'done' ? 'blocked' : 'warn');
229
232
  const staleArea = state.reason_type === 'subsystem_overlap'
230
233
  ? `subsystem:${state.subsystem_tag}`
234
+ : state.reason_type === 'semantic_contract_drift'
235
+ ? `contract:${(details.contract_names || []).join('|') || 'unknown'}`
236
+ : state.reason_type === 'semantic_object_overlap'
237
+ ? `object:${(details.object_names || []).join('|') || 'unknown'}`
238
+ : state.reason_type === 'shared_module_drift'
239
+ ? `module:${(details.module_paths || []).join('|') || 'unknown'}`
231
240
  : `${state.source_scope_pattern} ↔ ${state.affected_scope_pattern}`;
241
+ const summary = state.reason_type === 'semantic_contract_drift'
242
+ ? `${details.source_task_title || state.source_task_id} changed shared contract ${(details.contract_names || []).join(', ') || 'unknown'}`
243
+ : state.reason_type === 'semantic_object_overlap'
244
+ ? `${details.source_task_title || state.source_task_id} changed shared exported object ${(details.object_names || []).join(', ') || 'unknown'}`
245
+ : state.reason_type === 'shared_module_drift'
246
+ ? `${details.source_task_title || state.source_task_id} changed shared module ${(details.module_paths || []).join(', ') || 'unknown'} used by ${(details.dependent_files || []).join(', ') || state.affected_task_id}`
247
+ : `${affectedTask?.title || state.affected_task_id} is stale because ${details?.source_task_title || state.source_task_id} changed shared ${staleArea}`;
232
248
  return {
233
249
  source_lease_id: state.source_lease_id,
234
250
  source_task_id: state.source_task_id,
@@ -243,9 +259,10 @@ function evaluateDependencyInvalidations(db) {
243
259
  subsystem_tag: state.subsystem_tag,
244
260
  source_scope_pattern: state.source_scope_pattern,
245
261
  affected_scope_pattern: state.affected_scope_pattern,
246
- summary: `${affectedTask?.title || state.affected_task_id} is stale because ${state.details?.source_task_title || state.source_task_id} changed shared ${staleArea}`,
262
+ summary,
247
263
  stale_area: staleArea,
248
264
  created_at: state.created_at,
265
+ details,
249
266
  };
250
267
  });
251
268
  }
@@ -20,60 +20,39 @@ function fileMatchesKeyword(filePath, keyword) {
20
20
  return normalizedKeyword.length >= 3 && normalizedPath.includes(normalizedKeyword);
21
21
  }
22
22
 
23
- function resolveExecution(db, { taskId = null, leaseId = null } = {}) {
24
- if (leaseId) {
25
- const execution = getLeaseExecutionContext(db, leaseId);
26
- if (!execution?.task) {
27
- return { task: null, taskSpec: null, worktree: null, leaseId };
28
- }
29
- return {
30
- task: execution.task,
31
- taskSpec: execution.task_spec,
32
- worktree: execution.worktree,
33
- leaseId: execution.lease?.id || leaseId,
34
- };
35
- }
36
-
37
- if (!taskId) {
38
- return { task: null, taskSpec: null, worktree: null, leaseId: null };
39
- }
40
-
41
- const task = getTask(db, taskId);
42
- return {
43
- task,
44
- taskSpec: task ? getTaskSpec(db, taskId) : null,
45
- worktree: task?.worktree ? getWorktree(db, task.worktree) : null,
46
- leaseId: null,
47
- };
48
- }
49
-
50
- export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = null } = {}) {
51
- const execution = resolveExecution(db, { taskId, leaseId });
52
- const task = execution.task;
53
- const taskSpec = execution.taskSpec;
23
+ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = null }) {
24
+ const execution = leaseId ? getLeaseExecutionContext(db, leaseId) : null;
25
+ const task = execution?.task || (taskId ? getTask(db, taskId) : null);
26
+ const taskSpec = execution?.task_spec || (task ? getTaskSpec(db, task.id) : null);
27
+ const resolvedTaskId = task?.id || taskId || execution?.lease?.task_id || null;
28
+ const resolvedLeaseId = execution?.lease?.id || leaseId || null;
54
29
 
55
30
  if (!task || !task.worktree) {
56
31
  return {
57
32
  status: 'failed',
58
- reason_code: taskId || leaseId ? 'task_not_assigned' : 'task_identity_required',
33
+ reason_code: 'task_not_assigned',
34
+ lease_id: resolvedLeaseId,
35
+ task_id: resolvedTaskId,
59
36
  changed_files: [],
60
37
  findings: [taskId || leaseId ? 'task has no assigned worktree' : 'task outcome requires a taskId or leaseId'],
61
38
  };
62
39
  }
63
40
 
64
- const worktree = execution.worktree;
41
+ const worktree = execution?.worktree || getWorktree(db, task.worktree);
65
42
  if (!worktree) {
66
43
  return {
67
44
  status: 'failed',
68
45
  reason_code: 'worktree_missing',
46
+ lease_id: resolvedLeaseId,
47
+ task_id: resolvedTaskId,
69
48
  changed_files: [],
70
49
  findings: ['assigned worktree is not registered'],
71
50
  };
72
51
  }
73
52
 
74
53
  const changedFiles = getWorktreeChangedFiles(worktree.path, repoRoot);
75
- const activeClaims = getActiveFileClaims(db)
76
- .filter((claim) => claim.task_id === task.id && claim.worktree === task.worktree)
54
+ const activeClaims = (execution?.claims || getActiveFileClaims(db)
55
+ .filter((claim) => claim.task_id === task.id && claim.worktree === task.worktree))
77
56
  .map((claim) => claim.file_path);
78
57
  const changedOutsideClaims = changedFiles.filter((filePath) => !activeClaims.includes(filePath));
79
58
  const changedInsideClaims = changedFiles.filter((filePath) => activeClaims.includes(filePath));
@@ -91,6 +70,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
91
70
  return {
92
71
  status: 'needs_followup',
93
72
  reason_code: 'no_changes_detected',
73
+ lease_id: resolvedLeaseId,
74
+ task_id: resolvedTaskId,
94
75
  changed_files: changedFiles,
95
76
  findings,
96
77
  };
@@ -101,6 +82,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
101
82
  return {
102
83
  status: 'needs_followup',
103
84
  reason_code: 'changes_outside_claims',
85
+ lease_id: resolvedLeaseId,
86
+ task_id: resolvedTaskId,
104
87
  changed_files: changedFiles,
105
88
  findings,
106
89
  };
@@ -111,6 +94,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
111
94
  return {
112
95
  status: 'needs_followup',
113
96
  reason_code: 'changes_outside_task_scope',
97
+ lease_id: resolvedLeaseId,
98
+ task_id: resolvedTaskId,
114
99
  changed_files: changedFiles,
115
100
  findings,
116
101
  };
@@ -129,6 +114,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
129
114
  return {
130
115
  status: 'needs_followup',
131
116
  reason_code: 'missing_expected_tests',
117
+ lease_id: resolvedLeaseId,
118
+ task_id: resolvedTaskId,
132
119
  changed_files: changedFiles,
133
120
  findings,
134
121
  };
@@ -139,6 +126,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
139
126
  return {
140
127
  status: 'needs_followup',
141
128
  reason_code: 'missing_expected_docs',
129
+ lease_id: resolvedLeaseId,
130
+ task_id: resolvedTaskId,
142
131
  changed_files: changedFiles,
143
132
  findings,
144
133
  };
@@ -149,6 +138,8 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
149
138
  return {
150
139
  status: 'needs_followup',
151
140
  reason_code: 'missing_expected_source_changes',
141
+ lease_id: resolvedLeaseId,
142
+ task_id: resolvedTaskId,
152
143
  changed_files: changedFiles,
153
144
  findings,
154
145
  };
@@ -157,34 +148,42 @@ export function evaluateTaskOutcome(db, repoRoot, { taskId = null, leaseId = nul
157
148
  const matchedObjectiveKeywords = objectiveKeywords.filter((keyword) =>
158
149
  changedFiles.some((filePath) => fileMatchesKeyword(filePath, keyword)),
159
150
  );
160
- const minimumKeywordMatches = Math.min(1, objectiveKeywords.length);
151
+ const minimumKeywordMatches = taskSpec?.task_type === 'governance'
152
+ ? (taskSpec?.risk_level === 'high'
153
+ ? Math.min(2, objectiveKeywords.length)
154
+ : Math.min(1, objectiveKeywords.length))
155
+ : Math.min(1, objectiveKeywords.length);
161
156
 
162
157
  if (objectiveKeywords.length > 0 && matchedObjectiveKeywords.length < minimumKeywordMatches) {
163
158
  findings.push(`changed files do not clearly satisfy task objective keywords: ${objectiveKeywords.join(', ')}`);
164
159
  return {
165
160
  status: 'needs_followup',
166
161
  reason_code: 'objective_not_evidenced',
162
+ lease_id: resolvedLeaseId,
163
+ task_id: resolvedTaskId,
167
164
  changed_files: changedFiles,
168
165
  task_id: task.id,
169
- lease_id: execution.leaseId,
166
+ lease_id: execution.lease?.id,
170
167
  findings,
171
168
  };
172
169
  }
173
170
 
174
- const result = {
171
+ const acceptedResult = {
175
172
  status: 'accepted',
176
173
  reason_code: null,
174
+ lease_id: resolvedLeaseId,
175
+ task_id: resolvedTaskId,
177
176
  changed_files: changedFiles,
178
177
  task_id: task.id,
179
- lease_id: execution.leaseId,
178
+ lease_id: execution.lease?.id,
180
179
  task_spec: taskSpec,
181
180
  claimed_files: activeClaims,
182
181
  findings: changedInsideClaims.length > 0 ? ['changes stayed within claimed scope'] : [],
183
182
  };
184
-
185
- if (execution.leaseId) {
186
- touchBoundaryValidationState(db, execution.leaseId, 'task_outcome_accepted');
183
+ if (resolvedLeaseId) {
184
+ touchBoundaryValidationState(db, resolvedLeaseId, 'outcome:accepted', {
185
+ changed_files: changedFiles,
186
+ });
187
187
  }
188
-
189
- return result;
188
+ return acceptedResult;
190
189
  }