switchman-dev 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +301 -7
- package/examples/README.md +18 -2
- package/examples/walkthrough.sh +12 -16
- package/package.json +1 -1
- package/src/cli/index.js +1699 -37
- package/src/core/ci.js +114 -0
- package/src/core/db.js +1417 -26
- package/src/core/detector.js +109 -7
- package/src/core/enforcement.js +966 -0
- package/src/core/git.js +34 -4
- package/src/core/ignore.js +47 -0
- package/src/core/mcp.js +47 -0
- package/src/core/merge-gate.js +305 -0
- package/src/core/monitor.js +39 -0
- package/src/core/outcome.js +153 -0
- package/src/core/pipeline.js +1113 -0
- package/src/core/planner.js +508 -0
- package/src/core/semantic.js +311 -0
- package/src/mcp/server.js +321 -1
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
import { execSync } from 'child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
|
|
4
|
+
|
|
5
|
+
// File extensions treated as parseable source when building the semantic index.
const SOURCE_EXTENSIONS = new Set(['.js', '.mjs', '.cjs', '.ts', '.tsx', '.jsx']);

// Path-based heuristics that tag a file with the subsystems it most likely
// belongs to. Every matching key is collected; order is cosmetic only.
const SUBSYSTEM_PATTERNS = [
  { key: 'auth', regex: /(^|\/)(auth|login|session|permissions?|rbac|acl)(\/|$)/i },
  { key: 'schema', regex: /(^|\/)(schema|migrations?|db|database|sql)(\/|$)|schema\./i },
  { key: 'config', regex: /(^|\/)(config|configs|settings)(\/|$)|(^|\/)(package\.json|pnpm-lock\.yaml|package-lock\.json|yarn\.lock|tsconfig.*|vite\.config.*|webpack\.config.*)$/i },
  { key: 'api', regex: /(^|\/)(api|routes?|controllers?)(\/|$)/i },
  { key: 'payments', regex: /(^|\/)(payments?|billing|invoice|checkout|subscription)(\/|$)/i },
  { key: 'ui', regex: /(^|\/)(components?|ui|pages?)(\/|$)/i },
];
|
|
15
|
+
|
|
16
|
+
/**
 * Returns the values with duplicates removed, keeping first-seen order.
 * @param {Iterable<*>} values - values compared by SameValueZero identity
 * @returns {Array<*>} new array of unique values
 */
function uniq(values) {
  const seen = new Set();
  const result = [];
  for (const value of values) {
    if (seen.has(value)) continue;
    seen.add(value);
    result.push(value);
  }
  return result;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Whether the path looks like a parseable source file.
 * Matches on the final ".ext" suffix, so dots in directory names don't count.
 * @param {string} filePath
 * @returns {boolean}
 */
function isSourceLikePath(filePath) {
  const dotIndex = filePath.lastIndexOf('.');
  if (dotIndex === -1) return false;
  return SOURCE_EXTENSIONS.has(filePath.slice(dotIndex));
}
|
|
23
|
+
|
|
24
|
+
/**
 * Tags a file path with every subsystem whose pattern it matches.
 * Falls back to the catch-all 'general' tag when nothing matches.
 * @param {string} filePath
 * @returns {string[]} non-empty list of subsystem keys
 */
function classifySubsystems(filePath) {
  const matched = [];
  for (const { key, regex } of SUBSYSTEM_PATTERNS) {
    if (regex.test(filePath)) matched.push(key);
  }
  return matched.length === 0 ? ['general'] : matched;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Derives a coarse "area" label for a path from its leading segments.
 * Root-level paths map to themselves, the repo root maps to 'repo', and
 * well-known container dirs (src/, app/, ...) keep their first subdirectory.
 * @param {string} filePath - repo-relative path (empty/nullish tolerated)
 * @returns {string} area label
 */
function areaForPath(filePath) {
  const segments = String(filePath || '').split('/').filter(Boolean);
  if (segments.length === 0) return 'repo';
  if (segments.length === 1) return segments[0];
  const containerDirs = ['src', 'app', 'lib', 'server', 'client', 'tests', 'test', 'spec', 'specs'];
  return containerDirs.includes(segments[0])
    ? `${segments[0]}/${segments[1]}`
    : segments[0];
}
|
|
38
|
+
|
|
39
|
+
/**
 * Regex-scans module source text for exported declarations.
 * Detects functions, classes, const/let/var, TS types/interfaces/enums,
 * plus a synthetic `default` entry when a default export exists.
 * Heuristic only: matches inside strings/comments are not excluded.
 * @param {string} content - raw source text
 * @returns {Array<{kind: string, name: string}>} de-duplicated, in scan order
 */
function extractExports(content) {
  const patterns = [
    { kind: 'function', regex: /export\s+(?:async\s+)?function\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'class', regex: /export\s+class\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'const', regex: /export\s+(?:const|let|var)\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'type', regex: /export\s+type\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'interface', regex: /export\s+interface\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'enum', regex: /export\s+enum\s+([A-Za-z_$][\w$]*)/g },
  ];

  // Keyed by "kind:name" so repeated matches collapse to the first occurrence
  // (Map insertion order preserves the original scan order).
  const byKey = new Map();
  for (const { kind, regex } of patterns) {
    for (const match of content.matchAll(regex)) {
      const name = match[1];
      if (!byKey.has(`${kind}:${name}`)) byKey.set(`${kind}:${name}`, { kind, name });
    }
  }

  if (/export\s+default\s+/.test(content)) {
    byKey.set('default:default', { kind: 'default', name: 'default' });
  }

  return [...byKey.values()];
}
|
|
65
|
+
|
|
66
|
+
/**
 * Splits source text into the raw text blocks of its `export` statements.
 * A block starts on a line beginning with "export " and continues while
 * brace depth stays positive or the statement has no `;`/`}` terminator yet.
 * Heuristic line/brace counting — braces inside strings will skew depth.
 * @param {string} content - raw source text
 * @returns {string[]} trimmed, non-empty export block texts in file order
 */
function extractExportBlocks(content) {
  const allLines = String(content || '').split('\n');
  const collected = [];

  // Net `{`/`}` balance contributed by one line.
  const braceDelta = (text) =>
    ((text.match(/{/g) || []).length) - ((text.match(/}/g) || []).length);
  // True when a trimmed line ends in `;` or `}` (statement looks finished).
  const endsStatement = (text) => /[;}]$/.test(text.trim());

  let cursor = 0;
  while (cursor < allLines.length) {
    const startLine = allLines[cursor];
    if (!startLine.trim().startsWith('export ')) {
      cursor += 1;
      continue;
    }

    const chunk = [startLine];
    let depth = braceDelta(startLine);
    let open = !endsStatement(startLine);

    while (cursor + 1 < allLines.length && (depth > 0 || open)) {
      cursor += 1;
      const continuation = allLines[cursor];
      chunk.push(continuation);
      depth += braceDelta(continuation);
      if (depth <= 0 && endsStatement(continuation)) {
        open = false;
      }
    }

    collected.push(chunk.join('\n').trim());
    cursor += 1;
  }

  return collected.filter(Boolean);
}
|
|
92
|
+
|
|
93
|
+
/**
 * Reads one tracked source file and describes each of its exports as a
 * "code object" record (id, kind, name, area, subsystem tags, source text).
 * @param {string} repoPath - absolute repo/worktree root
 * @param {string} filePath - repo-relative path
 * @returns {Array<object>} empty when the file is missing or not source-like
 */
function parseFileObjects(repoPath, filePath) {
  const fullPath = join(repoPath, filePath);
  if (!isSourceLikePath(filePath) || !existsSync(fullPath)) return [];

  const fileText = readFileSync(fullPath, 'utf8');
  const exportEntries = extractExports(fileText);
  // NOTE(review): blocks are paired with exports by array index; if the two
  // scans disagree in count/order the pairing can drift — the fallback text
  // below covers the unmatched tail.
  const blocks = extractExportBlocks(fileText);
  const tags = classifySubsystems(filePath);
  const area = areaForPath(filePath);

  return exportEntries.map((entry, idx) => ({
    object_id: `${filePath}#${entry.kind}:${entry.name}`,
    file_path: filePath,
    kind: entry.kind,
    name: entry.name,
    area,
    subsystem_tags: tags,
    source_text: blocks[idx] || `export ${entry.kind} ${entry.name}`,
  }));
}
|
|
113
|
+
|
|
114
|
+
/**
 * Lists git-tracked files (repo-relative paths) for the given checkout.
 * Best-effort: returns an empty array when git fails or the path is not
 * a repository, so callers never have to handle the error themselves.
 * @param {string} repoPath - directory to run `git ls-files` in
 * @returns {string[]}
 */
function trackedFiles(repoPath) {
  try {
    const stdout = execSync('git ls-files', {
      cwd: repoPath,
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'], // keep git's stderr off the console
    });
    return stdout.trim().split('\n').filter(Boolean);
  } catch {
    return [];
  }
}
|
|
126
|
+
|
|
127
|
+
/**
 * Builds the semantic index (all exported code objects) for one checkout.
 * @param {string} repoPath - absolute repo/worktree root
 * @param {string[]|null} [filePaths] - explicit file list; defaults to git-tracked files
 * @returns {{generated_at: string, object_count: number, objects: object[]}}
 *   objects are sorted by file path, kind, then name; source_text is stripped.
 */
export function buildSemanticIndexForPath(repoPath, filePaths = null) {
  const candidateFiles = filePaths || trackedFiles(repoPath);

  const collected = [];
  for (const filePath of candidateFiles) {
    if (!isSourceLikePath(filePath)) continue;
    collected.push(...parseFileObjects(repoPath, filePath));
  }

  // Deterministic ordering so repeated runs diff cleanly.
  collected.sort(
    (a, b) =>
      a.file_path.localeCompare(b.file_path)
      || a.kind.localeCompare(b.kind)
      || a.name.localeCompare(b.name),
  );

  return {
    generated_at: new Date().toISOString(),
    object_count: collected.length,
    objects: collected.map(({ source_text, ...rest }) => rest),
  };
}
|
|
144
|
+
|
|
145
|
+
/**
 * Compares per-worktree semantic indexes pairwise and reports overlaps.
 * - Same object_id in both worktrees  -> 'semantic_object_overlap' (blocked)
 * - Same export name in a shared subsystem or area -> 'semantic_name_overlap' (warn)
 * @param {Array<{worktree: string, objects: object[]}>} [semanticIndexes]
 * @returns {object[]} structurally de-duplicated conflict records
 */
export function detectSemanticConflicts(semanticIndexes = []) {
  const found = [];

  for (let a = 0; a < semanticIndexes.length; a++) {
    for (let b = a + 1; b < semanticIndexes.length; b++) {
      const left = semanticIndexes[a];
      const right = semanticIndexes[b];

      // Bucket the right-hand index by export name for O(1) lookups.
      const rightByName = new Map();
      for (const object of right.objects) {
        const bucket = rightByName.get(object.name);
        if (bucket) bucket.push(object);
        else rightByName.set(object.name, [object]);
      }

      for (const leftObject of left.objects) {
        for (const rightObject of rightByName.get(leftObject.name) || []) {
          if (leftObject.object_id === rightObject.object_id) {
            // Both worktrees touch the exact same exported object: hard block.
            found.push({
              type: 'semantic_object_overlap',
              severity: 'blocked',
              worktreeA: left.worktree,
              worktreeB: right.worktree,
              object_name: leftObject.name,
              object_kind: leftObject.kind,
              fileA: leftObject.file_path,
              fileB: rightObject.file_path,
              area: leftObject.area,
            });
            continue;
          }

          const sharedSubsystems = leftObject.subsystem_tags.filter(
            (tag) => rightObject.subsystem_tags.includes(tag),
          );
          if (sharedSubsystems.length === 0 && leftObject.area !== rightObject.area) continue;

          // Same name in related code: likely duplicate work, worth a warning.
          found.push({
            type: 'semantic_name_overlap',
            severity: 'warn',
            worktreeA: left.worktree,
            worktreeB: right.worktree,
            object_name: leftObject.name,
            object_kind_a: leftObject.kind,
            object_kind_b: rightObject.kind,
            fileA: leftObject.file_path,
            fileB: rightObject.file_path,
            shared_subsystems: sharedSubsystems,
            area: leftObject.area === rightObject.area ? leftObject.area : null,
          });
        }
      }
    }
  }

  // Structural de-dup via JSON key, preserving first-seen order; the
  // JSON round-trip also normalizes records to plain data (as before).
  const seen = new Set();
  const deduped = [];
  for (const conflict of found) {
    const key = JSON.stringify(conflict);
    if (seen.has(key)) continue;
    seen.add(key);
    deduped.push(JSON.parse(key));
  }
  return deduped;
}
|
|
199
|
+
|
|
200
|
+
/**
 * Builds a combined semantic index for every registered worktree and writes
 * it to `<repoRoot>/.switchman/semantic-index.json`.
 * @param {string} repoRoot - absolute repo root
 * @param {{worktrees?: Array<{name: string, branch?: string, path: string}>}} [options]
 * @returns {{output_path: string, semantic_index: object}}
 */
export function materializeSemanticIndex(repoRoot, { worktrees = [] } = {}) {
  const indexedWorktrees = worktrees.map((worktree) => ({
    worktree: worktree.name,
    branch: worktree.branch || 'unknown',
    index: buildSemanticIndexForPath(worktree.path),
  }));
  // Stable ordering keeps the JSON artifact diff-friendly.
  indexedWorktrees.sort((a, b) => a.worktree.localeCompare(b.worktree));

  const semanticIndex = {
    generated_at: new Date().toISOString(),
    worktrees: indexedWorktrees,
  };

  const switchmanDir = join(repoRoot, '.switchman');
  if (!existsSync(switchmanDir)) mkdirSync(switchmanDir, { recursive: true });

  const outputPath = join(switchmanDir, 'semantic-index.json');
  writeFileSync(outputPath, `${JSON.stringify(semanticIndex, null, 2)}\n`);

  return {
    output_path: outputPath,
    semantic_index: semanticIndex,
  };
}
|
|
221
|
+
|
|
222
|
+
/**
 * Converts a raw code_objects DB row to its in-memory shape by decoding the
 * JSON-encoded subsystem_tags column (nullish column -> empty array).
 * @param {object} row - row as returned by the SQL driver
 * @returns {object} shallow copy with subsystem_tags as a string[]
 */
function normalizeObjectRow(row) {
  const decodedTags = JSON.parse(row.subsystem_tags || '[]');
  return { ...row, subsystem_tags: decodedTags };
}
|
|
228
|
+
|
|
229
|
+
/**
 * Parses source files and upserts their exported code objects into the
 * code_objects table (keyed by object_id).
 * @param {object} db - prepared-statement style SQLite handle
 * @param {string} repoRoot - absolute repo root
 * @param {{filePaths?: string[]|null}} [options] - explicit file list; defaults to git-tracked files
 * @returns {object[]} full normalized table contents after the import
 */
export function importCodeObjectsToStore(db, repoRoot, { filePaths = null } = {}) {
  const candidateFiles = filePaths || trackedFiles(repoRoot);

  const upsert = db.prepare(`
    INSERT INTO code_objects (object_id, file_path, kind, name, source_text, subsystem_tags, area, updated_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))
    ON CONFLICT(object_id) DO UPDATE SET
      file_path=excluded.file_path,
      kind=excluded.kind,
      name=excluded.name,
      source_text=excluded.source_text,
      subsystem_tags=excluded.subsystem_tags,
      area=excluded.area,
      updated_at=datetime('now')
  `);

  for (const filePath of candidateFiles) {
    if (!isSourceLikePath(filePath)) continue;
    for (const object of parseFileObjects(repoRoot, filePath)) {
      upsert.run(
        object.object_id,
        object.file_path,
        object.kind,
        object.name,
        object.source_text,
        JSON.stringify(object.subsystem_tags || []),
        object.area || null,
      );
    }
  }

  return listCodeObjects(db);
}
|
|
262
|
+
|
|
263
|
+
/**
 * Reads code_objects rows, optionally restricted to one file, in a stable
 * (file_path, kind, name) order, with subsystem_tags decoded from JSON.
 * @param {object} db - prepared-statement style SQLite handle
 * @param {{filePath?: string|null}} [options]
 * @returns {object[]} normalized rows
 */
export function listCodeObjects(db, { filePath = null } = {}) {
  // The WHERE clause is chosen from a fixed pair of literals; the user value
  // itself is always bound as a parameter, never interpolated.
  const clause = filePath ? 'WHERE file_path=?' : '';
  const bindings = filePath ? [filePath] : [];

  const rows = db.prepare(`
    SELECT *
    FROM code_objects
    ${clause}
    ORDER BY file_path ASC, kind ASC, name ASC
  `).all(...bindings);

  return rows.map(normalizeObjectRow);
}
|
|
273
|
+
|
|
274
|
+
/**
 * Replaces the stored source_text of one code object and stamps updated_at.
 * @param {object} db - prepared-statement style SQLite handle
 * @param {string} objectId - code_objects primary key
 * @param {string} sourceText - new source text
 * @returns {object|null} the refreshed, normalized row; null when objectId is unknown
 */
export function updateCodeObjectSource(db, objectId, sourceText) {
  db.prepare(`
    UPDATE code_objects
    SET source_text=?,
        updated_at=datetime('now')
    WHERE object_id=?
  `).run(sourceText, objectId);

  const refreshed = db.prepare(`SELECT * FROM code_objects WHERE object_id=?`).get(objectId);
  return refreshed ? normalizeObjectRow(refreshed) : null;
}
|
|
284
|
+
|
|
285
|
+
/**
 * Writes every stored code object back to disk, grouping objects by file and
 * concatenating their source_text blocks (sorted by kind, then name).
 * @param {object} db - prepared-statement style SQLite handle
 * @param {string} repoRoot - absolute repo root (default output location)
 * @param {{outputRoot?: string}} [options] - alternative root to write under
 * @returns {{output_root: string, file_count: number, files: string[]}}
 */
export function materializeCodeObjects(db, repoRoot, { outputRoot = repoRoot } = {}) {
  const objects = listCodeObjects(db);

  // Group objects by their target file.
  const byFile = new Map();
  for (const object of objects) {
    if (!byFile.has(object.file_path)) byFile.set(object.file_path, []);
    byFile.get(object.file_path).push(object);
  }

  const files = [];
  for (const [filePath, entries] of byFile.entries()) {
    const absolutePath = join(outputRoot, filePath);
    // Fix: derive the parent directory with dirname() rather than splitting the
    // join()-built path on '/', which broke on Windows (backslash separators).
    // mkdirSync with recursive:true is a no-op when the directory exists, so the
    // previous existsSync pre-check is unnecessary.
    mkdirSync(dirname(absolutePath), { recursive: true });

    const ordered = entries
      .slice()
      .sort((left, right) => left.kind.localeCompare(right.kind) || left.name.localeCompare(right.name));
    const content = `${ordered.map((entry) => entry.source_text.trim()).join('\n\n')}\n`;
    writeFileSync(absolutePath, content);
    files.push(filePath);
  }

  return {
    output_root: outputRoot,
    file_count: files.length,
    files: files.sort(),
  };
}
|
package/src/mcp/server.js
CHANGED
|
@@ -40,6 +40,8 @@ import {
|
|
|
40
40
|
listWorktrees,
|
|
41
41
|
} from '../core/db.js';
|
|
42
42
|
import { scanAllWorktrees } from '../core/detector.js';
|
|
43
|
+
import { gatewayAppendFile, gatewayMakeDirectory, gatewayMovePath, gatewayRemovePath, gatewayWriteFile, monitorWorktreesOnce } from '../core/enforcement.js';
|
|
44
|
+
import { runAiMergeGate } from '../core/merge-gate.js';
|
|
43
45
|
|
|
44
46
|
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
45
47
|
|
|
@@ -322,6 +324,273 @@ Examples:
|
|
|
322
324
|
},
|
|
323
325
|
);
|
|
324
326
|
|
|
327
|
+
// ── switchman_write_file ──────────────────────────────────────────────────────
|
|
328
|
+
|
|
329
|
+
server.registerTool(
|
|
330
|
+
'switchman_write_file',
|
|
331
|
+
{
|
|
332
|
+
title: 'Write File Through Switchman',
|
|
333
|
+
description: `Replaces a file's contents through the Switchman write gateway.
|
|
334
|
+
|
|
335
|
+
Use this instead of direct filesystem writes when the agent is running in managed mode. The target path must already be claimed by the active lease.
|
|
336
|
+
|
|
337
|
+
Args:
|
|
338
|
+
- lease_id (string): Active lease ID returned by switchman_task_next
|
|
339
|
+
- path (string): Target file path, relative to repo root
|
|
340
|
+
- content (string): Full replacement content
|
|
341
|
+
- worktree (string, optional): Expected worktree name for extra validation
|
|
342
|
+
|
|
343
|
+
Returns JSON:
|
|
344
|
+
{
|
|
345
|
+
"ok": boolean,
|
|
346
|
+
"file_path": string,
|
|
347
|
+
"lease_id": string,
|
|
348
|
+
"bytes_written": number
|
|
349
|
+
}`,
|
|
350
|
+
inputSchema: z.object({
|
|
351
|
+
lease_id: z.string().min(1).describe('Active lease ID returned by switchman_task_next'),
|
|
352
|
+
path: z.string().min(1).describe('Target file path relative to repo root'),
|
|
353
|
+
content: z.string().describe('Full replacement content'),
|
|
354
|
+
worktree: z.string().optional().describe('Optional worktree name for validation'),
|
|
355
|
+
}),
|
|
356
|
+
annotations: {
|
|
357
|
+
readOnlyHint: false,
|
|
358
|
+
destructiveHint: false,
|
|
359
|
+
idempotentHint: false,
|
|
360
|
+
openWorldHint: false,
|
|
361
|
+
},
|
|
362
|
+
},
|
|
363
|
+
async ({ lease_id, path, content, worktree }) => {
|
|
364
|
+
try {
|
|
365
|
+
const { repoRoot, db } = getContext();
|
|
366
|
+
const result = gatewayWriteFile(db, repoRoot, {
|
|
367
|
+
leaseId: lease_id,
|
|
368
|
+
path,
|
|
369
|
+
content,
|
|
370
|
+
worktree: worktree ?? null,
|
|
371
|
+
});
|
|
372
|
+
db.close();
|
|
373
|
+
|
|
374
|
+
if (!result.ok) {
|
|
375
|
+
return toolError(`Write denied for ${result.file_path ?? path}: ${result.reason_code}.`);
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
379
|
+
} catch (err) {
|
|
380
|
+
return toolError(err.message);
|
|
381
|
+
}
|
|
382
|
+
},
|
|
383
|
+
);
|
|
384
|
+
|
|
385
|
+
// ── switchman_remove_path ─────────────────────────────────────────────────────
|
|
386
|
+
|
|
387
|
+
server.registerTool(
|
|
388
|
+
'switchman_remove_path',
|
|
389
|
+
{
|
|
390
|
+
title: 'Remove Path Through Switchman',
|
|
391
|
+
description: `Removes a file or directory through the Switchman write gateway.
|
|
392
|
+
|
|
393
|
+
Use this instead of direct filesystem deletion when the agent is running in managed mode. The target path must already be claimed by the active lease.
|
|
394
|
+
|
|
395
|
+
Args:
|
|
396
|
+
- lease_id (string): Active lease ID returned by switchman_task_next
|
|
397
|
+
- path (string): Target file or directory path, relative to repo root
|
|
398
|
+
- worktree (string, optional): Expected worktree name for extra validation
|
|
399
|
+
|
|
400
|
+
Returns JSON:
|
|
401
|
+
{
|
|
402
|
+
"ok": boolean,
|
|
403
|
+
"file_path": string,
|
|
404
|
+
"lease_id": string,
|
|
405
|
+
"removed": true
|
|
406
|
+
}`,
|
|
407
|
+
inputSchema: z.object({
|
|
408
|
+
lease_id: z.string().min(1).describe('Active lease ID returned by switchman_task_next'),
|
|
409
|
+
path: z.string().min(1).describe('Target path relative to repo root'),
|
|
410
|
+
worktree: z.string().optional().describe('Optional worktree name for validation'),
|
|
411
|
+
}),
|
|
412
|
+
annotations: {
|
|
413
|
+
readOnlyHint: false,
|
|
414
|
+
destructiveHint: true,
|
|
415
|
+
idempotentHint: true,
|
|
416
|
+
openWorldHint: false,
|
|
417
|
+
},
|
|
418
|
+
},
|
|
419
|
+
async ({ lease_id, path, worktree }) => {
|
|
420
|
+
try {
|
|
421
|
+
const { repoRoot, db } = getContext();
|
|
422
|
+
const result = gatewayRemovePath(db, repoRoot, {
|
|
423
|
+
leaseId: lease_id,
|
|
424
|
+
path,
|
|
425
|
+
worktree: worktree ?? null,
|
|
426
|
+
});
|
|
427
|
+
db.close();
|
|
428
|
+
|
|
429
|
+
if (!result.ok) {
|
|
430
|
+
return toolError(`Remove denied for ${result.file_path ?? path}: ${result.reason_code}.`);
|
|
431
|
+
}
|
|
432
|
+
|
|
433
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
434
|
+
} catch (err) {
|
|
435
|
+
return toolError(err.message);
|
|
436
|
+
}
|
|
437
|
+
},
|
|
438
|
+
);
|
|
439
|
+
|
|
440
|
+
// ── switchman_append_file ─────────────────────────────────────────────────────
|
|
441
|
+
|
|
442
|
+
server.registerTool(
|
|
443
|
+
'switchman_append_file',
|
|
444
|
+
{
|
|
445
|
+
title: 'Append File Through Switchman',
|
|
446
|
+
description: `Appends content to a claimed file through the Switchman write gateway.`,
|
|
447
|
+
inputSchema: z.object({
|
|
448
|
+
lease_id: z.string().min(1).describe('Active lease ID returned by switchman_task_next'),
|
|
449
|
+
path: z.string().min(1).describe('Target file path relative to repo root'),
|
|
450
|
+
content: z.string().describe('Content to append'),
|
|
451
|
+
worktree: z.string().optional().describe('Optional worktree name for validation'),
|
|
452
|
+
}),
|
|
453
|
+
annotations: {
|
|
454
|
+
readOnlyHint: false,
|
|
455
|
+
destructiveHint: false,
|
|
456
|
+
idempotentHint: false,
|
|
457
|
+
openWorldHint: false,
|
|
458
|
+
},
|
|
459
|
+
},
|
|
460
|
+
async ({ lease_id, path, content, worktree }) => {
|
|
461
|
+
try {
|
|
462
|
+
const { repoRoot, db } = getContext();
|
|
463
|
+
const result = gatewayAppendFile(db, repoRoot, {
|
|
464
|
+
leaseId: lease_id,
|
|
465
|
+
path,
|
|
466
|
+
content,
|
|
467
|
+
worktree: worktree ?? null,
|
|
468
|
+
});
|
|
469
|
+
db.close();
|
|
470
|
+
|
|
471
|
+
if (!result.ok) {
|
|
472
|
+
return toolError(`Append denied for ${result.file_path ?? path}: ${result.reason_code}.`);
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
476
|
+
} catch (err) {
|
|
477
|
+
return toolError(err.message);
|
|
478
|
+
}
|
|
479
|
+
},
|
|
480
|
+
);
|
|
481
|
+
|
|
482
|
+
// ── switchman_move_path ───────────────────────────────────────────────────────
|
|
483
|
+
|
|
484
|
+
server.registerTool(
|
|
485
|
+
'switchman_move_path',
|
|
486
|
+
{
|
|
487
|
+
title: 'Move Path Through Switchman',
|
|
488
|
+
description: `Moves a claimed file to another claimed path through the Switchman write gateway.`,
|
|
489
|
+
inputSchema: z.object({
|
|
490
|
+
lease_id: z.string().min(1).describe('Active lease ID returned by switchman_task_next'),
|
|
491
|
+
source_path: z.string().min(1).describe('Source file path relative to repo root'),
|
|
492
|
+
destination_path: z.string().min(1).describe('Destination file path relative to repo root'),
|
|
493
|
+
worktree: z.string().optional().describe('Optional worktree name for validation'),
|
|
494
|
+
}),
|
|
495
|
+
annotations: {
|
|
496
|
+
readOnlyHint: false,
|
|
497
|
+
destructiveHint: true,
|
|
498
|
+
idempotentHint: false,
|
|
499
|
+
openWorldHint: false,
|
|
500
|
+
},
|
|
501
|
+
},
|
|
502
|
+
async ({ lease_id, source_path, destination_path, worktree }) => {
|
|
503
|
+
try {
|
|
504
|
+
const { repoRoot, db } = getContext();
|
|
505
|
+
const result = gatewayMovePath(db, repoRoot, {
|
|
506
|
+
leaseId: lease_id,
|
|
507
|
+
sourcePath: source_path,
|
|
508
|
+
destinationPath: destination_path,
|
|
509
|
+
worktree: worktree ?? null,
|
|
510
|
+
});
|
|
511
|
+
db.close();
|
|
512
|
+
|
|
513
|
+
if (!result.ok) {
|
|
514
|
+
return toolError(`Move denied for ${result.file_path ?? destination_path}: ${result.reason_code}.`);
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
518
|
+
} catch (err) {
|
|
519
|
+
return toolError(err.message);
|
|
520
|
+
}
|
|
521
|
+
},
|
|
522
|
+
);
|
|
523
|
+
|
|
524
|
+
// ── switchman_make_directory ──────────────────────────────────────────────────
|
|
525
|
+
|
|
526
|
+
server.registerTool(
|
|
527
|
+
'switchman_make_directory',
|
|
528
|
+
{
|
|
529
|
+
title: 'Create Directory Through Switchman',
|
|
530
|
+
description: `Creates a directory through the Switchman write gateway when it is part of a claimed destination path.`,
|
|
531
|
+
inputSchema: z.object({
|
|
532
|
+
lease_id: z.string().min(1).describe('Active lease ID returned by switchman_task_next'),
|
|
533
|
+
path: z.string().min(1).describe('Directory path relative to repo root'),
|
|
534
|
+
worktree: z.string().optional().describe('Optional worktree name for validation'),
|
|
535
|
+
}),
|
|
536
|
+
annotations: {
|
|
537
|
+
readOnlyHint: false,
|
|
538
|
+
destructiveHint: false,
|
|
539
|
+
idempotentHint: true,
|
|
540
|
+
openWorldHint: false,
|
|
541
|
+
},
|
|
542
|
+
},
|
|
543
|
+
async ({ lease_id, path, worktree }) => {
|
|
544
|
+
try {
|
|
545
|
+
const { repoRoot, db } = getContext();
|
|
546
|
+
const result = gatewayMakeDirectory(db, repoRoot, {
|
|
547
|
+
leaseId: lease_id,
|
|
548
|
+
path,
|
|
549
|
+
worktree: worktree ?? null,
|
|
550
|
+
});
|
|
551
|
+
db.close();
|
|
552
|
+
|
|
553
|
+
if (!result.ok) {
|
|
554
|
+
return toolError(`Mkdir denied for ${result.file_path ?? path}: ${result.reason_code}.`);
|
|
555
|
+
}
|
|
556
|
+
|
|
557
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
558
|
+
} catch (err) {
|
|
559
|
+
return toolError(err.message);
|
|
560
|
+
}
|
|
561
|
+
},
|
|
562
|
+
);
|
|
563
|
+
|
|
564
|
+
// ── switchman_monitor_once ────────────────────────────────────────────────────
|
|
565
|
+
|
|
566
|
+
server.registerTool(
|
|
567
|
+
'switchman_monitor_once',
|
|
568
|
+
{
|
|
569
|
+
title: 'Observe Runtime File Changes Once',
|
|
570
|
+
description: `Runs one filesystem monitoring pass across all registered worktrees.
|
|
571
|
+
|
|
572
|
+
This compares the current worktree file state against the previous Switchman snapshot, logs observed mutations, and classifies them as allowed or denied based on the active lease and claims.`,
|
|
573
|
+
inputSchema: z.object({}),
|
|
574
|
+
annotations: {
|
|
575
|
+
readOnlyHint: false,
|
|
576
|
+
destructiveHint: false,
|
|
577
|
+
idempotentHint: false,
|
|
578
|
+
openWorldHint: false,
|
|
579
|
+
},
|
|
580
|
+
},
|
|
581
|
+
async () => {
|
|
582
|
+
try {
|
|
583
|
+
const { repoRoot, db } = getContext();
|
|
584
|
+
const worktrees = listWorktrees(db);
|
|
585
|
+
const result = monitorWorktreesOnce(db, repoRoot, worktrees);
|
|
586
|
+
db.close();
|
|
587
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
588
|
+
} catch (err) {
|
|
589
|
+
return toolError(err.message);
|
|
590
|
+
}
|
|
591
|
+
},
|
|
592
|
+
);
|
|
593
|
+
|
|
325
594
|
// ── switchman_task_done ────────────────────────────────────────────────────────
|
|
326
595
|
|
|
327
596
|
server.registerTool(
|
|
@@ -542,8 +811,10 @@ Examples:
|
|
|
542
811
|
name: wt.name,
|
|
543
812
|
branch: wt.branch ?? 'unknown',
|
|
544
813
|
changed_files: (report.fileMap?.[wt.name] ?? []).length,
|
|
814
|
+
compliance_state: report.worktreeCompliance?.find((entry) => entry.worktree === wt.name)?.compliance_state ?? wt.compliance_state ?? 'observed',
|
|
545
815
|
})),
|
|
546
816
|
file_conflicts: report.fileConflicts,
|
|
817
|
+
unclaimed_changes: report.unclaimedChanges,
|
|
547
818
|
branch_conflicts: report.conflicts.map((c) => ({
|
|
548
819
|
type: c.type,
|
|
549
820
|
worktree_a: c.worktreeA,
|
|
@@ -552,7 +823,8 @@ Examples:
|
|
|
552
823
|
branch_b: c.branchB,
|
|
553
824
|
conflicting_files: c.conflictingFiles,
|
|
554
825
|
})),
|
|
555
|
-
|
|
826
|
+
compliance_summary: report.complianceSummary,
|
|
827
|
+
safe_to_proceed: report.conflicts.length === 0 && report.fileConflicts.length === 0 && report.unclaimedChanges.length === 0,
|
|
556
828
|
summary: report.summary,
|
|
557
829
|
};
|
|
558
830
|
return toolOk(JSON.stringify(result, null, 2), result);
|
|
@@ -562,6 +834,52 @@ Examples:
|
|
|
562
834
|
},
|
|
563
835
|
);
|
|
564
836
|
|
|
837
|
+
// ── switchman_merge_gate ──────────────────────────────────────────────────────
|
|
838
|
+
|
|
839
|
+
server.registerTool(
|
|
840
|
+
'switchman_merge_gate',
|
|
841
|
+
{
|
|
842
|
+
title: 'Run AI Merge Gate',
|
|
843
|
+
description: `Evaluates semantic merge risk across active worktrees using Switchman's local change graph.
|
|
844
|
+
|
|
845
|
+
This is an AI-style merge gate implemented as a deterministic local reviewer. It combines:
|
|
846
|
+
1. Existing enforcement signals
|
|
847
|
+
2. Exact file overlaps and git merge conflicts
|
|
848
|
+
3. Shared subsystem overlap
|
|
849
|
+
4. High-risk areas like auth, schema, config, and API changes
|
|
850
|
+
5. Missing-test signals for source-heavy worktrees
|
|
851
|
+
|
|
852
|
+
Args:
|
|
853
|
+
- (none required)
|
|
854
|
+
|
|
855
|
+
Returns JSON:
|
|
856
|
+
{
|
|
857
|
+
"ok": boolean,
|
|
858
|
+
"status": "pass" | "warn" | "blocked",
|
|
859
|
+
"summary": string,
|
|
860
|
+
"worktrees": [{ "worktree": string, "score": number, "findings": string[] }],
|
|
861
|
+
"pairs": [{ "worktree_a": string, "worktree_b": string, "status": string, "score": number, "reasons": string[] }]
|
|
862
|
+
}`,
|
|
863
|
+
inputSchema: z.object({}),
|
|
864
|
+
annotations: {
|
|
865
|
+
readOnlyHint: true,
|
|
866
|
+
destructiveHint: false,
|
|
867
|
+
idempotentHint: true,
|
|
868
|
+
openWorldHint: false,
|
|
869
|
+
},
|
|
870
|
+
},
|
|
871
|
+
async () => {
|
|
872
|
+
try {
|
|
873
|
+
const { repoRoot, db } = getContext();
|
|
874
|
+
const result = await runAiMergeGate(db, repoRoot);
|
|
875
|
+
db.close();
|
|
876
|
+
return toolOk(JSON.stringify(result, null, 2), result);
|
|
877
|
+
} catch (err) {
|
|
878
|
+
return toolError(`AI merge gate failed: ${err.message}. Ensure switchman is initialised ('switchman init').`);
|
|
879
|
+
}
|
|
880
|
+
},
|
|
881
|
+
);
|
|
882
|
+
|
|
565
883
|
// ── switchman_status ───────────────────────────────────────────────────────────
|
|
566
884
|
|
|
567
885
|
server.registerTool(
|
|
@@ -677,6 +995,8 @@ Returns JSON:
|
|
|
677
995
|
branch: wt.branch,
|
|
678
996
|
agent: wt.agent ?? null,
|
|
679
997
|
status: wt.status,
|
|
998
|
+
enforcement_mode: wt.enforcement_mode ?? 'observed',
|
|
999
|
+
compliance_state: wt.compliance_state ?? 'observed',
|
|
680
1000
|
active_lease_id: activeLeaseByWorktree.get(wt.name)?.id ?? null,
|
|
681
1001
|
})),
|
|
682
1002
|
repo_root: repoRoot,
|