switchman-dev 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +26 -0
- package/CHANGELOG.md +36 -0
- package/CLAUDE.md +113 -0
- package/README.md +296 -15
- package/examples/README.md +37 -2
- package/package.json +6 -1
- package/src/cli/index.js +3939 -130
- package/src/core/ci.js +205 -1
- package/src/core/db.js +963 -45
- package/src/core/enforcement.js +140 -15
- package/src/core/git.js +286 -1
- package/src/core/ignore.js +1 -0
- package/src/core/licence.js +365 -0
- package/src/core/mcp.js +41 -2
- package/src/core/merge-gate.js +22 -5
- package/src/core/outcome.js +43 -44
- package/src/core/pipeline.js +2459 -88
- package/src/core/planner.js +35 -11
- package/src/core/policy.js +106 -1
- package/src/core/queue.js +654 -29
- package/src/core/semantic.js +71 -5
- package/src/core/sync.js +216 -0
- package/src/mcp/server.js +18 -6
- package/tests.zip +0 -0
package/src/core/semantic.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { execSync } from 'child_process';
|
|
2
2
|
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
|
|
3
|
-
import { join } from 'path';
|
|
3
|
+
import { dirname, join, posix } from 'path';
|
|
4
4
|
|
|
5
5
|
const SOURCE_EXTENSIONS = new Set(['.js', '.mjs', '.cjs', '.ts', '.tsx', '.jsx']);
|
|
6
6
|
|
|
@@ -111,21 +111,22 @@ function parseFileObjects(repoPath, filePath) {
|
|
|
111
111
|
}));
|
|
112
112
|
}
|
|
113
113
|
|
|
114
|
-
function
|
|
114
|
+
/**
 * List the files tracked by git inside `repoPath`.
 *
 * @param {string} repoPath - directory to run `git ls-files` in
 * @param {{ sourceOnly?: boolean }} [options] - when `sourceOnly` is true,
 *   keep only paths accepted by `isSourceLikePath`
 * @returns {string[]} tracked file paths (repo-relative), or `[]` when the
 *   directory is not a git repo or git is unavailable
 */
export function listTrackedFiles(repoPath, { sourceOnly = false } = {}) {
  let raw;
  try {
    raw = execSync('git ls-files', {
      cwd: repoPath,
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
    });
  } catch {
    // Not a git repository (or git missing) — treat as "no tracked files".
    return [];
  }
  const tracked = raw.trim().split('\n').filter(Boolean);
  if (!sourceOnly) return tracked;
  return tracked.filter(isSourceLikePath);
}
|
|
126
127
|
|
|
127
128
|
export function buildSemanticIndexForPath(repoPath, filePaths = null) {
|
|
128
|
-
const files = filePaths ||
|
|
129
|
+
const files = filePaths || listTrackedFiles(repoPath);
|
|
129
130
|
const objects = files
|
|
130
131
|
.filter(isSourceLikePath)
|
|
131
132
|
.flatMap((filePath) => parseFileObjects(repoPath, filePath))
|
|
@@ -142,6 +143,67 @@ export function buildSemanticIndexForPath(repoPath, filePaths = null) {
|
|
|
142
143
|
};
|
|
143
144
|
}
|
|
144
145
|
|
|
146
|
+
/**
 * Extract every module specifier referenced by import/export/require syntax
 * in a source file's text.
 *
 * Regex-based (not a real parser): matches static `import ... from '...'`,
 * re-exports, bare side-effect imports (`import './x.js'`), CommonJS
 * `require('...')`, and dynamic `import('...')`. Specifiers inside strings
 * or comments may produce false positives; acceptable for indexing.
 *
 * @param {string} content - raw file contents
 * @returns {string[]} unique specifiers in first-seen order
 */
function extractImportSpecifiers(content) {
  const specifiers = new Set();
  const patterns = [
    /import\s+[^'"]*?from\s+['"]([^'"]+)['"]/g,
    /export\s+[^'"]*?from\s+['"]([^'"]+)['"]/g,
    // Side-effect imports (`import './polyfill.js'`) have no `from` clause
    // and were previously missed, dropping real dependency edges.
    /import\s+['"]([^'"]+)['"]/g,
    /require\(\s*['"]([^'"]+)['"]\s*\)/g,
    /import\(\s*['"]([^'"]+)['"]\s*\)/g,
  ];
  for (const pattern of patterns) {
    for (const match of content.matchAll(pattern)) {
      if (match[1]) specifiers.add(match[1]);
    }
  }
  return [...specifiers];
}
|
|
161
|
+
|
|
162
|
+
/**
 * Resolve a relative import specifier to a tracked repository file.
 *
 * Non-relative specifiers (bare package names, absolute paths) resolve to
 * null — only in-repo edges are indexed. When the specifier lacks a known
 * source extension, tries `<target><ext>` and `<target>/index<ext>` for each
 * extension in SOURCE_EXTENSIONS.
 *
 * @param {string} filePath - repo-relative path of the importing file
 * @param {string|null} specifier - raw import specifier
 * @param {Set<string>} trackedSourceFiles - repo-relative tracked source paths
 * @returns {string|null} resolved repo-relative path, or null
 */
function resolveImportTarget(filePath, specifier, trackedSourceFiles) {
  if (!specifier || !specifier.startsWith('.')) return null;

  const baseDir = dirname(filePath);
  const target = posix.normalize(posix.join(baseDir === '.' ? '' : baseDir, specifier));

  const hasKnownExtension = [...SOURCE_EXTENSIONS].some((ext) => target.endsWith(ext));
  const candidates = hasKnownExtension
    ? [target]
    : [...SOURCE_EXTENSIONS].flatMap((ext) => [
        `${target}${ext}`,
        posix.join(target, `index${ext}`),
      ]);

  for (const candidate of candidates) {
    if (trackedSourceFiles.has(candidate)) return candidate;
  }
  return null;
}
|
|
177
|
+
|
|
178
|
+
/**
 * Build a module-level dependency index for a repository: one record per
 * resolved in-repo import edge.
 *
 * @param {string} repoPath - repository root
 * @param {{ filePaths?: string[]|null }} [options] - explicit file list;
 *   defaults to git-tracked source files
 * @returns {{ generated_at: string, dependency_count: number,
 *   dependencies: Array<object> }}
 */
export function buildModuleDependencyIndexForPath(repoPath, { filePaths = null } = {}) {
  const sourceFiles = (filePaths || listTrackedFiles(repoPath, { sourceOnly: true }))
    .filter(isSourceLikePath);
  const tracked = new Set(sourceFiles);

  const dependencies = [];
  for (const sourcePath of sourceFiles) {
    const absolute = join(repoPath, sourcePath);
    // A tracked file can be deleted locally without being committed yet.
    if (!existsSync(absolute)) continue;

    const code = readFileSync(absolute, 'utf8');
    for (const specifier of extractImportSpecifiers(code)) {
      const target = resolveImportTarget(sourcePath, specifier, tracked);
      if (target === null) continue;
      dependencies.push({
        file_path: sourcePath,
        imported_path: target,
        import_specifier: specifier,
        area: areaForPath(sourcePath),
        subsystem_tags: classifySubsystems(sourcePath),
      });
    }
  }

  return {
    generated_at: new Date().toISOString(),
    dependency_count: dependencies.length,
    dependencies,
  };
}
|
|
206
|
+
|
|
145
207
|
export function detectSemanticConflicts(semanticIndexes = []) {
|
|
146
208
|
const conflicts = [];
|
|
147
209
|
|
|
@@ -227,7 +289,7 @@ function normalizeObjectRow(row) {
|
|
|
227
289
|
}
|
|
228
290
|
|
|
229
291
|
export function importCodeObjectsToStore(db, repoRoot, { filePaths = null } = {}) {
|
|
230
|
-
const files = filePaths ||
|
|
292
|
+
const files = filePaths || listTrackedFiles(repoRoot);
|
|
231
293
|
const objects = files
|
|
232
294
|
.filter(isSourceLikePath)
|
|
233
295
|
.flatMap((filePath) => parseFileObjects(repoRoot, filePath));
|
|
@@ -309,3 +371,7 @@ export function materializeCodeObjects(db, repoRoot, { outputRoot = repoRoot } =
|
|
|
309
371
|
files: files.sort(),
|
|
310
372
|
};
|
|
311
373
|
}
|
|
374
|
+
|
|
375
|
+
/**
 * Classify the subsystem tags for a repository-relative file path.
 * Thin public wrapper around the module-private classifySubsystems helper so
 * callers outside this module can tag paths without reaching into internals.
 * @param {string} filePath - repository-relative file path
 * @returns {*} subsystem tags for the path — presumably an array of tag
 *   strings (used as `subsystem_tags` elsewhere); confirm in classifySubsystems
 */
export function classifySubsystemsForPath(filePath) {
  return classifySubsystems(filePath);
}
|
package/src/core/sync.js
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* switchman cloud sync module
|
|
3
|
+
* Syncs coordination state to Supabase for Pro team users.
|
|
4
|
+
*
|
|
5
|
+
* Only runs when:
|
|
6
|
+
* 1. The user has a valid Pro licence
|
|
7
|
+
* 2. The user is a member of a team
|
|
8
|
+
* 3. Network is available
|
|
9
|
+
*
|
|
10
|
+
* Never throws — all sync operations are best-effort.
|
|
11
|
+
* Local SQLite remains the source of truth.
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { basename } from 'path';

import { readCredentials } from './licence.js';
|
|
15
|
+
|
|
16
|
+
// ─── Config ───────────────────────────────────────────────────────────────────
|
|
17
|
+
|
|
18
|
+
// Supabase project endpoint; env var allows pointing at a test/self-hosted instance.
const SUPABASE_URL = process.env.SWITCHMAN_SUPABASE_URL
  ?? 'https://afilbolhlkiingnsupgr.supabase.co';

// Anon (publishable) key JWT shipped with the client.
// NOTE(review): Supabase anon keys are designed to be embedded in clients,
// but all access control then rests on server-side row-level-security
// policies — confirm RLS is enabled on the sync_state and team_members tables.
const SUPABASE_ANON = process.env.SWITCHMAN_SUPABASE_ANON
  ?? 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImFmaWxib2xobGtpaW5nbnN1cGdyIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NzM1OTIzOTIsImV4cCI6MjA4OTE2ODM5Mn0.8TBfHfRB0vEyKPMWBd6i1DNwx1nS9UqprIAsJf35n88';

// Hard cap per sync request so CLI commands never hang on a slow network.
const SYNC_TIMEOUT_MS = 3000;
|
|
25
|
+
|
|
26
|
+
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
27
|
+
|
|
28
|
+
/**
 * Build the standard Supabase REST headers for an authenticated request.
 * @param {string} accessToken - user access token from stored credentials
 * @returns {Record<string, string>} headers including apikey and bearer auth
 */
function getHeaders(accessToken) {
  const headers = {
    'Content-Type': 'application/json',
    'apikey': SUPABASE_ANON,
    'Authorization': `Bearer ${accessToken}`,
  };
  return headers;
}
|
|
35
|
+
|
|
36
|
+
/**
 * fetch() with an AbortController-based timeout.
 * The timer is always cleared, whether the request settles or aborts.
 *
 * @param {string} url - request URL
 * @param {object} options - fetch options (the abort signal is added here)
 * @param {number} [timeoutMs=SYNC_TIMEOUT_MS] - abort after this many ms
 * @returns {Promise<Response>} the fetch response
 */
async function fetchWithTimeout(url, options, timeoutMs = SYNC_TIMEOUT_MS) {
  const aborter = new AbortController();
  const timer = setTimeout(() => aborter.abort(), timeoutMs);
  try {
    return await fetch(url, { ...options, signal: aborter.signal });
  } finally {
    clearTimeout(timer);
  }
}
|
|
46
|
+
|
|
47
|
+
// ─── Team resolution ──────────────────────────────────────────────────────────
|
|
48
|
+
|
|
49
|
+
/**
 * Get the team ID for the current user.
 * Returns null if not in a team or on any error (network, auth, timeout) —
 * absence of a team simply disables cloud sync.
 *
 * @param {string} accessToken - user access token
 * @param {string} userId - user ID to look up
 * @returns {Promise<string|null>} team ID or null
 */
async function getTeamId(accessToken, userId) {
  try {
    // Encode the ID so it cannot alter the PostgREST filter expression if it
    // ever contains reserved characters (&, =, ., ,).
    const url = `${SUPABASE_URL}/rest/v1/team_members`
      + `?user_id=eq.${encodeURIComponent(userId)}&select=team_id&limit=1`;
    const res = await fetchWithTimeout(url, { headers: getHeaders(accessToken) });
    if (!res.ok) return null;
    const rows = await res.json();
    return rows?.[0]?.team_id ?? null;
  } catch {
    return null;
  }
}
|
|
66
|
+
|
|
67
|
+
// ─── Push ─────────────────────────────────────────────────────────────────────
|
|
68
|
+
|
|
69
|
+
/**
 * Push a state change event to Supabase.
 * Called after any state-changing command. Best effort — never throws, and
 * never fails the local operation; local SQLite remains the source of truth.
 *
 * @param {string} eventType - 'task_added' | 'task_done' | 'task_failed' |
 *   'lease_acquired' | 'claim_added' | 'claim_released' | 'status_ping'
 * @param {object} payload - event-specific fields; email and synced_at are added
 * @param {{ worktree?: string|null }} [options] - explicit worktree name;
 *   defaults to the basename of the current working directory
 * @returns {Promise<void>}
 */
export async function pushSyncEvent(eventType, payload, { worktree = null } = {}) {
  try {
    const creds = readCredentials();
    if (!creds?.access_token || !creds?.user_id) return;

    const teamId = await getTeamId(creds.access_token, creds.user_id);
    if (!teamId) return; // Not in a team — no sync needed

    // basename() is path-separator-aware; splitting on '/' broke on Windows.
    // Falls back to 'unknown' when cwd is a filesystem root (basename '').
    const resolvedWorktree = worktree ?? (basename(process.cwd()) || 'unknown');

    await fetchWithTimeout(
      `${SUPABASE_URL}/rest/v1/sync_state`,
      {
        method: 'POST',
        headers: {
          ...getHeaders(creds.access_token),
          // Skip echoing the inserted row back — we don't read the response.
          'Prefer': 'return=minimal',
        },
        body: JSON.stringify({
          team_id: teamId,
          user_id: creds.user_id,
          worktree: resolvedWorktree,
          event_type: eventType,
          payload: {
            ...payload,
            email: creds.email ?? null,
            synced_at: new Date().toISOString(),
          },
        }),
      }
    );
  } catch {
    // Best effort — never fail the local operation
  }
}
|
|
114
|
+
|
|
115
|
+
// ─── Pull ─────────────────────────────────────────────────────────────────────
|
|
116
|
+
|
|
117
|
+
/**
 * Pull recent team sync events from Supabase.
 * Returns an array of events or empty array on error.
 * Used by `switchman status` to show team-wide activity.
 *
 * @returns {Promise<Array<object>>} events from the last 5 minutes, newest first
 */
export async function pullTeamState() {
  try {
    const creds = readCredentials();
    if (!creds?.access_token || !creds?.user_id) return [];

    const teamId = await getTeamId(creds.access_token, creds.user_id);
    if (!teamId) return [];

    // Only surface the last five minutes of activity across the team.
    const windowStart = new Date(Date.now() - 5 * 60 * 1000).toISOString();
    const query = [
      `team_id=eq.${teamId}`,
      `created_at=gte.${windowStart}`,
      'order=created_at.desc',
      'limit=50',
    ].join('&');

    const res = await fetchWithTimeout(
      `${SUPABASE_URL}/rest/v1/sync_state?${query}`,
      { headers: getHeaders(creds.access_token) }
    );
    if (!res.ok) return [];
    return await res.json();
  } catch {
    return [];
  }
}
|
|
148
|
+
|
|
149
|
+
/**
 * Pull active team members (those with events in the last 15 minutes).
 * Best effort — returns an empty array on any error.
 *
 * @returns {Promise<Array<object>>} one most-recent event per user+worktree,
 *   each shaped like { email, worktree, event_type, payload, created_at }
 */
export async function pullActiveTeamMembers() {
  try {
    const creds = readCredentials();
    if (!creds?.access_token || !creds?.user_id) return [];

    const teamId = await getTeamId(creds.access_token, creds.user_id);
    if (!teamId) return [];

    const windowStart = new Date(Date.now() - 15 * 60 * 1000).toISOString();
    const query = [
      `team_id=eq.${teamId}`,
      `created_at=gte.${windowStart}`,
      'order=created_at.desc',
      'limit=100',
    ].join('&');

    const res = await fetchWithTimeout(
      `${SUPABASE_URL}/rest/v1/sync_state?${query}`,
      { headers: getHeaders(creds.access_token) }
    );
    if (!res.ok) return [];
    const events = await res.json();

    // Events arrive newest-first, so the first hit per user+worktree is the
    // most recent — keep it and skip the rest.
    const latestPerKey = new Map();
    for (const event of events) {
      const key = `${event.user_id}:${event.worktree}`;
      if (latestPerKey.has(key)) continue;
      latestPerKey.set(key, event);
    }

    return [...latestPerKey.values()];
  } catch {
    return [];
  }
}
|
|
189
|
+
|
|
190
|
+
// ─── Cleanup ──────────────────────────────────────────────────────────────────
|
|
191
|
+
|
|
192
|
+
/**
 * Delete sync events older than the configured retention window for this user.
 * Called occasionally to keep the table tidy.
 * Best effort — never throws.
 *
 * @param {{ retentionDays?: number }} [options] - retention window in days;
 *   coerced to an integer and clamped to at least 1, defaulting to 7
 * @returns {Promise<void>}
 */
export async function cleanupOldSyncEvents({ retentionDays = 7 } = {}) {
  try {
    const creds = readCredentials();
    if (!creds?.access_token || !creds?.user_id) return;

    // Clamp to a sane positive integer; fall back to 7 on NaN/garbage input.
    const days = Math.max(1, Number.parseInt(retentionDays, 10) || 7);
    const cutoff = new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString();

    await fetchWithTimeout(
      `${SUPABASE_URL}/rest/v1/sync_state?user_id=eq.${creds.user_id}&created_at=lt.${cutoff}`,
      {
        method: 'DELETE',
        headers: getHeaders(creds.access_token),
      }
    );
  } catch {
    // Best effort
  }
}
|
package/src/mcp/server.js
CHANGED
|
@@ -26,7 +26,9 @@ import {
|
|
|
26
26
|
createTask,
|
|
27
27
|
startTaskLease,
|
|
28
28
|
completeTask,
|
|
29
|
+
completeLeaseTask,
|
|
29
30
|
failTask,
|
|
31
|
+
failLeaseTask,
|
|
30
32
|
listTasks,
|
|
31
33
|
getNextPendingTask,
|
|
32
34
|
listLeases,
|
|
@@ -631,11 +633,16 @@ Returns JSON:
|
|
|
631
633
|
db.close();
|
|
632
634
|
return toolError(`Task ${task_id} is active under lease ${activeLease.id}, not ${lease_id}.`);
|
|
633
635
|
}
|
|
634
|
-
|
|
635
|
-
|
|
636
|
+
const effectiveLeaseId = activeLease?.id ?? lease_id ?? null;
|
|
637
|
+
if (effectiveLeaseId) {
|
|
638
|
+
completeLeaseTask(db, effectiveLeaseId);
|
|
639
|
+
} else {
|
|
640
|
+
completeTask(db, task_id);
|
|
641
|
+
releaseFileClaims(db, task_id);
|
|
642
|
+
}
|
|
636
643
|
db.close();
|
|
637
644
|
|
|
638
|
-
const result = { task_id, lease_id:
|
|
645
|
+
const result = { task_id, lease_id: effectiveLeaseId, status: 'done', files_released: true };
|
|
639
646
|
return toolOk(JSON.stringify(result, null, 2), result);
|
|
640
647
|
} catch (err) {
|
|
641
648
|
return toolError(err.message);
|
|
@@ -686,11 +693,16 @@ Returns JSON:
|
|
|
686
693
|
db.close();
|
|
687
694
|
return toolError(`Task ${task_id} is active under lease ${activeLease.id}, not ${lease_id}.`);
|
|
688
695
|
}
|
|
689
|
-
|
|
690
|
-
|
|
696
|
+
const effectiveLeaseId = activeLease?.id ?? lease_id ?? null;
|
|
697
|
+
if (effectiveLeaseId) {
|
|
698
|
+
failLeaseTask(db, effectiveLeaseId, reason);
|
|
699
|
+
} else {
|
|
700
|
+
failTask(db, task_id, reason);
|
|
701
|
+
releaseFileClaims(db, task_id);
|
|
702
|
+
}
|
|
691
703
|
db.close();
|
|
692
704
|
|
|
693
|
-
const result = { task_id, lease_id:
|
|
705
|
+
const result = { task_id, lease_id: effectiveLeaseId, status: 'failed', reason, files_released: true };
|
|
694
706
|
return toolOk(JSON.stringify(result, null, 2), result);
|
|
695
707
|
} catch (err) {
|
|
696
708
|
return toolError(err.message);
|
package/tests.zip
ADDED
|
Binary file
|