switchman-dev 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,225 @@
1
+ import { getMergeQueueItem, listMergeQueue, listTasks, listWorktrees, markMergeQueueState, startMergeQueueItem } from './db.js';
2
+ import { gitBranchExists, gitMergeBranchInto, gitRebaseOnto } from './git.js';
3
+ import { runAiMergeGate } from './merge-gate.js';
4
+ import { scanAllWorktrees } from './detector.js';
5
+
6
// Translate a raw git/queue error into a structured failure descriptor:
// a machine code, a human-readable summary, a suggested next action, and
// whether an automatic retry could plausibly succeed.
function describeQueueError(err) {
  const detail = String(err?.stderr || err?.message || err || '').trim();

  if (/conflict/i.test(detail)) {
    return {
      code: 'merge_conflict',
      summary: detail || 'Merge conflict blocked queue item.',
      nextAction: 'Resolve the branch conflict manually, then run `switchman queue retry <itemId>`.',
      retryable: true,
    };
  }

  if (/not a valid object name|pathspec|did not match any file/i.test(detail)) {
    // Missing-ref errors are permanent; retrying cannot recreate the branch.
    return {
      code: 'source_missing',
      summary: detail || 'The queued source branch no longer exists.',
      nextAction: 'Recreate the source branch or remove the queue item.',
      retryable: false,
    };
  }

  return {
    code: 'merge_failed',
    summary: detail || 'Merge queue item failed.',
    nextAction: 'Inspect the branch state, then retry or remove the queue item.',
    retryable: true,
  };
}
33
+
34
// Decide whether a failed queue item is retried automatically or parked as
// blocked, and persist that decision via markMergeQueueState. Returns the
// { status, item } pair callers surface to the user.
function scheduleRetryOrBlock(db, item, failure) {
  const attempted = Number(item.retry_count || 0);
  const allowed = Number(item.max_retries || 0);
  const canRetry = failure.retryable && attempted < allowed;

  if (!canRetry) {
    const blockedItem = markMergeQueueState(db, item.id, {
      status: 'blocked',
      lastErrorCode: failure.code,
      lastErrorSummary: failure.summary,
      // Substitute the concrete queue item id into the suggested command.
      nextAction: failure.nextAction.replace('<itemId>', item.id),
    });
    return { status: 'blocked', item: blockedItem };
  }

  const retryingItem = markMergeQueueState(db, item.id, {
    status: 'retrying',
    lastErrorCode: failure.code,
    lastErrorSummary: failure.summary,
    nextAction: `Retry ${attempted + 1} of ${allowed} scheduled automatically. Run \`switchman queue run\` again after fixing any underlying branch drift if needed.`,
    incrementRetry: true,
  });
  return { status: 'retrying', item: retryingItem };
}
60
+
61
// Run the repo-level gate (worktree scan plus AI merge gate) and report
// whether the queue may proceed, alongside the underlying reports.
async function evaluateQueueRepoGate(db, repoRoot) {
  const report = await scanAllWorktrees(db, repoRoot);
  const aiGate = await runAiMergeGate(db, repoRoot);

  const blockedInvalidations =
    aiGate.dependency_invalidations?.filter((entry) => entry.severity === 'blocked').length || 0;
  // Every counter here must be zero for the gate to pass.
  const failureCounts = [
    report.conflicts.length,
    report.fileConflicts.length,
    report.ownershipConflicts?.length || 0,
    report.semanticConflicts?.length || 0,
    report.unclaimedChanges.length,
    report.complianceSummary.non_compliant,
    report.complianceSummary.stale,
    blockedInvalidations,
  ];
  const ok = aiGate.status !== 'blocked' && failureCounts.every((count) => count === 0);

  return {
    ok,
    summary: ok
      ? `Repo gate passed for ${report.worktrees.length} worktree(s).`
      : 'Repo gate rejected unmanaged changes, stale leases, ownership conflicts, stale dependency invalidations, or boundary validation failures.',
    report,
    aiGate,
  };
}
83
+
84
// Resolve a queue item's source descriptor into the concrete branch,
// worktree, and pipeline id to merge. Throws when the item is missing,
// references an unregistered worktree, or has an unknown source type.
export function resolveQueueSource(db, repoRoot, item) {
  if (!item) {
    throw new Error('Queue item is required.');
  }

  switch (item.source_type) {
    case 'branch':
      return {
        branch: item.source_ref,
        worktree: item.source_worktree || null,
        pipeline_id: item.source_pipeline_id || null,
      };

    case 'worktree': {
      const registered = listWorktrees(db).find(
        (entry) => entry.name === item.source_worktree || entry.name === item.source_ref,
      );
      if (!registered) {
        throw new Error(`Queued worktree ${item.source_worktree || item.source_ref} is not registered.`);
      }
      return {
        branch: registered.branch,
        worktree: registered.name,
        worktree_path: registered.path,
        pipeline_id: item.source_pipeline_id || null,
      };
    }

    case 'pipeline': {
      const pipelineId = item.source_pipeline_id || item.source_ref;
      // Pipeline tasks share the pipeline id as an id prefix; the first task
      // carrying a worktree is the one whose branch gets merged.
      const pipelineTasks = listTasks(db).filter((task) => task.id.startsWith(`${pipelineId}-`));
      const landedTask = pipelineTasks.find((task) => task.worktree);
      if (!landedTask?.worktree) {
        throw new Error(`Pipeline ${pipelineId} has no landed worktree branch to queue.`);
      }
      const registered = listWorktrees(db).find((entry) => entry.name === landedTask.worktree);
      if (!registered) {
        throw new Error(`Queued pipeline worktree ${landedTask.worktree} is not registered.`);
      }
      return {
        branch: registered.branch,
        worktree: registered.name,
        worktree_path: registered.path,
        pipeline_id: pipelineId,
      };
    }

    default:
      throw new Error(`Unsupported queue source type: ${item.source_type}`);
  }
}
130
+
131
// Derive a human-facing next action for a queue item, when one is knowable.
export function inferQueueNextAction(item) {
  if (!item) {
    return null;
  }
  switch (item.status) {
    case 'blocked':
      return item.next_action || null;
    case 'merged':
      return 'No action needed.';
    default:
      return null;
  }
}
137
+
138
// Summarize a merge-queue listing: per-status counts, the next actionable
// item (first in queue order), and the blocked subset.
export function buildQueueStatusSummary(items) {
  const counts = {
    queued: 0,
    validating: 0,
    rebasing: 0,
    merging: 0,
    retrying: 0,
    blocked: 0,
    merged: 0,
  };
  // Single pass instead of one filter per status.
  for (const entry of items) {
    if (Object.hasOwn(counts, entry.status)) {
      counts[entry.status] += 1;
    }
  }

  const activeStatuses = ['queued', 'retrying', 'validating', 'rebasing', 'merging'];
  return {
    counts,
    next: items.find((entry) => activeStatuses.includes(entry.status)) || null,
    blocked: items.filter((entry) => entry.status === 'blocked'),
  };
}
155
+
156
// Pull the next runnable item off the merge queue and drive it through the
// full lifecycle: resolve source -> rebase -> repo gate -> merge.
// Returns { status, item } where status is 'idle', 'blocked', 'retrying',
// or 'merged'.
export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } = {}) {
  // Only 'queued' and 'retrying' items are eligible to start.
  const nextItem = listMergeQueue(db).find((item) => ['queued', 'retrying'].includes(item.status));
  if (!nextItem) {
    return { status: 'idle', item: null };
  }

  // startMergeQueueItem claims the item; a falsy return means its state
  // changed underneath us, so report idle rather than fail.
  const started = startMergeQueueItem(db, nextItem.id);
  if (!started) {
    return { status: 'idle', item: null };
  }

  try {
    const resolved = resolveQueueSource(db, repoRoot, started);
    // A per-item target branch overrides the caller-supplied default.
    const queueTarget = started.target_branch || targetBranch;

    if (!gitBranchExists(repoRoot, resolved.branch)) {
      // Not retryable: automatic retries cannot recreate a deleted branch.
      return scheduleRetryOrBlock(db, started, {
        code: 'source_missing',
        summary: `Source branch ${resolved.branch} does not exist.`,
        nextAction: `Remove this queue item or recreate ${resolved.branch}, then run \`switchman queue retry ${started.id}\`.`,
        retryable: false,
      });
    }

    markMergeQueueState(db, started.id, { status: 'rebasing' });
    // Rebase inside the item's own worktree when one exists, otherwise at the
    // repo root. Rebase failures surface through the catch below.
    gitRebaseOnto(resolved.worktree_path || repoRoot, queueTarget, resolved.branch);

    // The gate runs after the rebase so it validates the post-rebase state.
    const gate = await evaluateQueueRepoGate(db, repoRoot);
    if (!gate.ok) {
      return {
        status: 'blocked',
        item: markMergeQueueState(db, started.id, {
          status: 'blocked',
          lastErrorCode: 'gate_failed',
          lastErrorSummary: gate.summary,
          nextAction: `Run \`switchman gate ci\`, resolve the reported issues, then run \`switchman queue retry ${started.id}\`.`,
        }),
      };
    }

    markMergeQueueState(db, started.id, { status: 'merging' });
    const mergedCommit = gitMergeBranchInto(repoRoot, queueTarget, resolved.branch);

    return {
      status: 'merged',
      item: markMergeQueueState(db, started.id, {
        status: 'merged',
        mergedCommit,
      }),
    };
  } catch (err) {
    // Any failure (resolve, rebase, merge) is classified and either retried
    // automatically or parked as blocked with a suggested next action.
    const failure = describeQueueError(err);
    return scheduleRetryOrBlock(db, started, failure);
  }
}
211
+
212
// Process up to maxItems queue entries sequentially, stopping early at the
// first non-merged outcome (blocked/retrying) or when the queue drains.
// Returns the processed results plus a fresh queue summary.
export async function runMergeQueue(db, repoRoot, { maxItems = 1, targetBranch = 'main' } = {}) {
  const processed = [];
  let remaining = maxItems;

  while (remaining > 0) {
    remaining -= 1;
    const result = await runNextQueueItem(db, repoRoot, { targetBranch });
    if (!result.item) {
      break; // queue is empty (idle)
    }
    processed.push(result);
    if (result.status !== 'merged') {
      break; // stop on blocked/retrying so the operator can intervene
    }
  }

  return {
    processed,
    summary: buildQueueStatusSummary(listMergeQueue(db)),
  };
}
@@ -0,0 +1,311 @@
1
import { execSync } from 'child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
4
+
5
+ const SOURCE_EXTENSIONS = new Set(['.js', '.mjs', '.cjs', '.ts', '.tsx', '.jsx']);
6
+
7
// Path-based heuristics mapping file locations to coarse subsystem tags.
// ALL matching keys apply to a path (see classifySubsystems); order here does
// not imply priority.
const SUBSYSTEM_PATTERNS = [
  { key: 'auth', regex: /(^|\/)(auth|login|session|permissions?|rbac|acl)(\/|$)/i },
  { key: 'schema', regex: /(^|\/)(schema|migrations?|db|database|sql)(\/|$)|schema\./i },
  { key: 'config', regex: /(^|\/)(config|configs|settings)(\/|$)|(^|\/)(package\.json|pnpm-lock\.yaml|package-lock\.json|yarn\.lock|tsconfig.*|vite\.config.*|webpack\.config.*)$/i },
  { key: 'api', regex: /(^|\/)(api|routes?|controllers?)(\/|$)/i },
  { key: 'payments', regex: /(^|\/)(payments?|billing|invoice|checkout|subscription)(\/|$)/i },
  { key: 'ui', regex: /(^|\/)(components?|ui|pages?)(\/|$)/i },
];
15
+
16
// Return values with duplicates removed, preserving first-seen order.
function uniq(values) {
  const seen = new Set();
  const result = [];
  for (const value of values) {
    if (!seen.has(value)) {
      seen.add(value);
      result.push(value);
    }
  }
  return result;
}
19
+
20
// True when filePath ends in one of the recognized source extensions.
function isSourceLikePath(filePath) {
  for (const ext of SOURCE_EXTENSIONS) {
    if (filePath.endsWith(ext)) {
      return true;
    }
  }
  return false;
}
23
+
24
// Tag a path with every matching subsystem; fall back to ['general'].
function classifySubsystems(filePath) {
  const matched = [];
  for (const { key, regex } of SUBSYSTEM_PATTERNS) {
    if (regex.test(filePath)) {
      matched.push(key);
    }
  }
  return matched.length > 0 ? matched : ['general'];
}
28
+
29
// Collapse a repo-relative path into a coarse "area" bucket: the top-level
// directory, or "<root>/<child>" for common source roots like src/ and tests/.
function areaForPath(filePath) {
  const segments = String(filePath || '').split('/').filter(Boolean);
  if (segments.length === 0) {
    return 'repo';
  }
  const [root, child] = segments;
  if (segments.length === 1) {
    return root;
  }
  const groupedRoots = ['src', 'app', 'lib', 'server', 'client', 'tests', 'test', 'spec', 'specs'];
  return groupedRoots.includes(root) ? `${root}/${child}` : root;
}
38
+
39
// Scan module text for named exports (functions, classes, bindings, TS
// types/interfaces/enums) plus a default export, returning deduplicated
// { kind, name } records in first-seen order per kind group.
function extractExports(content) {
  const exportPatterns = [
    { kind: 'function', regex: /export\s+(?:async\s+)?function\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'class', regex: /export\s+class\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'const', regex: /export\s+(?:const|let|var)\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'type', regex: /export\s+type\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'interface', regex: /export\s+interface\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'enum', regex: /export\s+enum\s+([A-Za-z_$][\w$]*)/g },
  ];

  const found = [];
  for (const { kind, regex } of exportPatterns) {
    for (const match of content.matchAll(regex)) {
      found.push({ kind, name: match[1] });
    }
  }

  if (/export\s+default\s+/.test(content)) {
    found.push({ kind: 'default', name: 'default' });
  }

  // Dedupe on the (kind, name) pair while keeping discovery order.
  const seen = new Set();
  const result = [];
  for (const entry of found) {
    const key = `${entry.kind}:${entry.name}`;
    if (!seen.has(key)) {
      seen.add(key);
      result.push({ kind: entry.kind, name: entry.name });
    }
  }
  return result;
}
65
+
66
// Collect the full text of each `export ...` statement in content, spanning
// multiple lines until braces balance AND a terminating ';' or '}' is seen.
function extractExportBlocks(content) {
  const lines = String(content || '').split('\n');
  const blocks = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (!line.trim().startsWith('export ')) continue;

    const blockLines = [line];
    // Net brace depth of the first line; > 0 means the block is still open.
    let braceDepth = (line.match(/{/g) || []).length - (line.match(/}/g) || []).length;
    // A block also continues while its last line lacks a ';'/'}' terminator
    // (e.g. a multi-line `export const x =` initializer with no braces yet).
    let needsTerminator = !/[;}]$/.test(line.trim());

    while (i + 1 < lines.length && (braceDepth > 0 || needsTerminator)) {
      // Advancing the outer index here makes the outer loop resume AFTER the
      // block, so consumed lines are never re-scanned.
      i += 1;
      const nextLine = lines[i];
      blockLines.push(nextLine);
      braceDepth += (nextLine.match(/{/g) || []).length - (nextLine.match(/}/g) || []).length;
      if (braceDepth <= 0 && /[;}]$/.test(nextLine.trim())) {
        needsTerminator = false;
      }
    }

    blocks.push(blockLines.join('\n').trim());
  }

  // NOTE(review): brace counting ignores braces inside strings, comments, and
  // template literals, so a block can over- or under-run in those cases —
  // presumably acceptable for this heuristic indexer; confirm with callers.
  return blocks.filter(Boolean);
}
92
+
93
// Parse one tracked file into export-level "code object" records carrying
// subsystem tags, an area bucket, and the export's source text.
// Returns [] for missing or non-source files.
function parseFileObjects(repoPath, filePath) {
  const absolutePath = join(repoPath, filePath);
  if (!existsSync(absolutePath) || !isSourceLikePath(filePath)) return [];

  const content = readFileSync(absolutePath, 'utf8');
  const exports = extractExports(content);
  const exportBlocks = extractExportBlocks(content);
  const subsystemTags = classifySubsystems(filePath);
  const area = areaForPath(filePath);

  // FIX: match each export to its source block by name instead of by array
  // index. extractExports groups results by declaration kind while
  // extractExportBlocks preserves file order, so index-based pairing attached
  // the wrong source_text whenever kinds were interleaved in the file.
  const findBlock = (entry) => {
    if (entry.kind === 'default') {
      return exportBlocks.find((block) => /export\s+default\s+/.test(block));
    }
    // Escape regex metacharacters (identifiers may contain '$').
    const escapedName = entry.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const namePattern = new RegExp(`\\b${escapedName}\\b`);
    // Heuristic: the declaring block mentions the name on its first line.
    return exportBlocks.find((block) => namePattern.test(block.split('\n', 1)[0]));
  };

  return exports.map((entry) => ({
    object_id: `${filePath}#${entry.kind}:${entry.name}`,
    file_path: filePath,
    kind: entry.kind,
    name: entry.name,
    area,
    subsystem_tags: subsystemTags,
    // Fall back to a synthetic stub when no block could be matched.
    source_text: findBlock(entry) || `export ${entry.kind} ${entry.name}`,
  }));
}
113
+
114
// List git-tracked files for repoPath. Any git failure (not a repo, git
// missing, bad cwd) degrades to an empty array so callers stay best-effort.
function trackedFiles(repoPath) {
  let output;
  try {
    output = execSync('git ls-files', {
      cwd: repoPath,
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
    });
  } catch {
    return [];
  }
  return output.trim().split('\n').filter(Boolean);
}
126
+
127
// Build a sorted, metadata-only index of exported objects for every
// source-like file under repoPath (or an explicit file list).
export function buildSemanticIndexForPath(repoPath, filePaths = null) {
  const candidates = (filePaths || trackedFiles(repoPath)).filter(isSourceLikePath);

  const objects = [];
  for (const filePath of candidates) {
    objects.push(...parseFileObjects(repoPath, filePath));
  }

  // Stable (file, kind, name) ordering keeps the index deterministic.
  objects.sort(
    (left, right) =>
      left.file_path.localeCompare(right.file_path)
      || left.kind.localeCompare(right.kind)
      || left.name.localeCompare(right.name),
  );

  return {
    generated_at: new Date().toISOString(),
    object_count: objects.length,
    // Strip source_text: the published index carries metadata only.
    objects: objects.map(({ source_text, ...rest }) => rest),
  };
}
144
+
145
// Pairwise-compare worktree semantic indexes and report overlaps:
//  - 'semantic_object_overlap' (severity: blocked) when two worktrees carry
//    the exact same object_id (same file + kind + name);
//  - 'semantic_name_overlap' (severity: warn) when distinct objects share a
//    name and also share a subsystem tag or an area.
// Duplicate findings are removed structurally via a JSON round-trip.
export function detectSemanticConflicts(semanticIndexes = []) {
  const conflicts = [];

  // Each unordered pair (i, j) with i < j is compared exactly once.
  for (let i = 0; i < semanticIndexes.length; i++) {
    for (let j = i + 1; j < semanticIndexes.length; j++) {
      const left = semanticIndexes[i];
      const right = semanticIndexes[j];
      // Bucket the right-hand worktree's objects by name for O(1) lookups.
      const rightByName = new Map();
      for (const object of right.objects) {
        if (!rightByName.has(object.name)) rightByName.set(object.name, []);
        rightByName.get(object.name).push(object);
      }

      for (const leftObject of left.objects) {
        const matching = rightByName.get(leftObject.name) || [];
        for (const rightObject of matching) {
          if (leftObject.object_id === rightObject.object_id) {
            conflicts.push({
              type: 'semantic_object_overlap',
              severity: 'blocked',
              worktreeA: left.worktree,
              worktreeB: right.worktree,
              object_name: leftObject.name,
              object_kind: leftObject.kind,
              fileA: leftObject.file_path,
              fileB: rightObject.file_path,
              area: leftObject.area,
            });
            // Identical object: the weaker name-overlap check is redundant.
            continue;
          }

          const sharedSubsystems = leftObject.subsystem_tags.filter((tag) => rightObject.subsystem_tags.includes(tag));
          if (sharedSubsystems.length > 0 || leftObject.area === rightObject.area) {
            conflicts.push({
              type: 'semantic_name_overlap',
              severity: 'warn',
              worktreeA: left.worktree,
              worktreeB: right.worktree,
              object_name: leftObject.name,
              object_kind_a: leftObject.kind,
              object_kind_b: rightObject.kind,
              fileA: leftObject.file_path,
              fileB: rightObject.file_path,
              shared_subsystems: sharedSubsystems,
              // Only report an area when both sides agree on it.
              area: leftObject.area === rightObject.area ? leftObject.area : null,
            });
          }
        }
      }
    }
  }

  // Structural dedupe; key order is stable because each literal above is
  // built with a fixed key order, so JSON.stringify is a usable identity.
  return uniq(conflicts.map((item) => JSON.stringify(item))).map((item) => JSON.parse(item));
}
199
+
200
// Build a per-worktree semantic index and persist it to
// .switchman/semantic-index.json under repoRoot.
export function materializeSemanticIndex(repoRoot, { worktrees = [] } = {}) {
  const generatedAt = new Date().toISOString();

  const indexed = worktrees.map((worktree) => ({
    worktree: worktree.name,
    branch: worktree.branch || 'unknown',
    index: buildSemanticIndexForPath(worktree.path),
  }));
  indexed.sort((left, right) => left.worktree.localeCompare(right.worktree));

  const semanticIndex = {
    generated_at: generatedAt,
    worktrees: indexed,
  };

  const switchmanDir = join(repoRoot, '.switchman');
  if (!existsSync(switchmanDir)) {
    mkdirSync(switchmanDir, { recursive: true });
  }
  const outputPath = join(switchmanDir, 'semantic-index.json');
  writeFileSync(outputPath, `${JSON.stringify(semanticIndex, null, 2)}\n`);

  return {
    output_path: outputPath,
    semantic_index: semanticIndex,
  };
}
221
+
222
// Convert a raw code_objects row into its API shape, deserializing the
// subsystem_tags JSON column. A corrupt or hand-edited column value must not
// throw and take down every caller of listCodeObjects, so parsing failures
// degrade to an empty tag list.
function normalizeObjectRow(row) {
  let tags;
  try {
    tags = JSON.parse(row.subsystem_tags || '[]');
  } catch {
    tags = [];
  }
  return {
    ...row,
    // The column always stores a JSON array (see importCodeObjectsToStore);
    // coerce anything else to [] for a stable shape.
    subsystem_tags: Array.isArray(tags) ? tags : [],
  };
}
228
+
229
// Re-scan source files (all git-tracked files by default, or an explicit
// list) and upsert their exported objects into the code_objects table.
// Returns the full normalized store contents after the import.
export function importCodeObjectsToStore(db, repoRoot, { filePaths = null } = {}) {
  const files = filePaths || trackedFiles(repoRoot);
  const objects = files
    .filter(isSourceLikePath)
    .flatMap((filePath) => parseFileObjects(repoRoot, filePath));

  // Upsert keyed on object_id so repeated imports refresh rows in place
  // rather than duplicating them.
  const upsert = db.prepare(`
    INSERT INTO code_objects (object_id, file_path, kind, name, source_text, subsystem_tags, area, updated_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))
    ON CONFLICT(object_id) DO UPDATE SET
    file_path=excluded.file_path,
    kind=excluded.kind,
    name=excluded.name,
    source_text=excluded.source_text,
    subsystem_tags=excluded.subsystem_tags,
    area=excluded.area,
    updated_at=datetime('now')
  `);

  for (const object of objects) {
    upsert.run(
      object.object_id,
      object.file_path,
      object.kind,
      object.name,
      object.source_text,
      // Tags are stored as a JSON array string; normalizeObjectRow reverses this.
      JSON.stringify(object.subsystem_tags || []),
      object.area || null,
    );
  }

  return listCodeObjects(db);
}
262
+
263
// Read code objects from the store, optionally scoped to one file path, in a
// stable (file, kind, name) order. Rows come back normalized (tags decoded).
export function listCodeObjects(db, { filePath = null } = {}) {
  const clauses = ['SELECT *', 'FROM code_objects'];
  const params = [];
  if (filePath) {
    clauses.push('WHERE file_path=?');
    params.push(filePath);
  }
  clauses.push('ORDER BY file_path ASC, kind ASC, name ASC');

  const rows = db.prepare(clauses.join('\n')).all(...params);
  return rows.map(normalizeObjectRow);
}
273
+
274
// Overwrite one code object's stored source text, bumping updated_at.
// Returns the refreshed normalized row, or null when objectId is unknown.
export function updateCodeObjectSource(db, objectId, sourceText) {
  const updateStmt = db.prepare(`
    UPDATE code_objects
    SET source_text=?,
        updated_at=datetime('now')
    WHERE object_id=?
  `);
  updateStmt.run(sourceText, objectId);

  const refreshed = db.prepare('SELECT * FROM code_objects WHERE object_id=?').get(objectId);
  if (!refreshed) {
    return null;
  }
  return normalizeObjectRow(refreshed);
}
284
+
285
// Write every stored code object back to disk under outputRoot, grouping
// objects by file and regenerating each file deterministically (objects
// ordered by kind, then name). Returns the sorted list of written files.
export function materializeCodeObjects(db, repoRoot, { outputRoot = repoRoot } = {}) {
  const objects = listCodeObjects(db);
  const byFile = new Map();
  for (const object of objects) {
    if (!byFile.has(object.file_path)) byFile.set(object.file_path, []);
    byFile.get(object.file_path).push(object);
  }

  const files = [];
  for (const [filePath, entries] of byFile.entries()) {
    const absolutePath = join(outputRoot, filePath);
    // FIX: derive the parent directory with dirname() instead of splitting on
    // '/'. absolutePath is built by join() and uses the platform separator,
    // so the '/'-split produced a broken directory path on Windows.
    const dirPath = dirname(absolutePath);
    if (!existsSync(dirPath)) mkdirSync(dirPath, { recursive: true });
    const ordered = entries
      .slice()
      .sort((left, right) => left.kind.localeCompare(right.kind) || left.name.localeCompare(right.name));
    // Guard against NULL source_text rows so a single bad record cannot
    // abort the entire materialization.
    const content = `${ordered.map((entry) => (entry.source_text || '').trim()).join('\n\n')}\n`;
    writeFileSync(absolutePath, content);
    files.push(filePath);
  }

  return {
    output_root: outputRoot,
    file_count: files.length,
    files: files.sort(),
  };
}