switchman-dev 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,311 @@
1
import { execSync } from 'child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
4
+
5
// File extensions that mark a path as analyzable source code.
const SOURCE_EXTENSIONS = new Set([
  '.js',
  '.mjs',
  '.cjs',
  '.ts',
  '.tsx',
  '.jsx',
]);
6
+
7
// Path-classification rules: a file receives every `key` whose `regex`
// matches its repo-relative path (see classifySubsystems). All patterns
// are case-insensitive and anchor on path-segment boundaries.
const SUBSYSTEM_PATTERNS = [
  // Authentication / authorization code.
  { key: 'auth', regex: /(^|\/)(auth|login|session|permissions?|rbac|acl)(\/|$)/i },
  // Database schema, migrations, and SQL; also any path containing "schema.".
  { key: 'schema', regex: /(^|\/)(schema|migrations?|db|database|sql)(\/|$)|schema\./i },
  // Config directories, plus well-known config and lockfile names at a segment end.
  { key: 'config', regex: /(^|\/)(config|configs|settings)(\/|$)|(^|\/)(package\.json|pnpm-lock\.yaml|package-lock\.json|yarn\.lock|tsconfig.*|vite\.config.*|webpack\.config.*)$/i },
  // HTTP API surface: route and controller directories.
  { key: 'api', regex: /(^|\/)(api|routes?|controllers?)(\/|$)/i },
  // Payments and billing flows.
  { key: 'payments', regex: /(^|\/)(payments?|billing|invoice|checkout|subscription)(\/|$)/i },
  // Front-end components and pages.
  { key: 'ui', regex: /(^|\/)(components?|ui|pages?)(\/|$)/i },
];
15
+
16
/**
 * Returns the distinct values of an iterable, preserving first-seen order.
 * @param {Iterable<*>} values
 * @returns {Array<*>}
 */
function uniq(values) {
  const distinct = new Set(values);
  return Array.from(distinct);
}
19
+
20
/**
 * True when the path ends with one of the known source-code extensions.
 * @param {string} filePath - Repo-relative file path.
 * @returns {boolean}
 */
function isSourceLikePath(filePath) {
  for (const extension of SOURCE_EXTENSIONS) {
    if (filePath.endsWith(extension)) return true;
  }
  return false;
}
23
+
24
/**
 * Collects the subsystem tags whose pattern matches the given path.
 * @param {string} filePath - Repo-relative file path.
 * @returns {string[]} Matching tags, or ['general'] when none match.
 */
function classifySubsystems(filePath) {
  const tags = [];
  for (const { key, regex } of SUBSYSTEM_PATTERNS) {
    if (regex.test(filePath)) tags.push(key);
  }
  return tags.length === 0 ? ['general'] : tags;
}
28
+
29
/**
 * Derives a coarse "area" label from a file path.
 * Top-level files map to themselves, files under well-known roots
 * (src, app, lib, …) map to "root/subdir", anything else maps to its
 * first path segment. Empty/nullish input maps to 'repo'.
 * @param {string} filePath
 * @returns {string}
 */
function areaForPath(filePath) {
  const segments = String(filePath || '').split('/').filter(Boolean);
  if (segments.length === 0) return 'repo';
  if (segments.length === 1) return segments[0];
  const [root, child] = segments;
  const groupedRoots = ['src', 'app', 'lib', 'server', 'client', 'tests', 'test', 'spec', 'specs'];
  return groupedRoots.includes(root) ? `${root}/${child}` : root;
}
38
+
39
/**
 * Extracts named exports from source text via regex scanning.
 * Results are deduplicated on (kind, name) and ordered by declaration
 * kind first (function, class, const, type, interface, enum, default),
 * then by match order within each kind.
 * @param {string} content - Full source text.
 * @returns {Array<{kind: string, name: string}>}
 */
function extractExports(content) {
  const patterns = [
    { kind: 'function', regex: /export\s+(?:async\s+)?function\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'class', regex: /export\s+class\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'const', regex: /export\s+(?:const|let|var)\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'type', regex: /export\s+type\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'interface', regex: /export\s+interface\s+([A-Za-z_$][\w$]*)/g },
    { kind: 'enum', regex: /export\s+enum\s+([A-Za-z_$][\w$]*)/g },
  ];

  // Map keyed on "kind:name" dedupes while keeping insertion order.
  const byKey = new Map();
  for (const { kind, regex } of patterns) {
    for (const match of content.matchAll(regex)) {
      const key = `${kind}:${match[1]}`;
      if (!byKey.has(key)) byKey.set(key, { kind, name: match[1] });
    }
  }

  if (/export\s+default\s+/.test(content)) {
    byKey.set('default:default', { kind: 'default', name: 'default' });
  }

  return [...byKey.values()];
}
65
+
66
/**
 * Collects the raw text of every `export` statement in `content`.
 *
 * Scanning is line-based: a block starts at any line whose trimmed text
 * begins with "export " and is extended line by line until braces balance
 * out and a line ends in `;` or `}` (or the file ends). Brace counting is
 * purely textual — NOTE(review): braces inside string literals, template
 * literals, or comments will skew the depth; acceptable for heuristic
 * indexing, but this is not a real parser.
 *
 * @param {string} content - Full source text (null/undefined treated as '').
 * @returns {string[]} Trimmed export blocks (newline-joined), in file order.
 */
function extractExportBlocks(content) {
  const lines = String(content || '').split('\n');
  const blocks = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (!line.trim().startsWith('export ')) continue;

    const blockLines = [line];
    // Net brace depth of the opening line: +1 per '{', -1 per '}'.
    let braceDepth = (line.match(/{/g) || []).length - (line.match(/}/g) || []).length;
    // The block is unterminated until some line ends in ';' or '}'.
    let needsTerminator = !/[;}]$/.test(line.trim());

    // Consume following lines until balanced and terminated. `i` advances
    // here too, so the outer loop resumes after this block — export lines
    // swallowed inside a block are intentionally not re-scanned.
    while (i + 1 < lines.length && (braceDepth > 0 || needsTerminator)) {
      i += 1;
      const nextLine = lines[i];
      blockLines.push(nextLine);
      braceDepth += (nextLine.match(/{/g) || []).length - (nextLine.match(/}/g) || []).length;
      if (braceDepth <= 0 && /[;}]$/.test(nextLine.trim())) {
        needsTerminator = false;
      }
    }

    blocks.push(blockLines.join('\n').trim());
  }

  return blocks.filter(Boolean);
}
92
+
93
/**
 * Parses one tracked file into export "objects" for the semantic index.
 *
 * Returns [] for paths that are missing on disk or not source-like.
 *
 * @param {string} repoPath - Absolute path of the repo/worktree root.
 * @param {string} filePath - Repo-relative file path.
 * @returns {Array<object>} One record per exported symbol, with
 *   object_id, file_path, kind, name, area, subsystem_tags, source_text.
 */
function parseFileObjects(repoPath, filePath) {
  const absolutePath = join(repoPath, filePath);
  if (!existsSync(absolutePath) || !isSourceLikePath(filePath)) return [];

  const content = readFileSync(absolutePath, 'utf8');
  const exports = extractExports(content);
  const exportBlocks = extractExportBlocks(content);
  const subsystemTags = classifySubsystems(filePath);
  const area = areaForPath(filePath);

  // Fix: `exports` is grouped by kind (regex pattern order, deduplicated)
  // while `exportBlocks` is in file order, so pairing them by array index
  // attached source text to the wrong export whenever declaration order
  // differed from kind order. Match each export to the first block that
  // mentions its name instead (heuristic: a name appearing in an earlier
  // unrelated block can still mismatch, but file-order collisions are rare).
  const sourceTextFor = (entry) => {
    if (entry.kind === 'default') {
      return exportBlocks.find((block) => /export\s+default\b/.test(block));
    }
    // Export names match [A-Za-z_$][\w$]*; '$' is the only regex-special
    // character possible, so escape just that.
    const escapedName = entry.name.replace(/\$/g, '\\$&');
    const namePattern = new RegExp(`\\b${escapedName}\\b`);
    return exportBlocks.find((block) => namePattern.test(block));
  };

  return exports.map((entry) => ({
    object_id: `${filePath}#${entry.kind}:${entry.name}`,
    file_path: filePath,
    kind: entry.kind,
    name: entry.name,
    area,
    subsystem_tags: subsystemTags,
    // Fallback placeholder when no block could be matched.
    source_text: sourceTextFor(entry) || `export ${entry.kind} ${entry.name}`,
  }));
}
113
+
114
/**
 * Lists git-tracked files under `repoPath`.
 * Best-effort: any git failure (missing binary, not a repository,
 * nonexistent path) yields an empty list rather than throwing.
 * @param {string} repoPath
 * @returns {string[]}
 */
function trackedFiles(repoPath) {
  let listing;
  try {
    listing = execSync('git ls-files', {
      cwd: repoPath,
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
    });
  } catch {
    return [];
  }
  return listing.trim().split('\n').filter(Boolean);
}
126
+
127
/**
 * Builds a semantic index (exported symbols, areas, subsystem tags) for a
 * repo or worktree. `source_text` is intentionally stripped from the
 * returned objects to keep the index compact.
 * @param {string} repoPath - Absolute path to scan.
 * @param {string[]|null} [filePaths] - Optional explicit file list;
 *   defaults to the git-tracked files of `repoPath`.
 * @returns {{generated_at: string, object_count: number, objects: Array<object>}}
 */
export function buildSemanticIndexForPath(repoPath, filePaths = null) {
  const candidates = filePaths || trackedFiles(repoPath);

  const objects = [];
  for (const filePath of candidates) {
    if (!isSourceLikePath(filePath)) continue;
    objects.push(...parseFileObjects(repoPath, filePath));
  }

  // Stable ordering: file path, then kind, then name.
  objects.sort((left, right) =>
    left.file_path.localeCompare(right.file_path)
    || left.kind.localeCompare(right.kind)
    || left.name.localeCompare(right.name)
  );

  return {
    generated_at: new Date().toISOString(),
    object_count: objects.length,
    objects: objects.map(({ source_text, ...summary }) => summary),
  };
}
144
+
145
/**
 * Compares every pair of worktree semantic indexes and reports conflicts.
 *
 * Two kinds are emitted:
 *  - 'semantic_object_overlap' (severity 'blocked'): the same object_id
 *    appears in both worktrees.
 *  - 'semantic_name_overlap' (severity 'warn'): different objects share a
 *    name and also share a subsystem tag or an area.
 *
 * @param {Array<{worktree: string, objects: Array<object>}>} semanticIndexes
 * @returns {Array<object>} Structurally deduplicated conflict records.
 */
export function detectSemanticConflicts(semanticIndexes = []) {
  const found = [];

  for (let a = 0; a < semanticIndexes.length; a++) {
    for (let b = a + 1; b < semanticIndexes.length; b++) {
      const left = semanticIndexes[a];
      const right = semanticIndexes[b];

      // Bucket the right-hand objects by name for O(1) lookup.
      const rightByName = new Map();
      for (const candidate of right.objects) {
        const bucket = rightByName.get(candidate.name);
        if (bucket) bucket.push(candidate);
        else rightByName.set(candidate.name, [candidate]);
      }

      for (const leftObject of left.objects) {
        for (const rightObject of rightByName.get(leftObject.name) || []) {
          if (leftObject.object_id === rightObject.object_id) {
            // Identical object touched by both worktrees: hard conflict;
            // skip the softer name-overlap check for this pair.
            found.push({
              type: 'semantic_object_overlap',
              severity: 'blocked',
              worktreeA: left.worktree,
              worktreeB: right.worktree,
              object_name: leftObject.name,
              object_kind: leftObject.kind,
              fileA: leftObject.file_path,
              fileB: rightObject.file_path,
              area: leftObject.area,
            });
            continue;
          }

          const sharedSubsystems = leftObject.subsystem_tags.filter(
            (tag) => rightObject.subsystem_tags.includes(tag),
          );
          if (sharedSubsystems.length === 0 && leftObject.area !== rightObject.area) {
            continue;
          }
          found.push({
            type: 'semantic_name_overlap',
            severity: 'warn',
            worktreeA: left.worktree,
            worktreeB: right.worktree,
            object_name: leftObject.name,
            object_kind_a: leftObject.kind,
            object_kind_b: rightObject.kind,
            fileA: leftObject.file_path,
            fileB: rightObject.file_path,
            shared_subsystems: sharedSubsystems,
            area: leftObject.area === rightObject.area ? leftObject.area : null,
          });
        }
      }
    }
  }

  // Deduplicate structurally identical conflicts. Key order is stable
  // because every conflict is built from a fixed object literal shape.
  const seen = new Set();
  const deduped = [];
  for (const conflict of found) {
    const key = JSON.stringify(conflict);
    if (seen.has(key)) continue;
    seen.add(key);
    deduped.push(JSON.parse(key));
  }
  return deduped;
}
199
+
200
/**
 * Builds a per-worktree semantic index and writes it to
 * `<repoRoot>/.switchman/semantic-index.json` (pretty-printed, trailing
 * newline).
 * @param {string} repoRoot - Repository root; output lives under it.
 * @param {{worktrees?: Array<{name: string, branch?: string, path: string}>}} [options]
 * @returns {{output_path: string, semantic_index: object}}
 */
export function materializeSemanticIndex(repoRoot, { worktrees = [] } = {}) {
  const indexedWorktrees = worktrees.map((worktree) => ({
    worktree: worktree.name,
    branch: worktree.branch || 'unknown',
    index: buildSemanticIndexForPath(worktree.path),
  }));
  indexedWorktrees.sort((left, right) => left.worktree.localeCompare(right.worktree));

  const semanticIndex = {
    generated_at: new Date().toISOString(),
    worktrees: indexedWorktrees,
  };

  const switchmanDir = join(repoRoot, '.switchman');
  if (!existsSync(switchmanDir)) mkdirSync(switchmanDir, { recursive: true });
  const outputPath = join(switchmanDir, 'semantic-index.json');
  writeFileSync(outputPath, `${JSON.stringify(semanticIndex, null, 2)}\n`);

  return {
    output_path: outputPath,
    semantic_index: semanticIndex,
  };
}
221
+
222
/**
 * Converts a raw code_objects DB row into its in-memory shape:
 * `subsystem_tags` is stored as a JSON string and is parsed into an
 * array here (missing/empty values become []).
 * @param {object} row - Raw row from the code_objects table.
 * @returns {object} Shallow copy with subsystem_tags parsed.
 */
function normalizeObjectRow(row) {
  const subsystemTags = JSON.parse(row.subsystem_tags || '[]');
  return { ...row, subsystem_tags: subsystemTags };
}
228
+
229
/**
 * Parses source files and upserts their exported objects into the
 * code_objects table, keyed on object_id.
 * @param {object} db - Prepared-statement style DB handle (e.g. better-sqlite3).
 * @param {string} repoRoot - Repository root to scan.
 * @param {{filePaths?: string[]|null}} [options] - Explicit file list;
 *   defaults to git-tracked files.
 * @returns {Array<object>} All stored code objects (see listCodeObjects).
 */
export function importCodeObjectsToStore(db, repoRoot, { filePaths = null } = {}) {
  const sourceFiles = (filePaths || trackedFiles(repoRoot)).filter(isSourceLikePath);
  const parsedObjects = sourceFiles.flatMap((filePath) => parseFileObjects(repoRoot, filePath));

  const upsert = db.prepare(`
    INSERT INTO code_objects (object_id, file_path, kind, name, source_text, subsystem_tags, area, updated_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))
    ON CONFLICT(object_id) DO UPDATE SET
      file_path=excluded.file_path,
      kind=excluded.kind,
      name=excluded.name,
      source_text=excluded.source_text,
      subsystem_tags=excluded.subsystem_tags,
      area=excluded.area,
      updated_at=datetime('now')
  `);

  for (const entry of parsedObjects) {
    upsert.run(
      entry.object_id,
      entry.file_path,
      entry.kind,
      entry.name,
      entry.source_text,
      // Tags are serialized to JSON text in the store.
      JSON.stringify(entry.subsystem_tags || []),
      entry.area || null,
    );
  }

  return listCodeObjects(db);
}
262
+
263
/**
 * Reads code objects from the store, optionally filtered to one file,
 * ordered by (file_path, kind, name).
 * @param {object} db - Prepared-statement style DB handle.
 * @param {{filePath?: string|null}} [options]
 * @returns {Array<object>} Rows with subsystem_tags parsed to arrays.
 */
export function listCodeObjects(db, { filePath = null } = {}) {
  const whereClause = filePath ? 'WHERE file_path=?' : '';
  const bindings = filePath ? [filePath] : [];
  const statement = db.prepare(`
    SELECT *
    FROM code_objects
    ${whereClause}
    ORDER BY file_path ASC, kind ASC, name ASC
  `);
  return statement.all(...bindings).map((row) => normalizeObjectRow(row));
}
273
+
274
/**
 * Replaces the stored source text for one code object and bumps its
 * updated_at timestamp.
 * @param {object} db - Prepared-statement style DB handle.
 * @param {string} objectId - code_objects primary key.
 * @param {string} sourceText - New source text.
 * @returns {object|null} The updated, normalized row, or null when
 *   no row matches objectId.
 */
export function updateCodeObjectSource(db, objectId, sourceText) {
  const update = db.prepare(`
    UPDATE code_objects
    SET source_text=?,
        updated_at=datetime('now')
    WHERE object_id=?
  `);
  update.run(sourceText, objectId);

  const row = db.prepare(`SELECT * FROM code_objects WHERE object_id=?`).get(objectId);
  if (!row) return null;
  return normalizeObjectRow(row);
}
284
+
285
/**
 * Writes every stored code object back to disk, one file per original
 * file_path, under `outputRoot`. Objects within a file are ordered by
 * (kind, name) and separated by a blank line; each file ends with a
 * trailing newline.
 * @param {object} db - Prepared-statement style DB handle.
 * @param {string} repoRoot - Repository root (default output location).
 * @param {{outputRoot?: string}} [options] - Alternate output root.
 * @returns {{output_root: string, file_count: number, files: string[]}}
 */
export function materializeCodeObjects(db, repoRoot, { outputRoot = repoRoot } = {}) {
  const objects = listCodeObjects(db);

  // Group stored objects by their originating file path.
  const byFile = new Map();
  for (const object of objects) {
    if (!byFile.has(object.file_path)) byFile.set(object.file_path, []);
    byFile.get(object.file_path).push(object);
  }

  const files = [];
  for (const [filePath, entries] of byFile.entries()) {
    const absolutePath = join(outputRoot, filePath);
    // Fix: the parent directory was previously derived by splitting on '/',
    // which breaks on Windows where path.join emits '\' separators; dirname
    // is separator-aware. Recursive mkdirSync is idempotent, so no
    // existsSync guard is needed.
    mkdirSync(dirname(absolutePath), { recursive: true });

    const ordered = entries
      .slice()
      .sort((left, right) => left.kind.localeCompare(right.kind) || left.name.localeCompare(right.name));
    const content = `${ordered.map((entry) => entry.source_text.trim()).join('\n\n')}\n`;
    writeFileSync(absolutePath, content);
    files.push(filePath);
  }

  return {
    output_root: outputRoot,
    file_count: files.length,
    files: files.sort(),
  };
}