trellis 2.0.13 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +1 -1
- package/dist/embeddings/index.js +1 -1
- package/dist/{index-7gvjxt27.js → index-2917tjd8.js} +1 -1
- package/package.json +2 -10
- package/dist/transformers.node-bx3q9d7k.js +0 -33130
- package/src/cli/index.ts +0 -3356
- package/src/core/agents/harness.ts +0 -380
- package/src/core/agents/index.ts +0 -18
- package/src/core/agents/types.ts +0 -90
- package/src/core/index.ts +0 -118
- package/src/core/kernel/middleware.ts +0 -44
- package/src/core/kernel/trellis-kernel.ts +0 -593
- package/src/core/ontology/builtins.ts +0 -248
- package/src/core/ontology/index.ts +0 -34
- package/src/core/ontology/registry.ts +0 -209
- package/src/core/ontology/types.ts +0 -124
- package/src/core/ontology/validator.ts +0 -382
- package/src/core/persist/backend.ts +0 -74
- package/src/core/persist/sqlite-backend.ts +0 -298
- package/src/core/plugins/index.ts +0 -17
- package/src/core/plugins/registry.ts +0 -322
- package/src/core/plugins/types.ts +0 -126
- package/src/core/query/datalog.ts +0 -188
- package/src/core/query/engine.ts +0 -370
- package/src/core/query/index.ts +0 -34
- package/src/core/query/parser.ts +0 -481
- package/src/core/query/types.ts +0 -200
- package/src/core/store/eav-store.ts +0 -467
- package/src/decisions/auto-capture.ts +0 -136
- package/src/decisions/hooks.ts +0 -163
- package/src/decisions/index.ts +0 -261
- package/src/decisions/types.ts +0 -103
- package/src/embeddings/auto-embed.ts +0 -248
- package/src/embeddings/chunker.ts +0 -327
- package/src/embeddings/index.ts +0 -48
- package/src/embeddings/model.ts +0 -112
- package/src/embeddings/search.ts +0 -305
- package/src/embeddings/store.ts +0 -313
- package/src/embeddings/types.ts +0 -92
- package/src/engine.ts +0 -1125
- package/src/garden/cluster.ts +0 -330
- package/src/garden/garden.ts +0 -306
- package/src/garden/index.ts +0 -29
- package/src/git/git-exporter.ts +0 -286
- package/src/git/git-importer.ts +0 -329
- package/src/git/git-reader.ts +0 -189
- package/src/git/index.ts +0 -22
- package/src/identity/governance.ts +0 -211
- package/src/identity/identity.ts +0 -224
- package/src/identity/index.ts +0 -30
- package/src/identity/signing-middleware.ts +0 -97
- package/src/index.ts +0 -29
- package/src/links/index.ts +0 -49
- package/src/links/lifecycle.ts +0 -400
- package/src/links/parser.ts +0 -484
- package/src/links/ref-index.ts +0 -186
- package/src/links/resolver.ts +0 -314
- package/src/links/types.ts +0 -108
- package/src/mcp/index.ts +0 -22
- package/src/mcp/server.ts +0 -1278
- package/src/semantic/csharp-parser.ts +0 -493
- package/src/semantic/go-parser.ts +0 -585
- package/src/semantic/index.ts +0 -34
- package/src/semantic/java-parser.ts +0 -456
- package/src/semantic/python-parser.ts +0 -659
- package/src/semantic/ruby-parser.ts +0 -446
- package/src/semantic/rust-parser.ts +0 -784
- package/src/semantic/semantic-merge.ts +0 -210
- package/src/semantic/ts-parser.ts +0 -681
- package/src/semantic/types.ts +0 -175
- package/src/sync/http-transport.ts +0 -144
- package/src/sync/index.ts +0 -43
- package/src/sync/memory-transport.ts +0 -66
- package/src/sync/multi-repo.ts +0 -200
- package/src/sync/reconciler.ts +0 -237
- package/src/sync/sync-engine.ts +0 -258
- package/src/sync/types.ts +0 -104
- package/src/sync/ws-transport.ts +0 -145
- package/src/ui/client.html +0 -695
- package/src/ui/server.ts +0 -419
- package/src/vcs/blob-store.ts +0 -124
- package/src/vcs/branch.ts +0 -150
- package/src/vcs/checkpoint.ts +0 -64
- package/src/vcs/decompose.ts +0 -469
- package/src/vcs/diff.ts +0 -409
- package/src/vcs/engine-context.ts +0 -26
- package/src/vcs/index.ts +0 -23
- package/src/vcs/issue.ts +0 -800
- package/src/vcs/merge.ts +0 -425
- package/src/vcs/milestone.ts +0 -124
- package/src/vcs/ops.ts +0 -59
- package/src/vcs/types.ts +0 -213
- package/src/vcs/vcs-middleware.ts +0 -81
- package/src/watcher/fs-watcher.ts +0 -255
- package/src/watcher/index.ts +0 -9
- package/src/watcher/ingestion.ts +0 -116
|
@@ -1,681 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* TypeScript/JavaScript Parser Adapter
|
|
3
|
-
*
|
|
4
|
-
* DESIGN.md §4.2 — Structural extraction of top-level declarations
|
|
5
|
-
* using regex-based parsing. This is a Tier 1 implementation that
|
|
6
|
-
* extracts functions, classes, interfaces, type aliases, enums,
|
|
7
|
-
* variables, imports, and exports without requiring tree-sitter.
|
|
8
|
-
*
|
|
9
|
-
* Tree-sitter can be swapped in later for full Tier 2 AST fidelity.
|
|
10
|
-
*/
|
|
11
|
-
|
|
12
|
-
import type {
|
|
13
|
-
ParserAdapter,
|
|
14
|
-
ParseResult,
|
|
15
|
-
ASTEntity,
|
|
16
|
-
ASTEntityKind,
|
|
17
|
-
ImportRelation,
|
|
18
|
-
ExportRelation,
|
|
19
|
-
SemanticPatch,
|
|
20
|
-
} from './types.js';
|
|
21
|
-
|
|
22
|
-
// ---------------------------------------------------------------------------
|
|
23
|
-
// Parser Adapter
|
|
24
|
-
// ---------------------------------------------------------------------------
|
|
25
|
-
|
|
26
|
-
export const typescriptParser: ParserAdapter = {
|
|
27
|
-
languages: ['typescript', 'javascript', 'tsx', 'jsx'],
|
|
28
|
-
|
|
29
|
-
parse(content: string, filePath: string): ParseResult {
|
|
30
|
-
const fileEntityId = `file:${filePath}`;
|
|
31
|
-
const language = detectLanguage(filePath);
|
|
32
|
-
|
|
33
|
-
return {
|
|
34
|
-
fileEntityId,
|
|
35
|
-
filePath,
|
|
36
|
-
language,
|
|
37
|
-
declarations: extractDeclarations(content, filePath),
|
|
38
|
-
imports: extractImports(content),
|
|
39
|
-
exports: extractExports(content),
|
|
40
|
-
};
|
|
41
|
-
},
|
|
42
|
-
|
|
43
|
-
diff(oldResult: ParseResult, newResult: ParseResult): SemanticPatch[] {
|
|
44
|
-
return computeSemanticDiff(oldResult, newResult);
|
|
45
|
-
},
|
|
46
|
-
};
|
|
47
|
-
|
|
48
|
-
// ---------------------------------------------------------------------------
|
|
49
|
-
// Language detection
|
|
50
|
-
// ---------------------------------------------------------------------------
|
|
51
|
-
|
|
52
|
-
function detectLanguage(filePath: string): string {
|
|
53
|
-
if (filePath.endsWith('.tsx')) return 'tsx';
|
|
54
|
-
if (filePath.endsWith('.jsx')) return 'jsx';
|
|
55
|
-
if (filePath.endsWith('.ts')) return 'typescript';
|
|
56
|
-
if (filePath.endsWith('.js') || filePath.endsWith('.mjs') || filePath.endsWith('.cjs'))
|
|
57
|
-
return 'javascript';
|
|
58
|
-
return 'unknown';
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
// ---------------------------------------------------------------------------
|
|
62
|
-
// Declaration extraction
|
|
63
|
-
// ---------------------------------------------------------------------------
|
|
64
|
-
|
|
65
|
-
/**
|
|
66
|
-
* Extract top-level declarations from TypeScript/JavaScript source.
|
|
67
|
-
* Uses regex patterns to identify structural boundaries.
|
|
68
|
-
*/
|
|
69
|
-
function extractDeclarations(content: string, filePath: string): ASTEntity[] {
|
|
70
|
-
const declarations: ASTEntity[] = [];
|
|
71
|
-
const lines = content.split('\n');
|
|
72
|
-
|
|
73
|
-
let i = 0;
|
|
74
|
-
while (i < lines.length) {
|
|
75
|
-
const line = lines[i];
|
|
76
|
-
const trimmed = line.trim();
|
|
77
|
-
|
|
78
|
-
// Skip empty lines, comments, imports, exports (handled separately)
|
|
79
|
-
if (
|
|
80
|
-
!trimmed ||
|
|
81
|
-
trimmed.startsWith('//') ||
|
|
82
|
-
trimmed.startsWith('/*') ||
|
|
83
|
-
trimmed.startsWith('*') ||
|
|
84
|
-
trimmed.startsWith('import ') ||
|
|
85
|
-
trimmed.startsWith('export default ') ||
|
|
86
|
-
(trimmed.startsWith('export {') && !trimmed.includes('class') && !trimmed.includes('function'))
|
|
87
|
-
) {
|
|
88
|
-
i++;
|
|
89
|
-
continue;
|
|
90
|
-
}
|
|
91
|
-
|
|
92
|
-
// Strip leading export/declare/async/abstract keywords for detection
|
|
93
|
-
const stripped = trimmed
|
|
94
|
-
.replace(/^export\s+/, '')
|
|
95
|
-
.replace(/^declare\s+/, '')
|
|
96
|
-
.replace(/^abstract\s+/, '')
|
|
97
|
-
.replace(/^async\s+/, '');
|
|
98
|
-
|
|
99
|
-
const result = tryExtractDeclaration(stripped, trimmed, lines, i, filePath);
|
|
100
|
-
if (result) {
|
|
101
|
-
declarations.push(result.entity);
|
|
102
|
-
i = result.endLine + 1;
|
|
103
|
-
} else {
|
|
104
|
-
i++;
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
return declarations;
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
/** A single extracted declaration plus where its source text ends. */
interface ExtractionResult {
  // The declaration entity built from the scanned lines.
  entity: ASTEntity;
  // Inclusive 0-based index of the declaration's final line; the caller
  // resumes scanning at endLine + 1.
  endLine: number;
}
|
|
115
|
-
|
|
116
|
-
function tryExtractDeclaration(
|
|
117
|
-
stripped: string,
|
|
118
|
-
originalLine: string,
|
|
119
|
-
lines: string[],
|
|
120
|
-
startLine: number,
|
|
121
|
-
filePath: string,
|
|
122
|
-
): ExtractionResult | null {
|
|
123
|
-
// Function declaration
|
|
124
|
-
let match = stripped.match(/^function\s+(\w+)/);
|
|
125
|
-
if (match) {
|
|
126
|
-
return extractBlock(match[1], 'FunctionDef', lines, startLine, filePath);
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
// Class declaration
|
|
130
|
-
match = stripped.match(/^class\s+(\w+)/);
|
|
131
|
-
if (match) {
|
|
132
|
-
return extractBlock(match[1], 'ClassDef', lines, startLine, filePath);
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
// Interface declaration
|
|
136
|
-
match = stripped.match(/^interface\s+(\w+)/);
|
|
137
|
-
if (match) {
|
|
138
|
-
return extractBlock(match[1], 'InterfaceDef', lines, startLine, filePath);
|
|
139
|
-
}
|
|
140
|
-
|
|
141
|
-
// Type alias
|
|
142
|
-
match = stripped.match(/^type\s+(\w+)/);
|
|
143
|
-
if (match) {
|
|
144
|
-
return extractTypeAlias(match[1], lines, startLine, filePath);
|
|
145
|
-
}
|
|
146
|
-
|
|
147
|
-
// Enum declaration
|
|
148
|
-
match = stripped.match(/^enum\s+(\w+)/);
|
|
149
|
-
if (match) {
|
|
150
|
-
return extractBlock(match[1], 'EnumDef', lines, startLine, filePath);
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
// Variable declarations (const/let/var at top level)
|
|
154
|
-
match = stripped.match(/^(?:const|let|var)\s+(\w+)/);
|
|
155
|
-
if (match) {
|
|
156
|
-
return extractVariable(match[1], lines, startLine, filePath);
|
|
157
|
-
}
|
|
158
|
-
|
|
159
|
-
return null;
|
|
160
|
-
}
|
|
161
|
-
|
|
162
|
-
/**
 * Extract a brace-delimited block (function, class, interface, enum).
 *
 * Scans forward from startLine, counting `{`/`}` characters until the
 * block's nesting returns to zero, then builds an ASTEntity spanning the
 * whole block. NOTE(review): brace counting ignores braces inside string
 * literals and comments — accepted limitation of the Tier 1 regex parser.
 */
function extractBlock(
  name: string,
  kind: ASTEntityKind,
  lines: string[],
  startLine: number,
  filePath: string,
): ExtractionResult {
  let depth = 0;          // current `{` nesting depth
  let foundOpen = false;  // becomes true once the first `{` is seen
  let endLine = startLine;

  for (let i = startLine; i < lines.length; i++) {
    const line = lines[i];
    for (const ch of line) {
      if (ch === '{') {
        depth++;
        foundOpen = true;
      } else if (ch === '}') {
        depth--;
      }
    }

    // Block closed: an opening brace was seen and nesting is back to zero.
    if (foundOpen && depth <= 0) {
      endLine = i;
      break;
    }

    // If we haven't found an opening brace and this is a one-liner
    // (safety valve: give up after 50 lines so a missing brace cannot
    // make the scan run to end-of-file).
    if (i > startLine + 50) {
      endLine = i;
      break;
    }
    endLine = i;
  }

  const rawText = lines.slice(startLine, endLine + 1).join('\n');
  // Character offset of the block start: total length of the preceding
  // lines plus one '\n' separator when there is at least one such line.
  const startOffset = lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
  const endOffset = startOffset + rawText.length;

  // Only container-like kinds get member children in Tier 1.
  const children = kind === 'ClassDef' || kind === 'InterfaceDef'
    ? extractClassMembers(rawText, name, filePath)
    : [];

  return {
    entity: {
      id: makeEntityId(filePath, kind, name),
      kind,
      name,
      scopePath: name,
      span: [startOffset, endOffset],
      rawText,
      signature: normalizeSignature(rawText),
      children,
    },
    endLine,
  };
}
|
|
222
|
-
|
|
223
|
-
/**
|
|
224
|
-
* Extract a type alias (may span multiple lines with = ... ;).
|
|
225
|
-
*/
|
|
226
|
-
function extractTypeAlias(
|
|
227
|
-
name: string,
|
|
228
|
-
lines: string[],
|
|
229
|
-
startLine: number,
|
|
230
|
-
filePath: string,
|
|
231
|
-
): ExtractionResult {
|
|
232
|
-
let endLine = startLine;
|
|
233
|
-
let depth = 0;
|
|
234
|
-
|
|
235
|
-
for (let i = startLine; i < lines.length; i++) {
|
|
236
|
-
const line = lines[i];
|
|
237
|
-
for (const ch of line) {
|
|
238
|
-
if (ch === '{' || ch === '(' || ch === '<') depth++;
|
|
239
|
-
else if (ch === '}' || ch === ')' || ch === '>') depth--;
|
|
240
|
-
}
|
|
241
|
-
|
|
242
|
-
if (line.includes(';') && depth <= 0) {
|
|
243
|
-
endLine = i;
|
|
244
|
-
break;
|
|
245
|
-
}
|
|
246
|
-
endLine = i;
|
|
247
|
-
}
|
|
248
|
-
|
|
249
|
-
const rawText = lines.slice(startLine, endLine + 1).join('\n');
|
|
250
|
-
const startOffset = lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
|
|
251
|
-
|
|
252
|
-
return {
|
|
253
|
-
entity: {
|
|
254
|
-
id: makeEntityId(filePath, 'TypeAlias', name),
|
|
255
|
-
kind: 'TypeAlias',
|
|
256
|
-
name,
|
|
257
|
-
scopePath: name,
|
|
258
|
-
span: [startOffset, startOffset + rawText.length],
|
|
259
|
-
rawText,
|
|
260
|
-
signature: normalizeSignature(rawText),
|
|
261
|
-
children: [],
|
|
262
|
-
},
|
|
263
|
-
endLine,
|
|
264
|
-
};
|
|
265
|
-
}
|
|
266
|
-
|
|
267
|
-
/**
|
|
268
|
-
* Extract a variable declaration.
|
|
269
|
-
*/
|
|
270
|
-
function extractVariable(
|
|
271
|
-
name: string,
|
|
272
|
-
lines: string[],
|
|
273
|
-
startLine: number,
|
|
274
|
-
filePath: string,
|
|
275
|
-
): ExtractionResult {
|
|
276
|
-
let endLine = startLine;
|
|
277
|
-
let depth = 0;
|
|
278
|
-
|
|
279
|
-
for (let i = startLine; i < lines.length; i++) {
|
|
280
|
-
const line = lines[i];
|
|
281
|
-
for (const ch of line) {
|
|
282
|
-
if (ch === '{' || ch === '(' || ch === '[') depth++;
|
|
283
|
-
else if (ch === '}' || ch === ')' || ch === ']') depth--;
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
if (depth <= 0 && (line.includes(';') || (i > startLine && lines[i + 1]?.trim().match(/^(?:export|const|let|var|function|class|interface|type|enum|import)\s/)))) {
|
|
287
|
-
endLine = i;
|
|
288
|
-
break;
|
|
289
|
-
}
|
|
290
|
-
endLine = i;
|
|
291
|
-
}
|
|
292
|
-
|
|
293
|
-
const rawText = lines.slice(startLine, endLine + 1).join('\n');
|
|
294
|
-
const startOffset = lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
|
|
295
|
-
|
|
296
|
-
return {
|
|
297
|
-
entity: {
|
|
298
|
-
id: makeEntityId(filePath, 'VariableDecl', name),
|
|
299
|
-
kind: 'VariableDecl',
|
|
300
|
-
name,
|
|
301
|
-
scopePath: name,
|
|
302
|
-
span: [startOffset, startOffset + rawText.length],
|
|
303
|
-
rawText,
|
|
304
|
-
signature: normalizeSignature(rawText),
|
|
305
|
-
children: [],
|
|
306
|
-
},
|
|
307
|
-
endLine,
|
|
308
|
-
};
|
|
309
|
-
}
|
|
310
|
-
|
|
311
|
-
/**
|
|
312
|
-
* Extract class/interface members as child entities.
|
|
313
|
-
*/
|
|
314
|
-
function extractClassMembers(
|
|
315
|
-
classText: string,
|
|
316
|
-
className: string,
|
|
317
|
-
filePath: string,
|
|
318
|
-
): ASTEntity[] {
|
|
319
|
-
const children: ASTEntity[] = [];
|
|
320
|
-
const lines = classText.split('\n');
|
|
321
|
-
|
|
322
|
-
// Skip the class opening line
|
|
323
|
-
for (let i = 1; i < lines.length - 1; i++) {
|
|
324
|
-
const line = lines[i].trim();
|
|
325
|
-
if (!line || line.startsWith('//') || line.startsWith('/*') || line.startsWith('*')) continue;
|
|
326
|
-
|
|
327
|
-
// Method
|
|
328
|
-
const methodMatch = line.match(
|
|
329
|
-
/^(?:(?:public|private|protected|static|async|abstract|readonly)\s+)*(\w+)\s*\(/,
|
|
330
|
-
);
|
|
331
|
-
if (methodMatch && methodMatch[1] !== 'if' && methodMatch[1] !== 'for' && methodMatch[1] !== 'while') {
|
|
332
|
-
const methodName = methodMatch[1];
|
|
333
|
-
const kind: ASTEntityKind = methodName === 'constructor' ? 'Constructor' : 'MethodDef';
|
|
334
|
-
children.push({
|
|
335
|
-
id: makeEntityId(filePath, kind, `${className}.${methodName}`),
|
|
336
|
-
kind,
|
|
337
|
-
name: methodName,
|
|
338
|
-
scopePath: `${className}.${methodName}`,
|
|
339
|
-
span: [0, 0], // Simplified for Tier 1
|
|
340
|
-
rawText: line,
|
|
341
|
-
signature: normalizeSignature(line),
|
|
342
|
-
children: [],
|
|
343
|
-
});
|
|
344
|
-
continue;
|
|
345
|
-
}
|
|
346
|
-
|
|
347
|
-
// Property
|
|
348
|
-
const propMatch = line.match(
|
|
349
|
-
/^(?:(?:public|private|protected|static|readonly)\s+)*(\w+)\s*[?:]/,
|
|
350
|
-
);
|
|
351
|
-
if (propMatch) {
|
|
352
|
-
const propName = propMatch[1];
|
|
353
|
-
children.push({
|
|
354
|
-
id: makeEntityId(filePath, 'PropertyDef', `${className}.${propName}`),
|
|
355
|
-
kind: 'PropertyDef',
|
|
356
|
-
name: propName,
|
|
357
|
-
scopePath: `${className}.${propName}`,
|
|
358
|
-
span: [0, 0],
|
|
359
|
-
rawText: line,
|
|
360
|
-
signature: normalizeSignature(line),
|
|
361
|
-
children: [],
|
|
362
|
-
});
|
|
363
|
-
}
|
|
364
|
-
}
|
|
365
|
-
|
|
366
|
-
return children;
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
// ---------------------------------------------------------------------------
|
|
370
|
-
// Import extraction
|
|
371
|
-
// ---------------------------------------------------------------------------
|
|
372
|
-
|
|
373
|
-
function extractImports(content: string): ImportRelation[] {
|
|
374
|
-
const imports: ImportRelation[] = [];
|
|
375
|
-
const lines = content.split('\n');
|
|
376
|
-
|
|
377
|
-
for (let i = 0; i < lines.length; i++) {
|
|
378
|
-
const line = lines[i].trim();
|
|
379
|
-
if (!line.startsWith('import ')) continue;
|
|
380
|
-
|
|
381
|
-
// Collect multi-line imports
|
|
382
|
-
let full = line;
|
|
383
|
-
while (!full.includes(';') && !full.match(/from\s+['"]/) && i + 1 < lines.length) {
|
|
384
|
-
i++;
|
|
385
|
-
full += ' ' + lines[i].trim();
|
|
386
|
-
}
|
|
387
|
-
if (!full.includes(';') && i + 1 < lines.length) {
|
|
388
|
-
i++;
|
|
389
|
-
full += ' ' + lines[i].trim();
|
|
390
|
-
}
|
|
391
|
-
|
|
392
|
-
const rel = parseImport(full);
|
|
393
|
-
if (rel) imports.push(rel);
|
|
394
|
-
}
|
|
395
|
-
|
|
396
|
-
return imports;
|
|
397
|
-
}
|
|
398
|
-
|
|
399
|
-
function parseImport(text: string): ImportRelation | null {
|
|
400
|
-
// import { a, b } from 'module'
|
|
401
|
-
const namedMatch = text.match(/import\s+\{([^}]+)\}\s+from\s+['"]([^'"]+)['"]/);
|
|
402
|
-
if (namedMatch) {
|
|
403
|
-
const specifiers = namedMatch[1].split(',').map((s) => s.trim()).filter(Boolean);
|
|
404
|
-
return {
|
|
405
|
-
source: namedMatch[2],
|
|
406
|
-
specifiers,
|
|
407
|
-
isDefault: false,
|
|
408
|
-
isNamespace: false,
|
|
409
|
-
rawText: text,
|
|
410
|
-
span: [0, text.length],
|
|
411
|
-
};
|
|
412
|
-
}
|
|
413
|
-
|
|
414
|
-
// import Default from 'module'
|
|
415
|
-
const defaultMatch = text.match(/import\s+(\w+)\s+from\s+['"]([^'"]+)['"]/);
|
|
416
|
-
if (defaultMatch) {
|
|
417
|
-
return {
|
|
418
|
-
source: defaultMatch[2],
|
|
419
|
-
specifiers: [defaultMatch[1]],
|
|
420
|
-
isDefault: true,
|
|
421
|
-
isNamespace: false,
|
|
422
|
-
rawText: text,
|
|
423
|
-
span: [0, text.length],
|
|
424
|
-
};
|
|
425
|
-
}
|
|
426
|
-
|
|
427
|
-
// import * as Name from 'module'
|
|
428
|
-
const nsMatch = text.match(/import\s+\*\s+as\s+(\w+)\s+from\s+['"]([^'"]+)['"]/);
|
|
429
|
-
if (nsMatch) {
|
|
430
|
-
return {
|
|
431
|
-
source: nsMatch[2],
|
|
432
|
-
specifiers: [nsMatch[1]],
|
|
433
|
-
isDefault: false,
|
|
434
|
-
isNamespace: true,
|
|
435
|
-
rawText: text,
|
|
436
|
-
span: [0, text.length],
|
|
437
|
-
};
|
|
438
|
-
}
|
|
439
|
-
|
|
440
|
-
// import 'module' (side-effect only)
|
|
441
|
-
const sideEffectMatch = text.match(/import\s+['"]([^'"]+)['"]/);
|
|
442
|
-
if (sideEffectMatch) {
|
|
443
|
-
return {
|
|
444
|
-
source: sideEffectMatch[1],
|
|
445
|
-
specifiers: [],
|
|
446
|
-
isDefault: false,
|
|
447
|
-
isNamespace: false,
|
|
448
|
-
rawText: text,
|
|
449
|
-
span: [0, text.length],
|
|
450
|
-
};
|
|
451
|
-
}
|
|
452
|
-
|
|
453
|
-
// import type { ... } from 'module'
|
|
454
|
-
const typeMatch = text.match(/import\s+type\s+\{([^}]+)\}\s+from\s+['"]([^'"]+)['"]/);
|
|
455
|
-
if (typeMatch) {
|
|
456
|
-
const specifiers = typeMatch[1].split(',').map((s) => s.trim()).filter(Boolean);
|
|
457
|
-
return {
|
|
458
|
-
source: typeMatch[2],
|
|
459
|
-
specifiers,
|
|
460
|
-
isDefault: false,
|
|
461
|
-
isNamespace: false,
|
|
462
|
-
rawText: text,
|
|
463
|
-
span: [0, text.length],
|
|
464
|
-
};
|
|
465
|
-
}
|
|
466
|
-
|
|
467
|
-
return null;
|
|
468
|
-
}
|
|
469
|
-
|
|
470
|
-
// ---------------------------------------------------------------------------
|
|
471
|
-
// Export extraction
|
|
472
|
-
// ---------------------------------------------------------------------------
|
|
473
|
-
|
|
474
|
-
function extractExports(content: string): ExportRelation[] {
|
|
475
|
-
const exports: ExportRelation[] = [];
|
|
476
|
-
const lines = content.split('\n');
|
|
477
|
-
|
|
478
|
-
for (const line of lines) {
|
|
479
|
-
const trimmed = line.trim();
|
|
480
|
-
|
|
481
|
-
// export default ...
|
|
482
|
-
if (trimmed.startsWith('export default ')) {
|
|
483
|
-
const nameMatch = trimmed.match(/export default (?:class|function)?\s*(\w+)?/);
|
|
484
|
-
exports.push({
|
|
485
|
-
name: nameMatch?.[1] ?? 'default',
|
|
486
|
-
isDefault: true,
|
|
487
|
-
rawText: trimmed,
|
|
488
|
-
span: [0, trimmed.length],
|
|
489
|
-
});
|
|
490
|
-
continue;
|
|
491
|
-
}
|
|
492
|
-
|
|
493
|
-
// export { ... } from '...' or export { ... }
|
|
494
|
-
const reExportMatch = trimmed.match(/export\s+\{([^}]+)\}(?:\s+from\s+['"]([^'"]+)['"])?/);
|
|
495
|
-
if (reExportMatch && !trimmed.match(/export\s+(?:function|class|interface|type|enum|const|let|var)/)) {
|
|
496
|
-
const names = reExportMatch[1].split(',').map((s) => s.trim().split(/\s+as\s+/).pop()!).filter(Boolean);
|
|
497
|
-
for (const name of names) {
|
|
498
|
-
exports.push({
|
|
499
|
-
name,
|
|
500
|
-
isDefault: false,
|
|
501
|
-
source: reExportMatch[2],
|
|
502
|
-
rawText: trimmed,
|
|
503
|
-
span: [0, trimmed.length],
|
|
504
|
-
});
|
|
505
|
-
}
|
|
506
|
-
continue;
|
|
507
|
-
}
|
|
508
|
-
|
|
509
|
-
// export * from '...'
|
|
510
|
-
const starMatch = trimmed.match(/export\s+\*\s+from\s+['"]([^'"]+)['"]/);
|
|
511
|
-
if (starMatch) {
|
|
512
|
-
exports.push({
|
|
513
|
-
name: '*',
|
|
514
|
-
isDefault: false,
|
|
515
|
-
source: starMatch[1],
|
|
516
|
-
rawText: trimmed,
|
|
517
|
-
span: [0, trimmed.length],
|
|
518
|
-
});
|
|
519
|
-
}
|
|
520
|
-
}
|
|
521
|
-
|
|
522
|
-
return exports;
|
|
523
|
-
}
|
|
524
|
-
|
|
525
|
-
// ---------------------------------------------------------------------------
|
|
526
|
-
// Semantic diff
|
|
527
|
-
// ---------------------------------------------------------------------------
|
|
528
|
-
|
|
529
|
-
function computeSemanticDiff(
|
|
530
|
-
oldResult: ParseResult,
|
|
531
|
-
newResult: ParseResult,
|
|
532
|
-
): SemanticPatch[] {
|
|
533
|
-
const patches: SemanticPatch[] = [];
|
|
534
|
-
const fileId = newResult.fileEntityId;
|
|
535
|
-
|
|
536
|
-
// Diff declarations
|
|
537
|
-
const oldDecls = new Map(oldResult.declarations.map((d) => [d.id, d]));
|
|
538
|
-
const newDecls = new Map(newResult.declarations.map((d) => [d.id, d]));
|
|
539
|
-
|
|
540
|
-
// Also index by name for rename detection
|
|
541
|
-
const oldByName = new Map(oldResult.declarations.map((d) => [d.name, d]));
|
|
542
|
-
const newByName = new Map(newResult.declarations.map((d) => [d.name, d]));
|
|
543
|
-
|
|
544
|
-
// Detect additions
|
|
545
|
-
for (const [id, entity] of newDecls) {
|
|
546
|
-
if (!oldDecls.has(id)) {
|
|
547
|
-
// Check if this is a rename (same signature, different name)
|
|
548
|
-
const oldEntity = findRenamedEntity(entity, oldResult.declarations, newDecls);
|
|
549
|
-
if (oldEntity) {
|
|
550
|
-
patches.push({
|
|
551
|
-
kind: 'symbolRename',
|
|
552
|
-
entityId: oldEntity.id,
|
|
553
|
-
oldName: oldEntity.name,
|
|
554
|
-
newName: entity.name,
|
|
555
|
-
});
|
|
556
|
-
} else {
|
|
557
|
-
patches.push({ kind: 'symbolAdd', entity });
|
|
558
|
-
}
|
|
559
|
-
}
|
|
560
|
-
}
|
|
561
|
-
|
|
562
|
-
// Detect removals
|
|
563
|
-
for (const [id, entity] of oldDecls) {
|
|
564
|
-
if (!newDecls.has(id)) {
|
|
565
|
-
// Skip if this was a rename (already handled above)
|
|
566
|
-
const wasRenamed = findRenamedEntity(entity, newResult.declarations, oldDecls);
|
|
567
|
-
if (!wasRenamed) {
|
|
568
|
-
patches.push({ kind: 'symbolRemove', entityId: id, entityName: entity.name });
|
|
569
|
-
}
|
|
570
|
-
}
|
|
571
|
-
}
|
|
572
|
-
|
|
573
|
-
// Detect modifications
|
|
574
|
-
for (const [id, newEntity] of newDecls) {
|
|
575
|
-
const oldEntity = oldDecls.get(id);
|
|
576
|
-
if (oldEntity && oldEntity.signature !== newEntity.signature) {
|
|
577
|
-
patches.push({
|
|
578
|
-
kind: 'symbolModify',
|
|
579
|
-
entityId: id,
|
|
580
|
-
entityName: newEntity.name,
|
|
581
|
-
oldSignature: oldEntity.signature,
|
|
582
|
-
newSignature: newEntity.signature,
|
|
583
|
-
oldRawText: oldEntity.rawText,
|
|
584
|
-
newRawText: newEntity.rawText,
|
|
585
|
-
});
|
|
586
|
-
}
|
|
587
|
-
}
|
|
588
|
-
|
|
589
|
-
// Diff imports
|
|
590
|
-
const oldImports = new Map(oldResult.imports.map((imp) => [imp.source, imp]));
|
|
591
|
-
const newImports = new Map(newResult.imports.map((imp) => [imp.source, imp]));
|
|
592
|
-
|
|
593
|
-
for (const [source, imp] of newImports) {
|
|
594
|
-
const oldImp = oldImports.get(source);
|
|
595
|
-
if (!oldImp) {
|
|
596
|
-
patches.push({
|
|
597
|
-
kind: 'importAdd',
|
|
598
|
-
fileId,
|
|
599
|
-
source,
|
|
600
|
-
specifiers: imp.specifiers,
|
|
601
|
-
rawText: imp.rawText,
|
|
602
|
-
});
|
|
603
|
-
} else if (JSON.stringify(oldImp.specifiers.sort()) !== JSON.stringify(imp.specifiers.sort())) {
|
|
604
|
-
patches.push({
|
|
605
|
-
kind: 'importModify',
|
|
606
|
-
fileId,
|
|
607
|
-
source,
|
|
608
|
-
oldSpecifiers: oldImp.specifiers,
|
|
609
|
-
newSpecifiers: imp.specifiers,
|
|
610
|
-
});
|
|
611
|
-
}
|
|
612
|
-
}
|
|
613
|
-
|
|
614
|
-
for (const [source] of oldImports) {
|
|
615
|
-
if (!newImports.has(source)) {
|
|
616
|
-
patches.push({ kind: 'importRemove', fileId, source });
|
|
617
|
-
}
|
|
618
|
-
}
|
|
619
|
-
|
|
620
|
-
// Diff exports
|
|
621
|
-
const oldExports = new Map(oldResult.exports.map((exp) => [exp.name, exp]));
|
|
622
|
-
const newExports = new Map(newResult.exports.map((exp) => [exp.name, exp]));
|
|
623
|
-
|
|
624
|
-
for (const [name, exp] of newExports) {
|
|
625
|
-
if (!oldExports.has(name)) {
|
|
626
|
-
patches.push({ kind: 'exportAdd', fileId, name, rawText: exp.rawText });
|
|
627
|
-
}
|
|
628
|
-
}
|
|
629
|
-
|
|
630
|
-
for (const [name] of oldExports) {
|
|
631
|
-
if (!newExports.has(name)) {
|
|
632
|
-
patches.push({ kind: 'exportRemove', fileId, name });
|
|
633
|
-
}
|
|
634
|
-
}
|
|
635
|
-
|
|
636
|
-
return patches;
|
|
637
|
-
}
|
|
638
|
-
|
|
639
|
-
/**
|
|
640
|
-
* Find a possible renamed entity: same kind and similar signature, different name.
|
|
641
|
-
*/
|
|
642
|
-
function findRenamedEntity(
|
|
643
|
-
entity: ASTEntity,
|
|
644
|
-
candidates: ASTEntity[],
|
|
645
|
-
existingIds: Map<string, ASTEntity>,
|
|
646
|
-
): ASTEntity | null {
|
|
647
|
-
for (const candidate of candidates) {
|
|
648
|
-
if (candidate.kind !== entity.kind) continue;
|
|
649
|
-
if (candidate.name === entity.name) continue;
|
|
650
|
-
if (existingIds.has(candidate.id)) continue; // Still exists — not a rename
|
|
651
|
-
|
|
652
|
-
// Check signature similarity (replace name occurrences)
|
|
653
|
-
const normalizedOld = candidate.signature.replace(new RegExp(candidate.name, 'g'), '___');
|
|
654
|
-
const normalizedNew = entity.signature.replace(new RegExp(entity.name, 'g'), '___');
|
|
655
|
-
if (normalizedOld === normalizedNew) {
|
|
656
|
-
return candidate;
|
|
657
|
-
}
|
|
658
|
-
}
|
|
659
|
-
return null;
|
|
660
|
-
}
|
|
661
|
-
|
|
662
|
-
// ---------------------------------------------------------------------------
|
|
663
|
-
// Helpers
|
|
664
|
-
// ---------------------------------------------------------------------------
|
|
665
|
-
|
|
666
|
-
function makeEntityId(filePath: string, kind: string, name: string): string {
|
|
667
|
-
return `${kind}:${filePath}:${name}`;
|
|
668
|
-
}
|
|
669
|
-
|
|
670
|
-
/**
|
|
671
|
-
* Normalize a code snippet to a structural signature:
|
|
672
|
-
* strip comments, collapse whitespace, remove trailing semicolons.
|
|
673
|
-
*/
|
|
674
|
-
function normalizeSignature(text: string): string {
|
|
675
|
-
return text
|
|
676
|
-
.replace(/\/\/[^\n]*/g, '') // line comments
|
|
677
|
-
.replace(/\/\*[\s\S]*?\*\//g, '') // block comments
|
|
678
|
-
.replace(/\s+/g, ' ') // collapse whitespace
|
|
679
|
-
.replace(/;\s*$/, '') // trailing semicolons
|
|
680
|
-
.trim();
|
|
681
|
-
}
|