trellis 2.0.13 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +1 -1
- package/dist/embeddings/index.js +1 -1
- package/dist/{index-7gvjxt27.js → index-2917tjd8.js} +1 -1
- package/package.json +2 -10
- package/dist/transformers.node-bx3q9d7k.js +0 -33130
- package/src/cli/index.ts +0 -3356
- package/src/core/agents/harness.ts +0 -380
- package/src/core/agents/index.ts +0 -18
- package/src/core/agents/types.ts +0 -90
- package/src/core/index.ts +0 -118
- package/src/core/kernel/middleware.ts +0 -44
- package/src/core/kernel/trellis-kernel.ts +0 -593
- package/src/core/ontology/builtins.ts +0 -248
- package/src/core/ontology/index.ts +0 -34
- package/src/core/ontology/registry.ts +0 -209
- package/src/core/ontology/types.ts +0 -124
- package/src/core/ontology/validator.ts +0 -382
- package/src/core/persist/backend.ts +0 -74
- package/src/core/persist/sqlite-backend.ts +0 -298
- package/src/core/plugins/index.ts +0 -17
- package/src/core/plugins/registry.ts +0 -322
- package/src/core/plugins/types.ts +0 -126
- package/src/core/query/datalog.ts +0 -188
- package/src/core/query/engine.ts +0 -370
- package/src/core/query/index.ts +0 -34
- package/src/core/query/parser.ts +0 -481
- package/src/core/query/types.ts +0 -200
- package/src/core/store/eav-store.ts +0 -467
- package/src/decisions/auto-capture.ts +0 -136
- package/src/decisions/hooks.ts +0 -163
- package/src/decisions/index.ts +0 -261
- package/src/decisions/types.ts +0 -103
- package/src/embeddings/auto-embed.ts +0 -248
- package/src/embeddings/chunker.ts +0 -327
- package/src/embeddings/index.ts +0 -48
- package/src/embeddings/model.ts +0 -112
- package/src/embeddings/search.ts +0 -305
- package/src/embeddings/store.ts +0 -313
- package/src/embeddings/types.ts +0 -92
- package/src/engine.ts +0 -1125
- package/src/garden/cluster.ts +0 -330
- package/src/garden/garden.ts +0 -306
- package/src/garden/index.ts +0 -29
- package/src/git/git-exporter.ts +0 -286
- package/src/git/git-importer.ts +0 -329
- package/src/git/git-reader.ts +0 -189
- package/src/git/index.ts +0 -22
- package/src/identity/governance.ts +0 -211
- package/src/identity/identity.ts +0 -224
- package/src/identity/index.ts +0 -30
- package/src/identity/signing-middleware.ts +0 -97
- package/src/index.ts +0 -29
- package/src/links/index.ts +0 -49
- package/src/links/lifecycle.ts +0 -400
- package/src/links/parser.ts +0 -484
- package/src/links/ref-index.ts +0 -186
- package/src/links/resolver.ts +0 -314
- package/src/links/types.ts +0 -108
- package/src/mcp/index.ts +0 -22
- package/src/mcp/server.ts +0 -1278
- package/src/semantic/csharp-parser.ts +0 -493
- package/src/semantic/go-parser.ts +0 -585
- package/src/semantic/index.ts +0 -34
- package/src/semantic/java-parser.ts +0 -456
- package/src/semantic/python-parser.ts +0 -659
- package/src/semantic/ruby-parser.ts +0 -446
- package/src/semantic/rust-parser.ts +0 -784
- package/src/semantic/semantic-merge.ts +0 -210
- package/src/semantic/ts-parser.ts +0 -681
- package/src/semantic/types.ts +0 -175
- package/src/sync/http-transport.ts +0 -144
- package/src/sync/index.ts +0 -43
- package/src/sync/memory-transport.ts +0 -66
- package/src/sync/multi-repo.ts +0 -200
- package/src/sync/reconciler.ts +0 -237
- package/src/sync/sync-engine.ts +0 -258
- package/src/sync/types.ts +0 -104
- package/src/sync/ws-transport.ts +0 -145
- package/src/ui/client.html +0 -695
- package/src/ui/server.ts +0 -419
- package/src/vcs/blob-store.ts +0 -124
- package/src/vcs/branch.ts +0 -150
- package/src/vcs/checkpoint.ts +0 -64
- package/src/vcs/decompose.ts +0 -469
- package/src/vcs/diff.ts +0 -409
- package/src/vcs/engine-context.ts +0 -26
- package/src/vcs/index.ts +0 -23
- package/src/vcs/issue.ts +0 -800
- package/src/vcs/merge.ts +0 -425
- package/src/vcs/milestone.ts +0 -124
- package/src/vcs/ops.ts +0 -59
- package/src/vcs/types.ts +0 -213
- package/src/vcs/vcs-middleware.ts +0 -81
- package/src/watcher/fs-watcher.ts +0 -255
- package/src/watcher/index.ts +0 -9
- package/src/watcher/ingestion.ts +0 -116
|
@@ -1,493 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* C# Parser Adapter
|
|
3
|
-
*
|
|
4
|
-
* Tier 1 regex-based parser for C# source files.
|
|
5
|
-
* Extracts classes, interfaces, structs, enums, records,
|
|
6
|
-
* methods, properties, fields, using directives, and namespace exports.
|
|
7
|
-
*
|
|
8
|
-
* @see TRL-10
|
|
9
|
-
*/
|
|
10
|
-
|
|
11
|
-
import type {
|
|
12
|
-
ParserAdapter,
|
|
13
|
-
ParseResult,
|
|
14
|
-
ASTEntity,
|
|
15
|
-
ASTEntityKind,
|
|
16
|
-
ImportRelation,
|
|
17
|
-
ExportRelation,
|
|
18
|
-
SemanticPatch,
|
|
19
|
-
} from './types.js';
|
|
20
|
-
|
|
21
|
-
// ---------------------------------------------------------------------------
|
|
22
|
-
// Parser Adapter
|
|
23
|
-
// ---------------------------------------------------------------------------
|
|
24
|
-
|
|
25
|
-
export const csharpParser: ParserAdapter = {
|
|
26
|
-
languages: ['csharp'],
|
|
27
|
-
|
|
28
|
-
parse(content: string, filePath: string): ParseResult {
|
|
29
|
-
const fileEntityId = `file:${filePath}`;
|
|
30
|
-
|
|
31
|
-
return {
|
|
32
|
-
fileEntityId,
|
|
33
|
-
filePath,
|
|
34
|
-
language: 'csharp',
|
|
35
|
-
declarations: extractDeclarations(content, filePath),
|
|
36
|
-
imports: extractImports(content),
|
|
37
|
-
exports: extractExports(content),
|
|
38
|
-
};
|
|
39
|
-
},
|
|
40
|
-
|
|
41
|
-
diff(oldResult: ParseResult, newResult: ParseResult): SemanticPatch[] {
|
|
42
|
-
return computeSemanticDiff(oldResult, newResult);
|
|
43
|
-
},
|
|
44
|
-
};
|
|
45
|
-
|
|
46
|
-
// ---------------------------------------------------------------------------
|
|
47
|
-
// Declaration extraction
|
|
48
|
-
// ---------------------------------------------------------------------------
|
|
49
|
-
|
|
50
|
-
// Optional run of C# declaration modifiers (access, static, abstract, ...).
// Shared prefix for every declaration-matching regex below via `.source`.
const MODIFIERS_RE = /(?:(?:public|private|protected|internal|static|abstract|sealed|virtual|override|partial|readonly|async|extern|unsafe|volatile|new)\s+)*/;
// Type definition: modifiers, a type keyword, then the type name.
const TYPE_DEF_RE = new RegExp(`^${MODIFIERS_RE.source}(class|interface|struct|enum|record)\\s+(\\w+)`);
// Method: modifiers, a return type (word chars, generics, arrays, '?'),
// then the method name followed by '(' or '<' (generic method).
const METHOD_RE = new RegExp(`^${MODIFIERS_RE.source}(?:\\w[\\w<>\\[\\],\\s?]*?)\\s+(\\w+)\\s*[(<]`);
// Property: modifiers, a type, then the property name followed by '{'.
// NOTE: also matches control flow like `if (x) {`; callers filter the
// captured name against CONTROL_FLOW.
const PROP_RE = new RegExp(`^${MODIFIERS_RE.source}(?:\\w[\\w<>\\[\\],\\s?]*?)\\s+(\\w+)\\s*\\{`);
// Field: modifiers, a type, then the field name followed by ';' or '='.
const FIELD_RE = new RegExp(`^${MODIFIERS_RE.source}(?:\\w[\\w<>\\[\\],\\s?]*?)\\s+(\\w+)\\s*[;=]`);
|
|
55
|
-
|
|
56
|
-
function extractDeclarations(content: string, filePath: string): ASTEntity[] {
|
|
57
|
-
const declarations: ASTEntity[] = [];
|
|
58
|
-
const lines = content.split('\n');
|
|
59
|
-
|
|
60
|
-
let i = 0;
|
|
61
|
-
while (i < lines.length) {
|
|
62
|
-
const trimmed = lines[i].trim();
|
|
63
|
-
|
|
64
|
-
// Skip empty, comments, using, namespace
|
|
65
|
-
if (
|
|
66
|
-
!trimmed ||
|
|
67
|
-
trimmed.startsWith('//') ||
|
|
68
|
-
trimmed.startsWith('/*') ||
|
|
69
|
-
trimmed.startsWith('*') ||
|
|
70
|
-
trimmed.startsWith('using ') ||
|
|
71
|
-
trimmed.startsWith('namespace ')
|
|
72
|
-
) {
|
|
73
|
-
i++;
|
|
74
|
-
continue;
|
|
75
|
-
}
|
|
76
|
-
|
|
77
|
-
// Collect attributes
|
|
78
|
-
const attributes: string[] = [];
|
|
79
|
-
const attrStart = i;
|
|
80
|
-
while (i < lines.length && lines[i].trim().startsWith('[')) {
|
|
81
|
-
attributes.push(lines[i].trim());
|
|
82
|
-
i++;
|
|
83
|
-
}
|
|
84
|
-
if (i >= lines.length) break;
|
|
85
|
-
|
|
86
|
-
const declLine = lines[i].trim();
|
|
87
|
-
|
|
88
|
-
// Class/Interface/Struct/Enum/Record
|
|
89
|
-
const typeMatch = declLine.match(TYPE_DEF_RE);
|
|
90
|
-
if (typeMatch) {
|
|
91
|
-
const typeKind = typeMatch[1];
|
|
92
|
-
const name = typeMatch[2];
|
|
93
|
-
const kind: ASTEntityKind =
|
|
94
|
-
typeKind === 'interface' ? 'InterfaceDef' :
|
|
95
|
-
typeKind === 'enum' ? 'EnumDef' :
|
|
96
|
-
'ClassDef';
|
|
97
|
-
const result = extractBraceBlock(
|
|
98
|
-
name, kind, lines,
|
|
99
|
-
attributes.length > 0 ? attrStart : i,
|
|
100
|
-
i, filePath,
|
|
101
|
-
);
|
|
102
|
-
result.entity.children = extractTypeMembers(lines, i, result.endLine, name, filePath);
|
|
103
|
-
declarations.push(result.entity);
|
|
104
|
-
i = result.endLine + 1;
|
|
105
|
-
continue;
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
// If attributes collected but no match, skip
|
|
109
|
-
if (attributes.length > 0) {
|
|
110
|
-
i++;
|
|
111
|
-
continue;
|
|
112
|
-
}
|
|
113
|
-
|
|
114
|
-
i++;
|
|
115
|
-
}
|
|
116
|
-
|
|
117
|
-
return declarations;
|
|
118
|
-
}
|
|
119
|
-
|
|
120
|
-
// ---------------------------------------------------------------------------
|
|
121
|
-
// Block extraction
|
|
122
|
-
// ---------------------------------------------------------------------------
|
|
123
|
-
|
|
124
|
-
interface ExtractionResult {
|
|
125
|
-
entity: ASTEntity;
|
|
126
|
-
endLine: number;
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
function extractBraceBlock(
|
|
130
|
-
name: string,
|
|
131
|
-
kind: ASTEntityKind,
|
|
132
|
-
lines: string[],
|
|
133
|
-
startLine: number,
|
|
134
|
-
defLine: number,
|
|
135
|
-
filePath: string,
|
|
136
|
-
): ExtractionResult {
|
|
137
|
-
let depth = 0;
|
|
138
|
-
let foundOpen = false;
|
|
139
|
-
let endLine = defLine;
|
|
140
|
-
|
|
141
|
-
for (let i = defLine; i < lines.length; i++) {
|
|
142
|
-
for (const ch of lines[i]) {
|
|
143
|
-
if (ch === '{') { depth++; foundOpen = true; }
|
|
144
|
-
else if (ch === '}') depth--;
|
|
145
|
-
}
|
|
146
|
-
if (foundOpen && depth <= 0) { endLine = i; break; }
|
|
147
|
-
if (i > defLine + 500) { endLine = i; break; }
|
|
148
|
-
endLine = i;
|
|
149
|
-
}
|
|
150
|
-
|
|
151
|
-
const rawText = lines.slice(startLine, endLine + 1).join('\n');
|
|
152
|
-
const startOffset = lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
|
|
153
|
-
|
|
154
|
-
return {
|
|
155
|
-
entity: {
|
|
156
|
-
id: makeEntityId(filePath, kind, name),
|
|
157
|
-
kind,
|
|
158
|
-
name,
|
|
159
|
-
scopePath: name,
|
|
160
|
-
span: [startOffset, startOffset + rawText.length],
|
|
161
|
-
rawText,
|
|
162
|
-
signature: normalizeSignature(rawText),
|
|
163
|
-
children: [],
|
|
164
|
-
},
|
|
165
|
-
endLine,
|
|
166
|
-
};
|
|
167
|
-
}
|
|
168
|
-
|
|
169
|
-
// ---------------------------------------------------------------------------
|
|
170
|
-
// Type member extraction
|
|
171
|
-
// ---------------------------------------------------------------------------
|
|
172
|
-
|
|
173
|
-
// Keywords that the member regexes can capture in name position
// (e.g. `if (x) {` looks like a property to PROP_RE); matches whose
// captured name is in this set are rejected.
const CONTROL_FLOW = new Set(['if', 'for', 'foreach', 'while', 'switch', 'catch', 'using', 'lock', 'try', 'else', 'return', 'throw', 'new', 'yield', 'await']);

/**
 * Extract the members of a type body between startLine and endLine
 * (inclusive): nested types, constructors, properties, methods, and
 * fields, each as a child ASTEntity scoped under `parentName`.
 *
 * Matching order matters — the regexes overlap, so each line is tried
 * against nested type, then constructor, then property, then method,
 * then field, taking the first hit.
 */
function extractTypeMembers(
  lines: string[],
  startLine: number,
  endLine: number,
  parentName: string,
  filePath: string,
): ASTEntity[] {
  const children: ASTEntity[] = [];
  let depth = 0;

  for (let i = startLine; i <= endLine; i++) {
    const line = lines[i];
    // Depth at the START of the line decides eligibility: only lines
    // beginning directly inside the type body (depth 1) can declare
    // members; deeper lines are method/property bodies.
    const depthBefore = depth;

    for (const ch of line) {
      if (ch === '{') depth++;
      else if (ch === '}') depth--;
    }

    if (depthBefore !== 1) continue;

    const trimmed = line.trim();
    // Skip blanks, comment bodies, and attribute lines.
    if (!trimmed || trimmed.startsWith('//') || trimmed.startsWith('/*') || trimmed.startsWith('*') || trimmed.startsWith('[')) continue;

    // Nested type
    const nestedMatch = trimmed.match(TYPE_DEF_RE);
    if (nestedMatch) {
      const typeKind = nestedMatch[1];
      const name = nestedMatch[2];
      const kind: ASTEntityKind =
        typeKind === 'interface' ? 'InterfaceDef' :
        typeKind === 'enum' ? 'EnumDef' :
        'ClassDef';
      children.push({
        id: makeEntityId(filePath, kind, `${parentName}.${name}`),
        kind,
        name,
        scopePath: `${parentName}.${name}`,
        span: [0, 0],  // member spans are not tracked at this tier
        rawText: trimmed,
        signature: normalizeSignature(trimmed),
        children: [],
      });
      continue;
    }

    // Constructor: ClassName(
    const ctorMatch = trimmed.match(new RegExp(`^${MODIFIERS_RE.source}${parentName}\\s*\\(`));
    if (ctorMatch) {
      children.push({
        id: makeEntityId(filePath, 'Constructor', `${parentName}.${parentName}`),
        kind: 'Constructor',
        name: parentName,
        scopePath: `${parentName}.${parentName}`,
        span: [0, 0],
        rawText: trimmed,
        signature: normalizeSignature(trimmed),
        children: [],
      });
      continue;
    }

    // Property: type Name { get; set; } or type Name {
    const propMatch = trimmed.match(PROP_RE);
    if (propMatch) {
      const name = propMatch[1];
      if (!CONTROL_FLOW.has(name)) {
        // Check if it looks like a property (has get/set or => or the { is property accessor)
        const restOfLine = trimmed.slice(trimmed.indexOf(name) + name.length).trim();
        if (restOfLine.startsWith('{') && (
          restOfLine.includes('get') || restOfLine.includes('set') ||
          restOfLine.includes('=>') || !restOfLine.includes('(')
        )) {
          children.push({
            id: makeEntityId(filePath, 'PropertyDef', `${parentName}.${name}`),
            kind: 'PropertyDef',
            name,
            scopePath: `${parentName}.${name}`,
            span: [0, 0],
            rawText: trimmed,
            signature: normalizeSignature(trimmed),
            children: [],
          });
          continue;
        }
      }
    }

    // Method: returnType Name( or returnType Name<T>(
    const methodMatch = trimmed.match(METHOD_RE);
    if (methodMatch) {
      const name = methodMatch[1];
      // `name !== parentName` avoids re-reporting a constructor as a method.
      if (!CONTROL_FLOW.has(name) && name !== parentName) {
        children.push({
          id: makeEntityId(filePath, 'MethodDef', `${parentName}.${name}`),
          kind: 'MethodDef',
          name,
          scopePath: `${parentName}.${name}`,
          span: [0, 0],
          rawText: trimmed,
          signature: normalizeSignature(trimmed),
          children: [],
        });
        continue;
      }
    }

    // Field: type name ; or type name =
    // NOTE: fields are recorded with kind 'PropertyDef', same as properties.
    const fieldMatch = trimmed.match(FIELD_RE);
    if (fieldMatch) {
      const name = fieldMatch[1];
      // `var` in name position means a local declaration, not a field.
      if (!CONTROL_FLOW.has(name) && name !== 'var') {
        children.push({
          id: makeEntityId(filePath, 'PropertyDef', `${parentName}.${name}`),
          kind: 'PropertyDef',
          name,
          scopePath: `${parentName}.${name}`,
          span: [0, 0],
          rawText: trimmed,
          signature: normalizeSignature(trimmed),
          children: [],
        });
      }
    }
  }
  return children;
}
|
|
302
|
-
|
|
303
|
-
// ---------------------------------------------------------------------------
|
|
304
|
-
// Import extraction (using directives)
|
|
305
|
-
// ---------------------------------------------------------------------------
|
|
306
|
-
|
|
307
|
-
function extractImports(content: string): ImportRelation[] {
|
|
308
|
-
const imports: ImportRelation[] = [];
|
|
309
|
-
const lines = content.split('\n');
|
|
310
|
-
|
|
311
|
-
for (const line of lines) {
|
|
312
|
-
const trimmed = line.trim();
|
|
313
|
-
|
|
314
|
-
// using Namespace;
|
|
315
|
-
let match = trimmed.match(/^using\s+([A-Z][\w.]+)\s*;/);
|
|
316
|
-
if (match) {
|
|
317
|
-
imports.push({
|
|
318
|
-
source: match[1],
|
|
319
|
-
specifiers: [match[1].split('.').pop()!],
|
|
320
|
-
isDefault: false,
|
|
321
|
-
isNamespace: true,
|
|
322
|
-
rawText: trimmed,
|
|
323
|
-
span: [0, trimmed.length],
|
|
324
|
-
});
|
|
325
|
-
continue;
|
|
326
|
-
}
|
|
327
|
-
|
|
328
|
-
// using static Namespace.Class;
|
|
329
|
-
match = trimmed.match(/^using\s+static\s+([A-Z][\w.]+)\s*;/);
|
|
330
|
-
if (match) {
|
|
331
|
-
imports.push({
|
|
332
|
-
source: match[1],
|
|
333
|
-
specifiers: [match[1].split('.').pop()!],
|
|
334
|
-
isDefault: false,
|
|
335
|
-
isNamespace: false,
|
|
336
|
-
rawText: trimmed,
|
|
337
|
-
span: [0, trimmed.length],
|
|
338
|
-
});
|
|
339
|
-
continue;
|
|
340
|
-
}
|
|
341
|
-
|
|
342
|
-
// using Alias = Namespace.Type;
|
|
343
|
-
match = trimmed.match(/^using\s+(\w+)\s*=\s*([^;]+);/);
|
|
344
|
-
if (match) {
|
|
345
|
-
imports.push({
|
|
346
|
-
source: match[2].trim(),
|
|
347
|
-
specifiers: [match[1]],
|
|
348
|
-
isDefault: true,
|
|
349
|
-
isNamespace: false,
|
|
350
|
-
rawText: trimmed,
|
|
351
|
-
span: [0, trimmed.length],
|
|
352
|
-
});
|
|
353
|
-
}
|
|
354
|
-
}
|
|
355
|
-
|
|
356
|
-
return imports;
|
|
357
|
-
}
|
|
358
|
-
|
|
359
|
-
// ---------------------------------------------------------------------------
|
|
360
|
-
// Export extraction
|
|
361
|
-
// ---------------------------------------------------------------------------
|
|
362
|
-
|
|
363
|
-
function extractExports(content: string): ExportRelation[] {
|
|
364
|
-
const exports: ExportRelation[] = [];
|
|
365
|
-
const lines = content.split('\n');
|
|
366
|
-
|
|
367
|
-
for (const line of lines) {
|
|
368
|
-
const trimmed = line.trim();
|
|
369
|
-
|
|
370
|
-
// Public types are exported
|
|
371
|
-
const match = trimmed.match(/^public\s+(?:(?:abstract|sealed|static|partial)\s+)*(class|interface|struct|enum|record)\s+(\w+)/);
|
|
372
|
-
if (match) {
|
|
373
|
-
exports.push({
|
|
374
|
-
name: match[2],
|
|
375
|
-
isDefault: false,
|
|
376
|
-
rawText: trimmed.split('{')[0].trim(),
|
|
377
|
-
span: [0, 0],
|
|
378
|
-
});
|
|
379
|
-
}
|
|
380
|
-
}
|
|
381
|
-
|
|
382
|
-
return exports;
|
|
383
|
-
}
|
|
384
|
-
|
|
385
|
-
// ---------------------------------------------------------------------------
|
|
386
|
-
// Semantic diff
|
|
387
|
-
// ---------------------------------------------------------------------------
|
|
388
|
-
|
|
389
|
-
function computeSemanticDiff(
|
|
390
|
-
oldResult: ParseResult,
|
|
391
|
-
newResult: ParseResult,
|
|
392
|
-
): SemanticPatch[] {
|
|
393
|
-
const patches: SemanticPatch[] = [];
|
|
394
|
-
const fileId = newResult.fileEntityId;
|
|
395
|
-
|
|
396
|
-
const oldDecls = new Map(oldResult.declarations.map(d => [d.id, d]));
|
|
397
|
-
const newDecls = new Map(newResult.declarations.map(d => [d.id, d]));
|
|
398
|
-
|
|
399
|
-
for (const [id, entity] of newDecls) {
|
|
400
|
-
if (!oldDecls.has(id)) {
|
|
401
|
-
const oldEntity = findRenamedEntity(entity, oldResult.declarations, newDecls);
|
|
402
|
-
if (oldEntity) {
|
|
403
|
-
patches.push({ kind: 'symbolRename', entityId: oldEntity.id, oldName: oldEntity.name, newName: entity.name });
|
|
404
|
-
} else {
|
|
405
|
-
patches.push({ kind: 'symbolAdd', entity });
|
|
406
|
-
}
|
|
407
|
-
}
|
|
408
|
-
}
|
|
409
|
-
|
|
410
|
-
for (const [id, entity] of oldDecls) {
|
|
411
|
-
if (!newDecls.has(id)) {
|
|
412
|
-
const wasRenamed = findRenamedEntity(entity, newResult.declarations, oldDecls);
|
|
413
|
-
if (!wasRenamed) {
|
|
414
|
-
patches.push({ kind: 'symbolRemove', entityId: id, entityName: entity.name });
|
|
415
|
-
}
|
|
416
|
-
}
|
|
417
|
-
}
|
|
418
|
-
|
|
419
|
-
for (const [id, newEntity] of newDecls) {
|
|
420
|
-
const oldEntity = oldDecls.get(id);
|
|
421
|
-
if (oldEntity && oldEntity.signature !== newEntity.signature) {
|
|
422
|
-
patches.push({
|
|
423
|
-
kind: 'symbolModify', entityId: id, entityName: newEntity.name,
|
|
424
|
-
oldSignature: oldEntity.signature, newSignature: newEntity.signature,
|
|
425
|
-
oldRawText: oldEntity.rawText, newRawText: newEntity.rawText,
|
|
426
|
-
});
|
|
427
|
-
}
|
|
428
|
-
}
|
|
429
|
-
|
|
430
|
-
const oldImports = new Map(oldResult.imports.map(imp => [imp.source, imp]));
|
|
431
|
-
const newImports = new Map(newResult.imports.map(imp => [imp.source, imp]));
|
|
432
|
-
|
|
433
|
-
for (const [source, imp] of newImports) {
|
|
434
|
-
const oldImp = oldImports.get(source);
|
|
435
|
-
if (!oldImp) {
|
|
436
|
-
patches.push({ kind: 'importAdd', fileId, source, specifiers: imp.specifiers, rawText: imp.rawText });
|
|
437
|
-
} else if (JSON.stringify(oldImp.specifiers.sort()) !== JSON.stringify(imp.specifiers.sort())) {
|
|
438
|
-
patches.push({ kind: 'importModify', fileId, source, oldSpecifiers: oldImp.specifiers, newSpecifiers: imp.specifiers });
|
|
439
|
-
}
|
|
440
|
-
}
|
|
441
|
-
for (const [source] of oldImports) {
|
|
442
|
-
if (!newImports.has(source)) {
|
|
443
|
-
patches.push({ kind: 'importRemove', fileId, source });
|
|
444
|
-
}
|
|
445
|
-
}
|
|
446
|
-
|
|
447
|
-
const oldExports = new Map(oldResult.exports.map(exp => [exp.name, exp]));
|
|
448
|
-
const newExports = new Map(newResult.exports.map(exp => [exp.name, exp]));
|
|
449
|
-
for (const [name, exp] of newExports) {
|
|
450
|
-
if (!oldExports.has(name)) {
|
|
451
|
-
patches.push({ kind: 'exportAdd', fileId, name, rawText: exp.rawText });
|
|
452
|
-
}
|
|
453
|
-
}
|
|
454
|
-
for (const [name] of oldExports) {
|
|
455
|
-
if (!newExports.has(name)) {
|
|
456
|
-
patches.push({ kind: 'exportRemove', fileId, name });
|
|
457
|
-
}
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
return patches;
|
|
461
|
-
}
|
|
462
|
-
|
|
463
|
-
// ---------------------------------------------------------------------------
|
|
464
|
-
// Helpers
|
|
465
|
-
// ---------------------------------------------------------------------------
|
|
466
|
-
|
|
467
|
-
function findRenamedEntity(
|
|
468
|
-
entity: ASTEntity,
|
|
469
|
-
candidates: ASTEntity[],
|
|
470
|
-
existingIds: Map<string, ASTEntity>,
|
|
471
|
-
): ASTEntity | null {
|
|
472
|
-
for (const candidate of candidates) {
|
|
473
|
-
if (candidate.kind !== entity.kind) continue;
|
|
474
|
-
if (candidate.name === entity.name) continue;
|
|
475
|
-
if (existingIds.has(candidate.id)) continue;
|
|
476
|
-
const normalizedOld = candidate.signature.replace(new RegExp(candidate.name, 'g'), '___');
|
|
477
|
-
const normalizedNew = entity.signature.replace(new RegExp(entity.name, 'g'), '___');
|
|
478
|
-
if (normalizedOld === normalizedNew) return candidate;
|
|
479
|
-
}
|
|
480
|
-
return null;
|
|
481
|
-
}
|
|
482
|
-
|
|
483
|
-
function makeEntityId(filePath: string, kind: string, name: string): string {
|
|
484
|
-
return `${kind}:${filePath}:${name}`;
|
|
485
|
-
}
|
|
486
|
-
|
|
487
|
-
function normalizeSignature(text: string): string {
|
|
488
|
-
return text
|
|
489
|
-
.replace(/\/\/[^\n]*/g, '')
|
|
490
|
-
.replace(/\/\*[\s\S]*?\*\//g, '')
|
|
491
|
-
.replace(/\s+/g, ' ')
|
|
492
|
-
.trim();
|
|
493
|
-
}
|