trellis 2.0.13 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +1 -1
- package/dist/embeddings/index.js +1 -1
- package/dist/{index-7gvjxt27.js → index-2917tjd8.js} +1 -1
- package/package.json +2 -10
- package/dist/transformers.node-bx3q9d7k.js +0 -33130
- package/src/cli/index.ts +0 -3356
- package/src/core/agents/harness.ts +0 -380
- package/src/core/agents/index.ts +0 -18
- package/src/core/agents/types.ts +0 -90
- package/src/core/index.ts +0 -118
- package/src/core/kernel/middleware.ts +0 -44
- package/src/core/kernel/trellis-kernel.ts +0 -593
- package/src/core/ontology/builtins.ts +0 -248
- package/src/core/ontology/index.ts +0 -34
- package/src/core/ontology/registry.ts +0 -209
- package/src/core/ontology/types.ts +0 -124
- package/src/core/ontology/validator.ts +0 -382
- package/src/core/persist/backend.ts +0 -74
- package/src/core/persist/sqlite-backend.ts +0 -298
- package/src/core/plugins/index.ts +0 -17
- package/src/core/plugins/registry.ts +0 -322
- package/src/core/plugins/types.ts +0 -126
- package/src/core/query/datalog.ts +0 -188
- package/src/core/query/engine.ts +0 -370
- package/src/core/query/index.ts +0 -34
- package/src/core/query/parser.ts +0 -481
- package/src/core/query/types.ts +0 -200
- package/src/core/store/eav-store.ts +0 -467
- package/src/decisions/auto-capture.ts +0 -136
- package/src/decisions/hooks.ts +0 -163
- package/src/decisions/index.ts +0 -261
- package/src/decisions/types.ts +0 -103
- package/src/embeddings/auto-embed.ts +0 -248
- package/src/embeddings/chunker.ts +0 -327
- package/src/embeddings/index.ts +0 -48
- package/src/embeddings/model.ts +0 -112
- package/src/embeddings/search.ts +0 -305
- package/src/embeddings/store.ts +0 -313
- package/src/embeddings/types.ts +0 -92
- package/src/engine.ts +0 -1125
- package/src/garden/cluster.ts +0 -330
- package/src/garden/garden.ts +0 -306
- package/src/garden/index.ts +0 -29
- package/src/git/git-exporter.ts +0 -286
- package/src/git/git-importer.ts +0 -329
- package/src/git/git-reader.ts +0 -189
- package/src/git/index.ts +0 -22
- package/src/identity/governance.ts +0 -211
- package/src/identity/identity.ts +0 -224
- package/src/identity/index.ts +0 -30
- package/src/identity/signing-middleware.ts +0 -97
- package/src/index.ts +0 -29
- package/src/links/index.ts +0 -49
- package/src/links/lifecycle.ts +0 -400
- package/src/links/parser.ts +0 -484
- package/src/links/ref-index.ts +0 -186
- package/src/links/resolver.ts +0 -314
- package/src/links/types.ts +0 -108
- package/src/mcp/index.ts +0 -22
- package/src/mcp/server.ts +0 -1278
- package/src/semantic/csharp-parser.ts +0 -493
- package/src/semantic/go-parser.ts +0 -585
- package/src/semantic/index.ts +0 -34
- package/src/semantic/java-parser.ts +0 -456
- package/src/semantic/python-parser.ts +0 -659
- package/src/semantic/ruby-parser.ts +0 -446
- package/src/semantic/rust-parser.ts +0 -784
- package/src/semantic/semantic-merge.ts +0 -210
- package/src/semantic/ts-parser.ts +0 -681
- package/src/semantic/types.ts +0 -175
- package/src/sync/http-transport.ts +0 -144
- package/src/sync/index.ts +0 -43
- package/src/sync/memory-transport.ts +0 -66
- package/src/sync/multi-repo.ts +0 -200
- package/src/sync/reconciler.ts +0 -237
- package/src/sync/sync-engine.ts +0 -258
- package/src/sync/types.ts +0 -104
- package/src/sync/ws-transport.ts +0 -145
- package/src/ui/client.html +0 -695
- package/src/ui/server.ts +0 -419
- package/src/vcs/blob-store.ts +0 -124
- package/src/vcs/branch.ts +0 -150
- package/src/vcs/checkpoint.ts +0 -64
- package/src/vcs/decompose.ts +0 -469
- package/src/vcs/diff.ts +0 -409
- package/src/vcs/engine-context.ts +0 -26
- package/src/vcs/index.ts +0 -23
- package/src/vcs/issue.ts +0 -800
- package/src/vcs/merge.ts +0 -425
- package/src/vcs/milestone.ts +0 -124
- package/src/vcs/ops.ts +0 -59
- package/src/vcs/types.ts +0 -213
- package/src/vcs/vcs-middleware.ts +0 -81
- package/src/watcher/fs-watcher.ts +0 -255
- package/src/watcher/index.ts +0 -9
- package/src/watcher/ingestion.ts +0 -116
|
@@ -1,784 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Rust Parser Adapter
|
|
3
|
-
*
|
|
4
|
-
* Tier 1 regex-based parser for Rust source files.
|
|
5
|
-
* Extracts structs, enums, traits, impl blocks, functions,
|
|
6
|
-
* methods, type aliases, constants, macros, and use statements.
|
|
7
|
-
*
|
|
8
|
-
* @see TRL-7
|
|
9
|
-
*/
|
|
10
|
-
|
|
11
|
-
import type {
|
|
12
|
-
ParserAdapter,
|
|
13
|
-
ParseResult,
|
|
14
|
-
ASTEntity,
|
|
15
|
-
ASTEntityKind,
|
|
16
|
-
ImportRelation,
|
|
17
|
-
ExportRelation,
|
|
18
|
-
SemanticPatch,
|
|
19
|
-
} from './types.js';
|
|
20
|
-
|
|
21
|
-
// ---------------------------------------------------------------------------
|
|
22
|
-
// Parser Adapter
|
|
23
|
-
// ---------------------------------------------------------------------------
|
|
24
|
-
|
|
25
|
-
export const rustParser: ParserAdapter = {
|
|
26
|
-
languages: ['rust'],
|
|
27
|
-
|
|
28
|
-
parse(content: string, filePath: string): ParseResult {
|
|
29
|
-
const fileEntityId = `file:${filePath}`;
|
|
30
|
-
|
|
31
|
-
return {
|
|
32
|
-
fileEntityId,
|
|
33
|
-
filePath,
|
|
34
|
-
language: 'rust',
|
|
35
|
-
declarations: extractDeclarations(content, filePath),
|
|
36
|
-
imports: extractImports(content),
|
|
37
|
-
exports: extractExports(content, filePath),
|
|
38
|
-
};
|
|
39
|
-
},
|
|
40
|
-
|
|
41
|
-
diff(oldResult: ParseResult, newResult: ParseResult): SemanticPatch[] {
|
|
42
|
-
return computeSemanticDiff(oldResult, newResult);
|
|
43
|
-
},
|
|
44
|
-
};
|
|
45
|
-
|
|
46
|
-
// ---------------------------------------------------------------------------
|
|
47
|
-
// Declaration extraction
|
|
48
|
-
// ---------------------------------------------------------------------------
|
|
49
|
-
|
|
50
|
-
/**
 * Walk the file line-by-line and extract top-level Rust declarations:
 * structs, enums, traits, impl blocks, free functions, type aliases,
 * consts, statics, and macro_rules! definitions.
 *
 * Outer attributes (`#[...]`) immediately preceding a declaration are
 * folded into that declaration's span; inner attributes (`#![...]`),
 * comments, `use`/`extern`/`mod` lines are skipped.
 */
function extractDeclarations(content: string, filePath: string): ASTEntity[] {
  const declarations: ASTEntity[] = [];
  const lines = content.split('\n');

  let i = 0;
  while (i < lines.length) {
    const line = lines[i];
    const trimmed = line.trim();

    // Skip empty, comments, use statements, attributes (handled with next decl)
    if (
      !trimmed ||
      trimmed.startsWith('//') ||
      trimmed.startsWith('/*') ||
      trimmed.startsWith('*') ||
      trimmed.startsWith('use ') ||
      trimmed.startsWith('extern ') ||
      trimmed.startsWith('mod ') ||
      trimmed.startsWith('#![')
    ) {
      i++;
      continue;
    }

    // Collect attributes (#[...]) so the following declaration's span includes them
    const attrs: string[] = [];
    const attrStart = i;
    while (i < lines.length && lines[i].trim().startsWith('#[')) {
      attrs.push(lines[i].trim());
      i++;
    }
    if (i >= lines.length) break;

    const declLine = lines[i].trim();

    // Strip visibility + qualifiers for matching
    const stripped = declLine
      .replace(/^pub(\s*\([^)]*\))?\s+/, '')
      .replace(/^async\s+/, '')
      .replace(/^unsafe\s+/, '')
      .replace(/^const\s+(?=fn)/, '');

    // Struct: struct Name { ... } or struct Name;
    let match = stripped.match(/^struct\s+(\w+)/);
    if (match) {
      if (declLine.includes(';') && !declLine.includes('{')) {
        // Unit struct or tuple struct without brace block on this line
        const result = extractToSemicolon(
          match[1],
          'ClassDef',
          lines,
          attrs.length > 0 ? attrStart : i,
          i,
          filePath,
          attrs,
        );
        declarations.push(result.entity);
        i = result.endLine + 1;
      } else {
        const result = extractBraceBlock(
          match[1],
          'ClassDef',
          lines,
          attrs.length > 0 ? attrStart : i,
          i,
          filePath,
          attrs,
        );
        // Named fields of the struct body become child PropertyDef entities.
        result.entity.children = extractStructFields(
          lines,
          i,
          result.endLine,
          match[1],
          filePath,
        );
        declarations.push(result.entity);
        i = result.endLine + 1;
      }
      continue;
    }

    // Enum: enum Name { ... }
    match = stripped.match(/^enum\s+(\w+)/);
    if (match) {
      const result = extractBraceBlock(
        match[1],
        'EnumDef',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Trait: trait Name { ... } — mapped to InterfaceDef
    match = stripped.match(/^trait\s+(\w+)/);
    if (match) {
      const result = extractBraceBlock(
        match[1],
        'InterfaceDef',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      result.entity.children = extractTraitMethods(
        lines,
        i,
        result.endLine,
        match[1],
        filePath,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Impl block: impl [Trait for] Type { ... }
    match = stripped.match(/^impl(?:<[^>]*>)?\s+(?:(\w+)\s+for\s+)?(\w+)/);
    if (match && stripped.includes('{')) {
      const typeName = match[2];
      const traitName = match[1];
      const implName = traitName ? `${traitName} for ${typeName}` : typeName;
      const result = extractBraceBlock(
        implName,
        'ClassDef',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      result.entity.children = extractImplMethods(
        lines,
        i,
        result.endLine,
        implName,
        filePath,
      );
      // "impl:" id prefix keeps the impl block distinct from the type's own ClassDef.
      result.entity.id = makeEntityId(filePath, 'ClassDef', `impl:${implName}`);
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Function: fn name(...) — async/unsafe/const qualifiers were stripped above
    match = stripped.match(/^fn\s+(\w+)/);
    if (match) {
      const result = extractBraceBlock(
        match[1],
        'FunctionDef',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Type alias: type Name = ...;
    match = stripped.match(/^type\s+(\w+)/);
    if (match) {
      const result = extractToSemicolon(
        match[1],
        'TypeAlias',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Const: const NAME: Type = ...;
    match = stripped.match(/^const\s+(\w+)/);
    if (match) {
      const result = extractToSemicolon(
        match[1],
        'VariableDecl',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Static: static [mut] NAME: Type = ...;
    match = stripped.match(/^static\s+(?:mut\s+)?(\w+)/);
    if (match) {
      const result = extractToSemicolon(
        match[1],
        'VariableDecl',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // Macro: macro_rules! name { ... } — stored as FunctionDef with "macro:" id prefix
    match = stripped.match(/^macro_rules!\s+(\w+)/);
    if (match) {
      const result = extractBraceBlock(
        match[1],
        'FunctionDef',
        lines,
        attrs.length > 0 ? attrStart : i,
        i,
        filePath,
        attrs,
      );
      result.entity.id = makeEntityId(
        filePath,
        'FunctionDef',
        `macro:${match[1]}`,
      );
      declarations.push(result.entity);
      i = result.endLine + 1;
      continue;
    }

    // If we collected attributes but no matching decl, skip
    if (attrs.length > 0) {
      i++;
      continue;
    }

    i++;
  }

  return declarations;
}
|
|
301
|
-
|
|
302
|
-
// ---------------------------------------------------------------------------
|
|
303
|
-
// Block extraction helpers
|
|
304
|
-
// ---------------------------------------------------------------------------
|
|
305
|
-
|
|
306
|
-
/** Result of pulling one declaration (and its line span) out of the file. */
interface ExtractionResult {
  /** The extracted declaration entity. */
  entity: ASTEntity;
  /** Index into the `lines` array of the last line consumed by the declaration. */
  endLine: number;
}
|
|
310
|
-
|
|
311
|
-
function extractBraceBlock(
|
|
312
|
-
name: string,
|
|
313
|
-
kind: ASTEntityKind,
|
|
314
|
-
lines: string[],
|
|
315
|
-
startLine: number,
|
|
316
|
-
defLine: number,
|
|
317
|
-
filePath: string,
|
|
318
|
-
attrs: string[],
|
|
319
|
-
): ExtractionResult {
|
|
320
|
-
let depth = 0;
|
|
321
|
-
let foundOpen = false;
|
|
322
|
-
let endLine = defLine;
|
|
323
|
-
|
|
324
|
-
for (let i = defLine; i < lines.length; i++) {
|
|
325
|
-
for (const ch of lines[i]) {
|
|
326
|
-
if (ch === '{') {
|
|
327
|
-
depth++;
|
|
328
|
-
foundOpen = true;
|
|
329
|
-
} else if (ch === '}') {
|
|
330
|
-
depth--;
|
|
331
|
-
}
|
|
332
|
-
}
|
|
333
|
-
if (foundOpen && depth <= 0) {
|
|
334
|
-
endLine = i;
|
|
335
|
-
break;
|
|
336
|
-
}
|
|
337
|
-
if (i > defLine + 300) {
|
|
338
|
-
endLine = i;
|
|
339
|
-
break;
|
|
340
|
-
}
|
|
341
|
-
endLine = i;
|
|
342
|
-
}
|
|
343
|
-
|
|
344
|
-
const rawText = lines.slice(startLine, endLine + 1).join('\n');
|
|
345
|
-
const startOffset =
|
|
346
|
-
lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
|
|
347
|
-
|
|
348
|
-
return {
|
|
349
|
-
entity: {
|
|
350
|
-
id: makeEntityId(filePath, kind, name),
|
|
351
|
-
kind,
|
|
352
|
-
name,
|
|
353
|
-
scopePath: name,
|
|
354
|
-
span: [startOffset, startOffset + rawText.length],
|
|
355
|
-
rawText,
|
|
356
|
-
signature: normalizeSignature(rawText),
|
|
357
|
-
children: [],
|
|
358
|
-
},
|
|
359
|
-
endLine,
|
|
360
|
-
};
|
|
361
|
-
}
|
|
362
|
-
|
|
363
|
-
function extractToSemicolon(
|
|
364
|
-
name: string,
|
|
365
|
-
kind: ASTEntityKind,
|
|
366
|
-
lines: string[],
|
|
367
|
-
startLine: number,
|
|
368
|
-
defLine: number,
|
|
369
|
-
filePath: string,
|
|
370
|
-
attrs: string[],
|
|
371
|
-
): ExtractionResult {
|
|
372
|
-
let endLine = defLine;
|
|
373
|
-
for (let i = defLine; i < lines.length; i++) {
|
|
374
|
-
if (lines[i].includes(';')) {
|
|
375
|
-
endLine = i;
|
|
376
|
-
break;
|
|
377
|
-
}
|
|
378
|
-
endLine = i;
|
|
379
|
-
}
|
|
380
|
-
|
|
381
|
-
const rawText = lines.slice(startLine, endLine + 1).join('\n');
|
|
382
|
-
const startOffset =
|
|
383
|
-
lines.slice(0, startLine).join('\n').length + (startLine > 0 ? 1 : 0);
|
|
384
|
-
|
|
385
|
-
return {
|
|
386
|
-
entity: {
|
|
387
|
-
id: makeEntityId(filePath, kind, name),
|
|
388
|
-
kind,
|
|
389
|
-
name,
|
|
390
|
-
scopePath: name,
|
|
391
|
-
span: [startOffset, startOffset + rawText.length],
|
|
392
|
-
rawText,
|
|
393
|
-
signature: normalizeSignature(rawText),
|
|
394
|
-
children: [],
|
|
395
|
-
},
|
|
396
|
-
endLine,
|
|
397
|
-
};
|
|
398
|
-
}
|
|
399
|
-
|
|
400
|
-
// ---------------------------------------------------------------------------
|
|
401
|
-
// Member extraction
|
|
402
|
-
// ---------------------------------------------------------------------------
|
|
403
|
-
|
|
404
|
-
function extractStructFields(
|
|
405
|
-
lines: string[],
|
|
406
|
-
startLine: number,
|
|
407
|
-
endLine: number,
|
|
408
|
-
structName: string,
|
|
409
|
-
filePath: string,
|
|
410
|
-
): ASTEntity[] {
|
|
411
|
-
const children: ASTEntity[] = [];
|
|
412
|
-
let inBody = false;
|
|
413
|
-
|
|
414
|
-
for (let i = startLine; i <= endLine; i++) {
|
|
415
|
-
const trimmed = lines[i].trim();
|
|
416
|
-
if (trimmed.includes('{')) {
|
|
417
|
-
inBody = true;
|
|
418
|
-
continue;
|
|
419
|
-
}
|
|
420
|
-
if (!inBody) continue;
|
|
421
|
-
if (trimmed === '}' || !trimmed || trimmed.startsWith('//')) continue;
|
|
422
|
-
|
|
423
|
-
const match = trimmed.match(/^(?:pub\s+)?(\w+)\s*:/);
|
|
424
|
-
if (match) {
|
|
425
|
-
children.push({
|
|
426
|
-
id: makeEntityId(filePath, 'PropertyDef', `${structName}.${match[1]}`),
|
|
427
|
-
kind: 'PropertyDef',
|
|
428
|
-
name: match[1],
|
|
429
|
-
scopePath: `${structName}.${match[1]}`,
|
|
430
|
-
span: [0, 0],
|
|
431
|
-
rawText: trimmed,
|
|
432
|
-
signature: normalizeSignature(trimmed),
|
|
433
|
-
children: [],
|
|
434
|
-
});
|
|
435
|
-
}
|
|
436
|
-
}
|
|
437
|
-
return children;
|
|
438
|
-
}
|
|
439
|
-
|
|
440
|
-
function extractTraitMethods(
|
|
441
|
-
lines: string[],
|
|
442
|
-
startLine: number,
|
|
443
|
-
endLine: number,
|
|
444
|
-
traitName: string,
|
|
445
|
-
filePath: string,
|
|
446
|
-
): ASTEntity[] {
|
|
447
|
-
return extractBlockMethods(lines, startLine, endLine, traitName, filePath);
|
|
448
|
-
}
|
|
449
|
-
|
|
450
|
-
function extractImplMethods(
|
|
451
|
-
lines: string[],
|
|
452
|
-
startLine: number,
|
|
453
|
-
endLine: number,
|
|
454
|
-
implName: string,
|
|
455
|
-
filePath: string,
|
|
456
|
-
): ASTEntity[] {
|
|
457
|
-
return extractBlockMethods(lines, startLine, endLine, implName, filePath);
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
function extractBlockMethods(
|
|
461
|
-
lines: string[],
|
|
462
|
-
startLine: number,
|
|
463
|
-
endLine: number,
|
|
464
|
-
parentName: string,
|
|
465
|
-
filePath: string,
|
|
466
|
-
): ASTEntity[] {
|
|
467
|
-
const children: ASTEntity[] = [];
|
|
468
|
-
let depth = 0;
|
|
469
|
-
|
|
470
|
-
for (let i = startLine; i <= endLine; i++) {
|
|
471
|
-
const line = lines[i];
|
|
472
|
-
const depthBefore = depth;
|
|
473
|
-
|
|
474
|
-
for (const ch of line) {
|
|
475
|
-
if (ch === '{') depth++;
|
|
476
|
-
else if (ch === '}') depth--;
|
|
477
|
-
}
|
|
478
|
-
|
|
479
|
-
// A direct child line starts at depth 1 (inside the block, not nested deeper)
|
|
480
|
-
if (depthBefore === 1) {
|
|
481
|
-
const trimmed = line
|
|
482
|
-
.trim()
|
|
483
|
-
.replace(/^pub(\s*\([^)]*\))?\s+/, '')
|
|
484
|
-
.replace(/^async\s+/, '')
|
|
485
|
-
.replace(/^unsafe\s+/, '')
|
|
486
|
-
.replace(/^const\s+(?=fn)/, '');
|
|
487
|
-
|
|
488
|
-
const match = trimmed.match(/^fn\s+(\w+)/);
|
|
489
|
-
if (match) {
|
|
490
|
-
const kind: ASTEntityKind =
|
|
491
|
-
match[1] === 'new' ? 'Constructor' : 'MethodDef';
|
|
492
|
-
children.push({
|
|
493
|
-
id: makeEntityId(filePath, kind, `${parentName}.${match[1]}`),
|
|
494
|
-
kind,
|
|
495
|
-
name: match[1],
|
|
496
|
-
scopePath: `${parentName}.${match[1]}`,
|
|
497
|
-
span: [0, 0],
|
|
498
|
-
rawText: line.trim(),
|
|
499
|
-
signature: normalizeSignature(line.trim()),
|
|
500
|
-
children: [],
|
|
501
|
-
});
|
|
502
|
-
}
|
|
503
|
-
}
|
|
504
|
-
}
|
|
505
|
-
return children;
|
|
506
|
-
}
|
|
507
|
-
|
|
508
|
-
// ---------------------------------------------------------------------------
|
|
509
|
-
// Import extraction (use statements)
|
|
510
|
-
// ---------------------------------------------------------------------------
|
|
511
|
-
|
|
512
|
-
function extractImports(content: string): ImportRelation[] {
|
|
513
|
-
const imports: ImportRelation[] = [];
|
|
514
|
-
const lines = content.split('\n');
|
|
515
|
-
|
|
516
|
-
for (let i = 0; i < lines.length; i++) {
|
|
517
|
-
const trimmed = lines[i].trim();
|
|
518
|
-
if (!trimmed.startsWith('use ') && !trimmed.startsWith('pub use '))
|
|
519
|
-
continue;
|
|
520
|
-
|
|
521
|
-
let full = trimmed;
|
|
522
|
-
// Handle multi-line use
|
|
523
|
-
while (!full.includes(';') && i + 1 < lines.length) {
|
|
524
|
-
i++;
|
|
525
|
-
full += ' ' + lines[i].trim();
|
|
526
|
-
}
|
|
527
|
-
|
|
528
|
-
const cleaned = full
|
|
529
|
-
.replace(/^pub\s+/, '')
|
|
530
|
-
.replace(/^use\s+/, '')
|
|
531
|
-
.replace(/;$/, '')
|
|
532
|
-
.trim();
|
|
533
|
-
|
|
534
|
-
// use crate::module::{A, B, C};
|
|
535
|
-
const braceMatch = cleaned.match(/^(.+)::\{([^}]+)\}$/);
|
|
536
|
-
if (braceMatch) {
|
|
537
|
-
const source = braceMatch[1];
|
|
538
|
-
const specifiers = braceMatch[2]
|
|
539
|
-
.split(',')
|
|
540
|
-
.map((s) =>
|
|
541
|
-
s
|
|
542
|
-
.trim()
|
|
543
|
-
.split('::')
|
|
544
|
-
.pop()!
|
|
545
|
-
.replace(/\s+as\s+\w+/, ''),
|
|
546
|
-
)
|
|
547
|
-
.filter(Boolean);
|
|
548
|
-
imports.push({
|
|
549
|
-
source,
|
|
550
|
-
specifiers,
|
|
551
|
-
isDefault: false,
|
|
552
|
-
isNamespace: false,
|
|
553
|
-
rawText: full,
|
|
554
|
-
span: [0, full.length],
|
|
555
|
-
});
|
|
556
|
-
continue;
|
|
557
|
-
}
|
|
558
|
-
|
|
559
|
-
// use crate::module::Name; or use crate::module::*;
|
|
560
|
-
const simpleMatch = cleaned.match(/^(.+)::(\w+|\*)$/);
|
|
561
|
-
if (simpleMatch) {
|
|
562
|
-
const isWild = simpleMatch[2] === '*';
|
|
563
|
-
imports.push({
|
|
564
|
-
source: cleaned,
|
|
565
|
-
specifiers: [simpleMatch[2]],
|
|
566
|
-
isDefault: !isWild,
|
|
567
|
-
isNamespace: isWild,
|
|
568
|
-
rawText: full,
|
|
569
|
-
span: [0, full.length],
|
|
570
|
-
});
|
|
571
|
-
continue;
|
|
572
|
-
}
|
|
573
|
-
|
|
574
|
-
// use module as alias;
|
|
575
|
-
const aliasMatch = cleaned.match(/^(\S+)\s+as\s+(\w+)$/);
|
|
576
|
-
if (aliasMatch) {
|
|
577
|
-
imports.push({
|
|
578
|
-
source: aliasMatch[1],
|
|
579
|
-
specifiers: [aliasMatch[2]],
|
|
580
|
-
isDefault: true,
|
|
581
|
-
isNamespace: false,
|
|
582
|
-
rawText: full,
|
|
583
|
-
span: [0, full.length],
|
|
584
|
-
});
|
|
585
|
-
continue;
|
|
586
|
-
}
|
|
587
|
-
|
|
588
|
-
// Fallback: treat whole path as source
|
|
589
|
-
imports.push({
|
|
590
|
-
source: cleaned,
|
|
591
|
-
specifiers: [cleaned.split('::').pop() ?? cleaned],
|
|
592
|
-
isDefault: true,
|
|
593
|
-
isNamespace: false,
|
|
594
|
-
rawText: full,
|
|
595
|
-
span: [0, full.length],
|
|
596
|
-
});
|
|
597
|
-
}
|
|
598
|
-
|
|
599
|
-
return imports;
|
|
600
|
-
}
|
|
601
|
-
|
|
602
|
-
// ---------------------------------------------------------------------------
|
|
603
|
-
// Export extraction
|
|
604
|
-
// ---------------------------------------------------------------------------
|
|
605
|
-
|
|
606
|
-
/**
|
|
607
|
-
* In Rust, `pub` items are exports. We derive from declarations.
|
|
608
|
-
*/
|
|
609
|
-
function extractExports(content: string, filePath: string): ExportRelation[] {
|
|
610
|
-
const exports: ExportRelation[] = [];
|
|
611
|
-
const lines = content.split('\n');
|
|
612
|
-
|
|
613
|
-
for (const line of lines) {
|
|
614
|
-
const trimmed = line.trim();
|
|
615
|
-
if (!trimmed.startsWith('pub ') && !trimmed.startsWith('pub(')) continue;
|
|
616
|
-
|
|
617
|
-
// pub fn name, pub struct name, pub enum name, pub trait name, pub type name, pub const name
|
|
618
|
-
const match = trimmed.match(
|
|
619
|
-
/^pub(?:\s*\([^)]*\))?\s+(?:async\s+|unsafe\s+|const\s+)?(?:fn|struct|enum|trait|type|const|static)\s+(\w+)/,
|
|
620
|
-
);
|
|
621
|
-
if (match) {
|
|
622
|
-
exports.push({
|
|
623
|
-
name: match[1],
|
|
624
|
-
isDefault: false,
|
|
625
|
-
rawText: trimmed.split('{')[0].trim(),
|
|
626
|
-
span: [0, 0],
|
|
627
|
-
});
|
|
628
|
-
}
|
|
629
|
-
}
|
|
630
|
-
|
|
631
|
-
return exports;
|
|
632
|
-
}
|
|
633
|
-
|
|
634
|
-
// ---------------------------------------------------------------------------
|
|
635
|
-
// Semantic diff (same generic algorithm)
|
|
636
|
-
// ---------------------------------------------------------------------------
|
|
637
|
-
|
|
638
|
-
function computeSemanticDiff(
|
|
639
|
-
oldResult: ParseResult,
|
|
640
|
-
newResult: ParseResult,
|
|
641
|
-
): SemanticPatch[] {
|
|
642
|
-
const patches: SemanticPatch[] = [];
|
|
643
|
-
const fileId = newResult.fileEntityId;
|
|
644
|
-
|
|
645
|
-
const oldDecls = new Map(oldResult.declarations.map((d) => [d.id, d]));
|
|
646
|
-
const newDecls = new Map(newResult.declarations.map((d) => [d.id, d]));
|
|
647
|
-
|
|
648
|
-
for (const [id, entity] of newDecls) {
|
|
649
|
-
if (!oldDecls.has(id)) {
|
|
650
|
-
const oldEntity = findRenamedEntity(
|
|
651
|
-
entity,
|
|
652
|
-
oldResult.declarations,
|
|
653
|
-
newDecls,
|
|
654
|
-
);
|
|
655
|
-
if (oldEntity) {
|
|
656
|
-
patches.push({
|
|
657
|
-
kind: 'symbolRename',
|
|
658
|
-
entityId: oldEntity.id,
|
|
659
|
-
oldName: oldEntity.name,
|
|
660
|
-
newName: entity.name,
|
|
661
|
-
});
|
|
662
|
-
} else {
|
|
663
|
-
patches.push({ kind: 'symbolAdd', entity });
|
|
664
|
-
}
|
|
665
|
-
}
|
|
666
|
-
}
|
|
667
|
-
|
|
668
|
-
for (const [id, entity] of oldDecls) {
|
|
669
|
-
if (!newDecls.has(id)) {
|
|
670
|
-
const wasRenamed = findRenamedEntity(
|
|
671
|
-
entity,
|
|
672
|
-
newResult.declarations,
|
|
673
|
-
oldDecls,
|
|
674
|
-
);
|
|
675
|
-
if (!wasRenamed) {
|
|
676
|
-
patches.push({
|
|
677
|
-
kind: 'symbolRemove',
|
|
678
|
-
entityId: id,
|
|
679
|
-
entityName: entity.name,
|
|
680
|
-
});
|
|
681
|
-
}
|
|
682
|
-
}
|
|
683
|
-
}
|
|
684
|
-
|
|
685
|
-
for (const [id, newEntity] of newDecls) {
|
|
686
|
-
const oldEntity = oldDecls.get(id);
|
|
687
|
-
if (oldEntity && oldEntity.signature !== newEntity.signature) {
|
|
688
|
-
patches.push({
|
|
689
|
-
kind: 'symbolModify',
|
|
690
|
-
entityId: id,
|
|
691
|
-
entityName: newEntity.name,
|
|
692
|
-
oldSignature: oldEntity.signature,
|
|
693
|
-
newSignature: newEntity.signature,
|
|
694
|
-
oldRawText: oldEntity.rawText,
|
|
695
|
-
newRawText: newEntity.rawText,
|
|
696
|
-
});
|
|
697
|
-
}
|
|
698
|
-
}
|
|
699
|
-
|
|
700
|
-
const oldImports = new Map(oldResult.imports.map((imp) => [imp.source, imp]));
|
|
701
|
-
const newImports = new Map(newResult.imports.map((imp) => [imp.source, imp]));
|
|
702
|
-
|
|
703
|
-
for (const [source, imp] of newImports) {
|
|
704
|
-
const oldImp = oldImports.get(source);
|
|
705
|
-
if (!oldImp) {
|
|
706
|
-
patches.push({
|
|
707
|
-
kind: 'importAdd',
|
|
708
|
-
fileId,
|
|
709
|
-
source,
|
|
710
|
-
specifiers: imp.specifiers,
|
|
711
|
-
rawText: imp.rawText,
|
|
712
|
-
});
|
|
713
|
-
} else if (
|
|
714
|
-
JSON.stringify(oldImp.specifiers.sort()) !==
|
|
715
|
-
JSON.stringify(imp.specifiers.sort())
|
|
716
|
-
) {
|
|
717
|
-
patches.push({
|
|
718
|
-
kind: 'importModify',
|
|
719
|
-
fileId,
|
|
720
|
-
source,
|
|
721
|
-
oldSpecifiers: oldImp.specifiers,
|
|
722
|
-
newSpecifiers: imp.specifiers,
|
|
723
|
-
});
|
|
724
|
-
}
|
|
725
|
-
}
|
|
726
|
-
for (const [source] of oldImports) {
|
|
727
|
-
if (!newImports.has(source)) {
|
|
728
|
-
patches.push({ kind: 'importRemove', fileId, source });
|
|
729
|
-
}
|
|
730
|
-
}
|
|
731
|
-
|
|
732
|
-
const oldExports = new Map(oldResult.exports.map((exp) => [exp.name, exp]));
|
|
733
|
-
const newExports = new Map(newResult.exports.map((exp) => [exp.name, exp]));
|
|
734
|
-
for (const [name, exp] of newExports) {
|
|
735
|
-
if (!oldExports.has(name)) {
|
|
736
|
-
patches.push({ kind: 'exportAdd', fileId, name, rawText: exp.rawText });
|
|
737
|
-
}
|
|
738
|
-
}
|
|
739
|
-
for (const [name] of oldExports) {
|
|
740
|
-
if (!newExports.has(name)) {
|
|
741
|
-
patches.push({ kind: 'exportRemove', fileId, name });
|
|
742
|
-
}
|
|
743
|
-
}
|
|
744
|
-
|
|
745
|
-
return patches;
|
|
746
|
-
}
|
|
747
|
-
|
|
748
|
-
// ---------------------------------------------------------------------------
|
|
749
|
-
// Helpers
|
|
750
|
-
// ---------------------------------------------------------------------------
|
|
751
|
-
|
|
752
|
-
function findRenamedEntity(
|
|
753
|
-
entity: ASTEntity,
|
|
754
|
-
candidates: ASTEntity[],
|
|
755
|
-
existingIds: Map<string, ASTEntity>,
|
|
756
|
-
): ASTEntity | null {
|
|
757
|
-
for (const candidate of candidates) {
|
|
758
|
-
if (candidate.kind !== entity.kind) continue;
|
|
759
|
-
if (candidate.name === entity.name) continue;
|
|
760
|
-
if (existingIds.has(candidate.id)) continue;
|
|
761
|
-
const normalizedOld = candidate.signature.replace(
|
|
762
|
-
new RegExp(candidate.name, 'g'),
|
|
763
|
-
'___',
|
|
764
|
-
);
|
|
765
|
-
const normalizedNew = entity.signature.replace(
|
|
766
|
-
new RegExp(entity.name, 'g'),
|
|
767
|
-
'___',
|
|
768
|
-
);
|
|
769
|
-
if (normalizedOld === normalizedNew) return candidate;
|
|
770
|
-
}
|
|
771
|
-
return null;
|
|
772
|
-
}
|
|
773
|
-
|
|
774
|
-
function makeEntityId(filePath: string, kind: string, name: string): string {
|
|
775
|
-
return `${kind}:${filePath}:${name}`;
|
|
776
|
-
}
|
|
777
|
-
|
|
778
|
-
function normalizeSignature(text: string): string {
|
|
779
|
-
return text
|
|
780
|
-
.replace(/\/\/[^\n]*/g, '')
|
|
781
|
-
.replace(/\/\*[\s\S]*?\*\//g, '')
|
|
782
|
-
.replace(/\s+/g, ' ')
|
|
783
|
-
.trim();
|
|
784
|
-
}
|