@ez-corp/ez-context 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +198 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +597 -0
- package/dist/cli.js.map +1 -0
- package/dist/emitters-D6bP4xWs.js +1849 -0
- package/dist/emitters-D6bP4xWs.js.map +1 -0
- package/dist/ez-context +0 -0
- package/dist/index.d.ts +248 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3 -0
- package/package.json +70 -0
|
@@ -0,0 +1,1849 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
import { access, mkdir, readFile, writeFile } from "node:fs/promises";
|
|
3
|
+
import path, { dirname, join, relative, resolve } from "node:path";
|
|
4
|
+
import { parse } from "smol-toml";
|
|
5
|
+
import { globby } from "globby";
|
|
6
|
+
import yaml, { load } from "js-yaml";
|
|
7
|
+
import { Project, SyntaxKind } from "ts-morph";
|
|
8
|
+
import { EzSearchError, index, query } from "@ez-corp/ez-search";
|
|
9
|
+
import { existsSync } from "node:fs";
|
|
10
|
+
|
|
11
|
+
//#region src/core/schema.ts
|
|
12
|
+
// Allowed convention categories; "other" is the catch-all bucket.
const ConventionCategorySchema = z.enum([
  "stack",
  "naming",
  "architecture",
  "error_handling",
  "testing",
  "imports",
  "other"
]);
// Points at where a convention was observed; `line` is null for
// file-level evidence (e.g. "package.json" as a whole).
const EvidenceRefSchema = z.object({
  file: z.string(),
  line: z.number().int().positive().nullable()
});
// One detected convention. `confidence` is a 0..1 score; `id` is a UUID
// (assigned by addConvention when entries are inserted).
const ConventionEntrySchema = z.object({
  id: z.uuid(),
  category: ConventionCategorySchema,
  pattern: z.string().min(1),
  confidence: z.number().min(0).max(1),
  evidence: z.array(EvidenceRefSchema),
  metadata: z.record(z.string(), z.unknown()).optional()
});
// Detected toolchain facts; only `language` is required — createRegistry
// seeds it with "unknown".
const StackInfoSchema = z.object({
  language: z.string(),
  framework: z.string().optional(),
  testRunner: z.string().optional(),
  buildTool: z.string().optional(),
  packageManager: z.string().optional(),
  nodeVersion: z.string().optional()
});
// High-level architecture description; `layers` is always present (may be empty).
const ArchitectureInfoSchema = z.object({
  pattern: z.string().optional(),
  layers: z.array(z.string()),
  entryPoints: z.array(z.string()).optional()
});
// Root document produced by the extractors. `version` pins the format;
// `generatedAt` must be an ISO-8601 datetime string.
const ConventionRegistrySchema = z.object({
  version: z.literal("1"),
  projectPath: z.string(),
  generatedAt: z.string().datetime(),
  stack: StackInfoSchema,
  conventions: z.array(ConventionEntrySchema),
  architecture: ArchitectureInfoSchema,
  metadata: z.record(z.string(), z.unknown()).optional()
});
|
|
55
|
+
|
|
56
|
+
//#endregion
|
|
57
|
+
//#region src/core/registry.ts
|
|
58
|
+
/**
 * Create a new empty ConventionRegistry for the given project path.
 * The returned registry passes ConventionRegistrySchema validation.
 *
 * @param {string} projectPath - Root of the project the registry describes.
 * @returns {object} A freshly validated, empty registry.
 * @throws {Error} If the constructed registry fails schema validation.
 */
function createRegistry(projectPath) {
  const parsed = ConventionRegistrySchema.safeParse({
    version: "1",
    projectPath,
    generatedAt: new Date().toISOString(),
    stack: { language: "unknown" },
    conventions: [],
    architecture: { layers: [] }
  });
  if (parsed.success) return parsed.data;
  throw new Error(`createRegistry produced invalid registry: ${JSON.stringify(parsed.error.issues)}`);
}
|
|
75
|
+
/**
 * Add a convention entry to the registry, auto-generating a UUID for the id.
 * Returns a new registry (does not mutate the input).
 *
 * @param {object} registry - Existing, valid registry.
 * @param {object} entry - Convention entry without an `id` field.
 * @returns {object} A new validated registry containing the entry.
 * @throws {Error} If the resulting registry fails schema validation.
 */
function addConvention(registry, entry) {
  const withId = { id: crypto.randomUUID(), ...entry };
  const candidate = {
    ...registry,
    conventions: registry.conventions.concat(withId)
  };
  const parsed = ConventionRegistrySchema.safeParse(candidate);
  if (parsed.success) return parsed.data;
  throw new Error(`addConvention produced invalid registry: ${JSON.stringify(parsed.error.issues)}`);
}
|
|
92
|
+
|
|
93
|
+
//#endregion
|
|
94
|
+
//#region src/extractors/index.ts
|
|
95
|
+
/**
 * Run all extractors in parallel via Promise.allSettled so that a single
 * failing extractor does not abort the others.
 *
 * Fulfilled entries are added to the registry immutably via `addConvention`.
 * Rejected extractors emit a console.warn and are skipped.
 *
 * @param {Array<{name: string, extract: Function}>} extractors
 * @param {object} ctx - Extraction context passed to each extractor.
 * @param {object} registry - Starting registry (not mutated).
 * @returns {Promise<object>} Registry with all successful entries added.
 */
async function runExtractors(extractors, ctx, registry) {
  // Keep the `.then` form (not an async wrapper) so a synchronous throw from
  // extract() still propagates rather than being converted to a rejection.
  const settled = await Promise.allSettled(
    extractors.map((extractor) => extractor.extract(ctx).then((entries) => ({
      extractor,
      entries
    })))
  );
  let accumulated = registry;
  settled.forEach((outcome, index) => {
    if (outcome.status === "fulfilled") {
      for (const entry of outcome.value.entries) {
        accumulated = addConvention(accumulated, entry);
      }
    } else {
      console.warn(`[runExtractors] Extractor "${extractors[index].name}" failed:`, outcome.reason);
    }
  });
  return accumulated;
}
|
|
112
|
+
|
|
113
|
+
//#endregion
|
|
114
|
+
//#region src/extractors/static/package-json.ts
|
|
115
|
+
// Dependency name → human-readable framework label. Iteration order is the
// detection priority: the extractor reports only the FIRST key found in the
// project's dependencies.
// NOTE(review): a project with both react and next reports "React" only —
// presumably intentional priority ordering; confirm.
const FRAMEWORK_MAP = {
  react: "React",
  vue: "Vue",
  "@angular/core": "Angular",
  next: "Next.js",
  nuxt: "Nuxt",
  svelte: "Svelte",
  hono: "Hono",
  express: "Express",
  fastify: "Fastify",
  koa: "Koa"
};
// Dependency name → test-runner label; same first-match-wins semantics.
const TEST_RUNNER_MAP = {
  vitest: "Vitest",
  jest: "Jest",
  mocha: "Mocha",
  jasmine: "Jasmine",
  ava: "Ava"
};
|
|
134
|
+
/**
 * Merge runtime and dev dependencies into a single name→version map.
 * devDependencies overwrite dependencies on duplicate keys (same order as
 * the original spread). Missing sections are treated as empty.
 */
function allDeps(pkg) {
  return Object.assign({}, pkg.dependencies, pkg.devDependencies);
}
|
|
140
|
+
// Shared file-level evidence used by every package-json entry.
// NOTE: this array is shared by reference across all emitted entries.
const EVIDENCE$1 = [{
  file: "package.json",
  line: null
}];
|
|
144
|
+
// Extracts stack/testing/imports conventions from <project>/package.json.
// Best-effort: a missing or unparseable file yields no entries.
const packageJsonExtractor = {
  name: "package-json",
  async extract(ctx) {
    const filePath = join(ctx.projectPath, "package.json");
    // Bail out quietly when package.json does not exist.
    try {
      await access(filePath);
    } catch {
      return [];
    }
    let pkg;
    // Bail out quietly on read or JSON parse failure.
    try {
      const raw = await readFile(filePath, "utf-8");
      pkg = JSON.parse(raw);
    } catch {
      return [];
    }
    const deps = allDeps(pkg);
    const entries = [];
    // Heuristic: presence of typescript or @types/node implies TypeScript.
    const language = "typescript" in deps || "@types/node" in deps ? "TypeScript" : "JavaScript";
    entries.push({
      category: "stack",
      pattern: `Language: ${language}`,
      confidence: .95,
      evidence: EVIDENCE$1,
      metadata: { language }
    });
    // First framework found (map order = priority) wins; only one is reported.
    for (const [pkg_name, label] of Object.entries(FRAMEWORK_MAP)) if (pkg_name in deps) {
      const version = deps[pkg_name];
      entries.push({
        category: "stack",
        pattern: `Framework: ${label}`,
        confidence: 1,
        evidence: EVIDENCE$1,
        metadata: {
          framework: label,
          version
        }
      });
      break;
    }
    // Same first-match-wins scan for the test runner.
    for (const [pkg_name, label] of Object.entries(TEST_RUNNER_MAP)) if (pkg_name in deps) {
      entries.push({
        category: "testing",
        pattern: `Test runner: ${label}`,
        confidence: .95,
        evidence: EVIDENCE$1,
        metadata: { testRunner: label }
      });
      break;
    }
    // packageManager is "name@version" (corepack convention); keep the name.
    if (typeof pkg.packageManager === "string") {
      const pmName = pkg.packageManager.split("@")[0];
      if (pmName) entries.push({
        category: "stack",
        pattern: `Package manager: ${pmName}`,
        confidence: 1,
        evidence: EVIDENCE$1,
        metadata: { packageManager: pmName }
      });
    }
    // "type": "module" signals the project uses ES modules throughout.
    if (pkg.type === "module") entries.push({
      category: "imports",
      pattern: `ES modules (package.json "type": "module")`,
      confidence: 1,
      evidence: EVIDENCE$1,
      metadata: { moduleSystem: "esm" }
    });
    // Record the canonical test/build/lint scripts (test:* variants included).
    const scripts = pkg.scripts ?? {};
    for (const [scriptName, command] of Object.entries(scripts)) {
      const isTestScript = scriptName === "test" || scriptName.startsWith("test:");
      if (isTestScript || scriptName === "build" || scriptName === "lint") entries.push({
        category: isTestScript ? "testing" : "stack",
        pattern: `Script "${scriptName}": ${command}`,
        confidence: 1,
        evidence: EVIDENCE$1,
        metadata: {
          scriptName,
          command
        }
      });
    }
    return entries;
  }
};
|
|
228
|
+
|
|
229
|
+
//#endregion
|
|
230
|
+
//#region src/extractors/static/lockfile.ts
|
|
231
|
+
/**
 * Lockfiles checked in priority order — the first one present in the
 * project root determines the package manager.
 */
const LOCKFILES = [
  { file: "bun.lock", manager: "bun" },
  { file: "bun.lockb", manager: "bun" },
  { file: "pnpm-lock.yaml", manager: "pnpm" },
  { file: "yarn.lock", manager: "yarn" },
  { file: "package-lock.json", manager: "npm" }
];
/**
 * Detects the package manager from which lockfile exists at the project root.
 * Emits at most one entry; returns [] when no known lockfile is found.
 */
const lockfileExtractor = {
  name: "lockfile",
  async extract(ctx) {
    for (const { file, manager } of LOCKFILES) {
      const exists = await access(join(ctx.projectPath, file)).then(
        () => true,
        () => false
      );
      if (!exists) continue;
      return [{
        category: "stack",
        pattern: `Package manager: ${manager}`,
        confidence: 1,
        evidence: [{
          file,
          line: null
        }],
        metadata: { packageManager: manager }
      }];
    }
    return [];
  }
};
|
|
272
|
+
|
|
273
|
+
//#endregion
|
|
274
|
+
//#region src/extractors/static/tsconfig.ts
|
|
275
|
+
// Shared file-level evidence for all tsconfig-derived entries.
// NOTE: shared by reference across emitted entries.
const EVIDENCE = [{
  file: "tsconfig.json",
  line: null
}];
|
|
279
|
+
/**
 * Strip // line comments, /* ... *\/ block comments, and trailing commas so
 * JSON.parse accepts tsconfig.json files that use non-standard (JSONC) JSON.
 *
 * Unlike a plain regex, this scanner tracks string state, so string values
 * containing "//" (e.g. URLs in `extends` or path mappings) or ",}" are left
 * intact. The original regex-based version corrupted such values.
 *
 * @param {string} raw - Raw tsconfig.json text.
 * @returns {string} Strict-JSON text suitable for JSON.parse.
 */
function stripTsConfigNonStandardJson(raw) {
  // Pass 1: remove comments, honoring strings and escape sequences.
  let stripped = "";
  let inString = false;
  let i = 0;
  while (i < raw.length) {
    const ch = raw[i];
    if (inString) {
      stripped += ch;
      if (ch === "\\" && i + 1 < raw.length) {
        // Copy the escaped character verbatim (covers \" and \\).
        stripped += raw[i + 1];
        i += 2;
        continue;
      }
      if (ch === '"') inString = false;
      i++;
      continue;
    }
    if (ch === '"') {
      inString = true;
      stripped += ch;
      i++;
      continue;
    }
    if (ch === "/" && raw[i + 1] === "/") {
      // Line comment: drop to end of line, keep the newline itself.
      while (i < raw.length && raw[i] !== "\n") i++;
      continue;
    }
    if (ch === "/" && raw[i + 1] === "*") {
      // Block comment: drop through the closing */ (or to EOF if unclosed).
      const end = raw.indexOf("*/", i + 2);
      i = end === -1 ? raw.length : end + 2;
      continue;
    }
    stripped += ch;
    i++;
  }
  // Pass 2: remove trailing commas before } or ], again honoring strings.
  let result = "";
  inString = false;
  for (let j = 0; j < stripped.length; j++) {
    const ch = stripped[j];
    if (inString) {
      result += ch;
      if (ch === "\\" && j + 1 < stripped.length) {
        result += stripped[j + 1];
        j++;
      } else if (ch === '"') inString = false;
      continue;
    }
    if (ch === '"') {
      inString = true;
      result += ch;
      continue;
    }
    if (ch === ",") {
      let k = j + 1;
      while (k < stripped.length && /\s/.test(stripped[k])) k++;
      // Drop the comma only when the next significant char closes a scope.
      if (stripped[k] === "}" || stripped[k] === "]") continue;
    }
    result += ch;
  }
  return result;
}
|
|
286
|
+
// Extracts TypeScript configuration conventions from <project>/tsconfig.json:
// strict mode, notable compiler options, and path aliases. Best-effort —
// missing or unparseable files yield no entries.
const tsconfigExtractor = {
  name: "tsconfig",
  async extract(ctx) {
    const filePath = join(ctx.projectPath, "tsconfig.json");
    try {
      await access(filePath);
    } catch {
      return [];
    }
    let config;
    try {
      const raw = await readFile(filePath, "utf-8");
      // tsconfig.json is JSONC; normalize before strict JSON.parse.
      config = JSON.parse(stripTsConfigNonStandardJson(raw));
    } catch {
      return [];
    }
    const co = config.compilerOptions ?? {};
    const entries = [];
    // Only an explicit `"strict": true` counts; absent/false emit nothing.
    if (co.strict === true) entries.push({
      category: "stack",
      pattern: "TypeScript strict mode enabled",
      confidence: 1,
      evidence: EVIDENCE,
      metadata: { strict: true }
    });
    // Surface only the options worth reporting, when present.
    const notable = {};
    for (const key of [
      "target",
      "module",
      "moduleResolution"
    ]) if (co[key] !== void 0) notable[key] = co[key];
    if (Object.keys(notable).length > 0) entries.push({
      category: "stack",
      pattern: "TypeScript compiler options configured",
      confidence: 1,
      evidence: EVIDENCE,
      metadata: notable
    });
    // Non-empty `paths` mapping means the project uses import aliases.
    if (co.paths && Object.keys(co.paths).length > 0) entries.push({
      category: "imports",
      pattern: "Uses TypeScript path aliases",
      confidence: 1,
      evidence: EVIDENCE,
      metadata: { aliases: co.paths }
    });
    return entries;
  }
};
|
|
334
|
+
|
|
335
|
+
//#endregion
|
|
336
|
+
//#region src/extractors/static/go-mod.ts
|
|
337
|
+
/**
 * Detects a Go project by parsing <project>/go.mod: module path, go version,
 * and the number of require directives. Best-effort — missing/unreadable
 * files, or files without a module directive, yield no entries.
 */
const goModExtractor = {
  name: "go-mod",
  async extract(ctx) {
    const filePath = join(ctx.projectPath, "go.mod");
    let raw;
    try {
      await access(filePath);
      raw = await readFile(filePath, "utf-8");
    } catch {
      return [];
    }
    let moduleName = "";
    let goVersion = "";
    let dependencyCount = 0;
    let inRequireBlock = false;
    for (const rawLine of raw.split("\n")) {
      const line = rawLine.trim();
      // The first `module <path>` directive wins.
      if (!moduleName) {
        const moduleMatch = /^module\s+(\S+)/.exec(line);
        if (moduleMatch?.[1]) {
          moduleName = moduleMatch[1];
          continue;
        }
      }
      // The first `go <version>` directive wins.
      if (!goVersion) {
        const goMatch = /^go\s+(\S+)/.exec(line);
        if (goMatch?.[1]) {
          goVersion = goMatch[1];
          continue;
        }
      }
      if (line === "require (") {
        inRequireBlock = true;
        continue;
      }
      if (inRequireBlock) {
        // Inside a require block: count non-empty, non-comment lines.
        if (line === ")") inRequireBlock = false;
        else if (line !== "" && !line.startsWith("//")) dependencyCount += 1;
        continue;
      }
      // Single-line require directive outside a block.
      if (/^require\s+\S+\s+v\S+/.test(line)) dependencyCount += 1;
    }
    if (moduleName === "") return [];
    return [{
      category: "stack",
      pattern: `Go project (${moduleName})`,
      confidence: 1,
      evidence: [{
        file: "go.mod",
        line: null
      }],
      metadata: {
        language: "Go",
        moduleName,
        goVersion: goVersion || null,
        dependencyCount
      }
    }];
  }
};
|
|
402
|
+
|
|
403
|
+
//#endregion
|
|
404
|
+
//#region src/extractors/static/cargo-toml.ts
|
|
405
|
+
/**
 * Detects a Rust project by parsing <project>/Cargo.toml with smol-toml.
 * Emits one entry carrying the package name and dependency count; missing,
 * unparseable, or nameless manifests yield no entries.
 */
const cargoTomlExtractor = {
  name: "cargo-toml",
  async extract(ctx) {
    const filePath = join(ctx.projectPath, "Cargo.toml");
    let cargo;
    try {
      await access(filePath);
      cargo = parse(await readFile(filePath, "utf-8"));
    } catch {
      return [];
    }
    const packageName = cargo.package?.name;
    if (!packageName) return [];
    const dependencyCount = Object.keys(cargo.dependencies ?? {}).length;
    return [{
      category: "stack",
      pattern: `Rust project (${packageName})`,
      confidence: 1,
      evidence: [{
        file: "Cargo.toml",
        line: null
      }],
      metadata: {
        language: "Rust",
        packageName,
        dependencyCount
      }
    }];
  }
};
|
|
439
|
+
|
|
440
|
+
//#endregion
|
|
441
|
+
//#region src/extractors/static/ci.ts
|
|
442
|
+
/** Substrings that mark a CI command as a build step. */
const BUILD_KEYWORDS = [
  "build",
  "compile",
  "tsc",
  "tsdown"
];
/** Substrings that mark a CI command as a test step (checked first). */
const TEST_KEYWORDS = [
  "test",
  "vitest",
  "jest",
  "pytest",
  "cargo test",
  "go test"
];
/** Substrings that mark a CI command as a lint step. */
const LINT_KEYWORDS = [
  "lint",
  "eslint",
  "biome",
  "clippy",
  "ruff"
];
/**
 * Map a CI shell command to a convention category, or null when it is not a
 * recognised build/test/lint step. Test keywords take precedence over
 * build and lint, both of which map to "stack".
 */
function categorizeCommand(cmd) {
  const lower = cmd.toLowerCase();
  const mentions = (keywords) => keywords.some((kw) => lower.includes(kw));
  if (mentions(TEST_KEYWORDS)) return "testing";
  if (mentions(BUILD_KEYWORDS) || mentions(LINT_KEYWORDS)) return "stack";
  return null;
}
|
|
470
|
+
/**
 * Extract run commands from a GitHub Actions workflow YAML.
 * Traverses jobs.*.steps[].run, splitting multi-line run blocks into
 * individual commands. Returns every non-empty command in `raw` and the
 * categorized subset in `matched`.
 *
 * @param {unknown} doc - Parsed workflow YAML document.
 * @param {string} filePath - Relative workflow path, recorded on matches.
 * @returns {{matched: Array, raw: string[]}}
 */
function extractGithubActionsCommands(doc, filePath) {
  const matched = [];
  const raw = [];
  const result = { matched, raw };
  if (!doc || typeof doc !== "object") return result;
  const jobs = doc["jobs"];
  if (!jobs || typeof jobs !== "object") return result;
  for (const job of Object.values(jobs)) {
    if (!job || typeof job !== "object") continue;
    const steps = job["steps"];
    if (!Array.isArray(steps)) continue;
    for (const step of steps) {
      if (!step || typeof step !== "object") continue;
      const run = step["run"];
      if (typeof run !== "string") continue;
      // A `run:` block may carry several shell commands, one per line.
      for (const piece of run.split("\n")) {
        const command = piece.trim();
        if (command === "") continue;
        raw.push(command);
        const category = categorizeCommand(command);
        if (category !== null) matched.push({
          command,
          category,
          ciFile: filePath
        });
      }
    }
  }
  return result;
}
|
|
512
|
+
/** Keys that are not job definitions in GitLab CI top-level. */
// Any top-level key NOT in this set is treated as a job when scanning
// for `script` commands.
const GITLAB_RESERVED = new Set([
  "stages",
  "variables",
  "include",
  "default",
  "workflow",
  "image",
  "services",
  "before_script",
  "after_script",
  "cache",
  "artifacts"
]);
|
|
526
|
+
/**
 * Extract script commands from a GitLab CI YAML.
 * Traverses top-level job keys (skipping reserved keys), looks for script
 * arrays (a bare string script is treated as a one-element array).
 *
 * @param {unknown} doc - Parsed .gitlab-ci.yml document.
 * @param {string} filePath - Relative CI file path, recorded on matches.
 * @returns {{matched: Array, raw: string[]}}
 */
function extractGitlabCiCommands(doc, filePath) {
  const matched = [];
  const raw = [];
  const result = { matched, raw };
  if (!doc || typeof doc !== "object") return result;
  for (const [key, job] of Object.entries(doc)) {
    if (GITLAB_RESERVED.has(key)) continue;
    if (!job || typeof job !== "object") continue;
    const script = job["script"];
    let commands;
    if (Array.isArray(script)) commands = script;
    else if (typeof script === "string") commands = [script];
    else commands = [];
    for (const cmd of commands) {
      if (typeof cmd !== "string") continue;
      const command = cmd.trim();
      if (command === "") continue;
      raw.push(command);
      const category = categorizeCommand(command);
      if (category !== null) matched.push({
        command,
        category,
        ciFile: filePath
      });
    }
  }
  return result;
}
|
|
560
|
+
// Scans GitHub Actions workflows and .gitlab-ci.yml for build/test/lint
// commands and emits one convention entry per categorized command.
const ciExtractor = {
  name: "ci",
  async extract(ctx) {
    // CI config is often gitignored-adjacent; search without gitignore rules.
    const ciFiles = await globby([
      ".github/workflows/*.yml",
      ".github/workflows/*.yaml",
      ".gitlab-ci.yml"
    ], {
      cwd: ctx.projectPath,
      gitignore: false,
      followSymbolicLinks: false,
      absolute: false
    });
    if (ciFiles.length === 0) return [];
    const entries = [];
    for (const relPath of ciFiles) {
      const absPath = join(ctx.projectPath, relPath);
      let raw;
      // Skip unreadable files rather than failing the whole extractor.
      try {
        raw = await readFile(absPath, "utf-8");
      } catch {
        continue;
      }
      let doc;
      // Skip files with invalid YAML.
      try {
        doc = load(raw);
      } catch {
        continue;
      }
      // Dispatch on file name: GitLab CI layout vs GitHub Actions layout.
      const { matched, raw: rawCmds } = relPath.includes(".gitlab-ci") ? extractGitlabCiCommands(doc, relPath) : extractGithubActionsCommands(doc, relPath);
      // NOTE(review): the `rawCmds.length > 0` half of this guard is
      // redundant — the loop only iterates `matched` and is a no-op when
      // `matched` is empty.
      if (rawCmds.length > 0 || matched.length > 0) for (const { command, category } of matched) entries.push({
        category,
        pattern: `CI command: ${command}`,
        confidence: .9,
        evidence: [{
          file: relative(ctx.projectPath, absPath) || relPath,
          line: null
        }],
        metadata: {
          command,
          ciFile: relPath,
          // All commands seen in this CI file, matched or not.
          rawCommands: rawCmds
        }
      });
    }
    return entries;
  }
};
|
|
608
|
+
|
|
609
|
+
//#endregion
|
|
610
|
+
//#region src/utils/fs.ts
|
|
611
|
+
/** Directories and paths that are always excluded from file listings. */
// Covers dependencies, build output, generated code, tool caches, and VCS.
const ALWAYS_SKIP = [
  "**/node_modules/**",
  "**/dist/**",
  "**/generated/**",
  "**/.ez-search/**",
  "**/.ez-context/**",
  "**/.git/**"
];
|
|
620
|
+
/**
 * List project files while respecting .gitignore (INTG-04) and always
 * skipping common generated/build directories.
 *
 * @param {object} options
 * @param {string} options.cwd - Directory to search from.
 * @param {string[]} [options.extensions] - Extensions without the dot;
 *   defaults to ts/js/json/md.
 * @param {string[]} [options.additionalIgnore] - Extra glob patterns to skip.
 * @returns {Promise<string[]>} Relative paths sorted alphabetically.
 */
async function listProjectFiles(options) {
  const {
    cwd,
    extensions = ["ts", "js", "json", "md"],
    additionalIgnore = []
  } = options;
  // globby rejects a single-entry brace group, so special-case one extension.
  const pattern = extensions.length === 1
    ? `**/*.${extensions[0]}`
    : `**/*.{${extensions.join(",")}}`;
  const files = await globby(pattern, {
    cwd,
    gitignore: true,
    ignore: [...ALWAYS_SKIP, ...additionalIgnore],
    followSymbolicLinks: false,
    absolute: false
  });
  return files.sort();
}
|
|
641
|
+
|
|
642
|
+
//#endregion
|
|
643
|
+
//#region src/extractors/static/project-structure.ts
|
|
644
|
+
/** Top-level test directory prefixes — files here are directory-based, not co-located. */
const TEST_DIR_PREFIXES = [
  "test/",
  "tests/",
  "__tests__/"
];
// Glob → location/style pairs scanned by the project-structure extractor.
// `location === "co-located"` also triggers filtering out files that live
// under TEST_DIR_PREFIXES, so a file is never counted under two patterns.
const TEST_PATTERNS = [
  {
    glob: "**/*.test.{ts,tsx,js,jsx}",
    location: "co-located",
    style: "*.test.ts style"
  },
  {
    glob: "**/*.spec.{ts,tsx,js,jsx}",
    location: "co-located",
    style: "*.spec.ts style"
  },
  {
    glob: "test/**/*.{ts,tsx,js,jsx}",
    location: "test/ directory",
    style: "test/ directory"
  },
  {
    glob: "tests/**/*.{ts,tsx,js,jsx}",
    location: "tests/ directory",
    style: "tests/ directory"
  },
  {
    glob: "__tests__/**/*.{ts,tsx,js,jsx}",
    location: "__tests__/ directory",
    style: "__tests__/ directory"
  }
];
|
|
677
|
+
// Detects where test files live (co-located vs dedicated directories) and
// emits one "testing" convention per matching pattern.
const projectStructureExtractor = {
  name: "project-structure",
  async extract(ctx) {
    const entries = [];
    for (const { glob, location, style } of TEST_PATTERNS) {
      let matches = await globby(glob, {
        cwd: ctx.projectPath,
        gitignore: true,
        ignore: [...ALWAYS_SKIP],
        followSymbolicLinks: false,
        absolute: false
      });
      // Co-located globs also match files under dedicated test dirs;
      // exclude those so each file counts under exactly one pattern.
      if (location === "co-located") matches = matches.filter((f) => !TEST_DIR_PREFIXES.some((prefix) => f.startsWith(prefix)));
      if (matches.length === 0) continue;
      const count = matches.length;
      // Confidence grows with the number of test files, capped at .95.
      const confidence = Math.min(.95, .5 + count * .05);
      // Cap evidence at five representative files.
      const evidenceFiles = matches.slice(0, 5);
      entries.push({
        category: "testing",
        pattern: `Test files in ${location} (${style})`,
        confidence,
        evidence: evidenceFiles.map((f) => ({
          file: f,
          line: null
        })),
        metadata: {
          testFileCount: count,
          location,
          style
        }
      });
    }
    return entries;
  }
};
|
|
712
|
+
|
|
713
|
+
//#endregion
|
|
714
|
+
//#region src/extractors/code/naming.ts
|
|
715
|
+
/**
 * Classify the naming convention of a single identifier.
 * Returns null for short names (< 4 chars) or unclassifiable names.
 * Rules are checked in order: an all-caps name classifies as
 * UPPER_SNAKE_CASE before the PascalCase rule can see it, and snake_case
 * requires an actual underscore while camelCase requires an uppercase letter.
 */
function classifyCase(name) {
  if (name.length < 4) return null;
  const rules = [
    ["UPPER_SNAKE_CASE", (n) => /^[A-Z][A-Z0-9_]{3,}$/.test(n)],
    ["PascalCase", (n) => /^[A-Z][a-zA-Z0-9]+$/.test(n)],
    ["snake_case", (n) => /^[a-z][a-z0-9_]+$/.test(n) && n.includes("_")],
    ["camelCase", (n) => /^[a-z][a-zA-Z0-9]+$/.test(n) && /[A-Z]/.test(n)]
  ];
  for (const [label, matches] of rules) {
    if (matches(name)) return label;
  }
  return null;
}
|
|
727
|
+
// Infers dominant naming conventions (per entity type: functions, variables,
// classes) by parsing up to maxFilesForAst source files with ts-morph and
// tallying classifyCase results.
const namingExtractor = {
  name: "naming",
  async extract(ctx) {
    const files = await listProjectFiles({
      cwd: ctx.projectPath,
      extensions: [
        "ts",
        "tsx",
        "js",
        "jsx"
      ]
    });
    if (files.length === 0) return [];
    // Cap AST work; default 200 files.
    const maxFiles = ctx.options?.maxFilesForAst ?? 200;
    const filesToAnalyse = files.slice(0, maxFiles);
    const project = new Project({
      compilerOptions: {
        allowJs: true,
        noEmit: true
      },
      // Skip module resolution — we only need per-file declarations.
      skipFileDependencyResolution: true
    });
    const absPaths = filesToAnalyse.map((f) => `${ctx.projectPath}/${f}`);
    project.addSourceFilesAtPaths(absPaths);
    // Per-entity tallies of case style → occurrence count.
    const functions = {};
    const variables = {};
    const classes = {};
    const counts = {
      functions,
      variables,
      classes
    };
    for (const sf of project.getSourceFiles()) {
      for (const fn of sf.getFunctions()) {
        const name = fn.getName();
        // Anonymous functions have no name to classify.
        if (!name) continue;
        const kind = classifyCase(name);
        if (kind) functions[kind] = (functions[kind] ?? 0) + 1;
      }
      for (const decl of sf.getVariableDeclarations()) {
        const kind = classifyCase(decl.getName());
        if (kind) variables[kind] = (variables[kind] ?? 0) + 1;
      }
      for (const cls of sf.getClasses()) {
        const name = cls.getName();
        if (!name) continue;
        const kind = classifyCase(name);
        if (kind) classes[kind] = (classes[kind] ?? 0) + 1;
      }
    }
    const entries = [];
    for (const [entityType, caseCounts] of Object.entries(counts)) {
      const total = Object.values(caseCounts).reduce((a, b) => a + b, 0);
      // Too small a sample to call a convention.
      if (total < 3) continue;
      // Find the most frequent case style for this entity type.
      let dominant = null;
      let dominantCount = 0;
      for (const [kind, count] of Object.entries(caseCounts)) if (count > dominantCount) {
        dominant = kind;
        dominantCount = count;
      }
      if (!dominant) continue;
      // Confidence is the dominant share, capped at .95; below .6 the
      // codebase is considered too mixed to report.
      const confidence = Math.min(.95, dominantCount / total);
      if (confidence < .6) continue;
      entries.push({
        category: "naming",
        pattern: `${entityType} use ${dominant} naming`,
        confidence,
        evidence: [{
          file: "src/**/*.ts",
          line: null
        }],
        metadata: {
          entityType,
          dominantCase: dominant,
          counts: caseCounts,
          sampleSize: total
        }
      });
    }
    return entries;
  }
};
|
|
809
|
+
|
|
810
|
+
//#endregion
|
|
811
|
+
//#region src/extractors/code/imports.ts
|
|
812
|
+
/** Known path alias prefixes used across JS/TS ecosystems. */
const ALIAS_PREFIXES = [
  "@/",
  "~/",
  "#/",
  "$lib/"
];
/** True when an import specifier starts with one of the known alias prefixes. */
function hasPathAlias(specifier) {
  for (const prefix of ALIAS_PREFIXES) {
    if (specifier.startsWith(prefix)) return true;
  }
  return false;
}
|
|
822
|
+
/**
 * Check whether a source file looks like a barrel file:
 * - Has at least one export declaration
 * - Has no function, class, or variable declarations
 */
function isBarrelFile(sourceFile) {
  const exportCount = sourceFile.getExportDeclarations().length;
  if (exportCount === 0) return false;
  const declarationCounts = [
    sourceFile.getFunctions().length,
    sourceFile.getClasses().length,
    sourceFile.getVariableDeclarations().length
  ];
  return declarationCounts.every((count) => count === 0);
}
|
|
834
|
+
/**
 * Build a set of absolute paths for barrel files (index.ts/js variants).
 * Since skipFileDependencyResolution prevents module resolution,
 * we identify barrel files upfront and match imports by path.
 */
function buildBarrelFileSet(project) {
  const barrels = new Set();
  const indexName = /^index\.[tj]sx?$/;
  for (const sourceFile of project.getSourceFiles()) {
    const filePath = sourceFile.getFilePath();
    const baseName = filePath.split("/").pop() ?? "";
    if (indexName.test(baseName) && isBarrelFile(sourceFile)) {
      barrels.add(filePath);
    }
  }
  return barrels;
}
|
|
848
|
+
/** Resolve a relative import specifier to candidate absolute paths. */
// Direct-file candidates come first (base + ext), then index-file candidates
// (base/index + ext), each in .ts/.tsx/.js/.jsx order.
function resolveRelativeImport(importingFile, specifier) {
  const base = resolve(dirname(importingFile), specifier);
  const exts = [
    ".ts",
    ".tsx",
    ".js",
    ".jsx"
  ];
  const direct = exts.map((ext) => base + ext);
  const indexed = exts.map((ext) => join(base, "index" + ext));
  return [...direct, ...indexed];
}
|
|
862
|
+
/**
 * Extractor that characterises how the project organises its imports:
 * relative vs external ratio, barrel (index) imports, and path-alias usage.
 * Returns an array of convention entries (possibly empty).
 */
const importsExtractor = {
  name: "imports",
  async extract(ctx) {
    // listProjectFiles is a sibling helper (not visible here); presumably it
    // returns paths relative to cwd — TODO confirm against its definition.
    const files = await listProjectFiles({
      cwd: ctx.projectPath,
      extensions: [
        "ts",
        "tsx",
        "js",
        "jsx"
      ]
    });
    if (files.length === 0) return [];
    // Cap AST work for large repos; default 200 files.
    const maxFiles = ctx.options?.maxFilesForAst ?? 200;
    const filesToAnalyse = files.slice(0, maxFiles);
    // skipFileDependencyResolution keeps ts-morph from resolving modules,
    // which is why barrel detection matches candidate paths instead.
    const project = new Project({
      compilerOptions: {
        allowJs: true,
        noEmit: true
      },
      skipFileDependencyResolution: true
    });
    // NOTE(review): naive string join — assumes ctx.projectPath has no
    // trailing slash and files are relative POSIX paths; verify callers.
    const absPaths = filesToAnalyse.map((f) => `${ctx.projectPath}/${f}`);
    project.addSourceFilesAtPaths(absPaths);
    const barrelFiles = buildBarrelFileSet(project);
    let relativeCount = 0;
    let externalCount = 0;
    let barrelCount = 0;
    let aliasCount = 0;
    // Single pass over every import declaration in the analysed files.
    for (const sf of project.getSourceFiles()) for (const imp of sf.getImportDeclarations()) {
      const specifier = imp.getModuleSpecifierValue();
      if (imp.isModuleSpecifierRelative()) {
        relativeCount++;
        // A relative import counts as a barrel import if any of its
        // candidate resolutions is a known barrel file.
        if (resolveRelativeImport(sf.getFilePath(), specifier).some((c) => barrelFiles.has(c))) barrelCount++;
      } else {
        externalCount++;
        // hasPathAlias is a sibling helper; presumably detects "@/"-style
        // specifiers (see the pattern text below) — TODO confirm.
        if (hasPathAlias(specifier)) aliasCount++;
      }
    }
    const totalImports = relativeCount + externalCount;
    if (totalImports === 0) return [];
    const entries = [];
    // Glob-style evidence: import style is a project-wide property,
    // not attributable to a single file/line.
    const evidence = [{
      file: "src/**/*.ts",
      line: null
    }];
    const relRatio = relativeCount / totalImports;
    let orgPattern;
    if (relRatio >= .75) orgPattern = "Predominantly relative imports";
    else if (relRatio <= .25) orgPattern = "Predominantly external imports";
    else orgPattern = "Mix of relative and external imports";
    // Confidence scales with sample size: 0.5 base, +0.45 at >=100 imports, capped at 0.95.
    const sizeConfidence = Math.min(.95, .5 + totalImports / 100 * .45);
    entries.push({
      category: "imports",
      pattern: orgPattern,
      confidence: sizeConfidence,
      evidence,
      metadata: {
        relativeCount,
        externalCount,
        totalImports,
        relativeRatio: Math.round(relRatio * 100) / 100
      }
    });
    if (barrelCount > 0) {
      // barrelCount > 0 implies relativeCount > 0, so no division by zero.
      const barrelRatio = barrelCount / relativeCount;
      entries.push({
        category: "imports",
        pattern: "Uses barrel file (index) imports",
        confidence: Math.min(.95, .5 + barrelRatio * .45),
        evidence,
        metadata: {
          barrelCount,
          relativeCount
        }
      });
    }
    // Any alias usage at all is treated as definitive (confidence 1).
    if (aliasCount > 0) entries.push({
      category: "imports",
      pattern: "Uses path aliases (@/ prefix)",
      confidence: 1,
      evidence,
      metadata: { aliasCount }
    });
    return entries;
  }
};
|
|
949
|
+
|
|
950
|
+
//#endregion
|
|
951
|
+
//#region src/extractors/code/error-handling.ts
|
|
952
|
+
/**
 * AST-based error-handling extractor: counts try/catch statements and
 * classes extending Error across up to maxFilesForAst source files.
 * Complements the semantic (search-based) error-handling extractor.
 */
const staticErrorHandlingExtractor = {
  name: "static-error-handling",
  async extract(ctx) {
    const files = await listProjectFiles({
      cwd: ctx.projectPath,
      extensions: [
        "ts",
        "tsx",
        "js",
        "jsx"
      ]
    });
    if (files.length === 0) return [];
    // Cap AST work for large repos; default 200 files.
    const maxFiles = ctx.options?.maxFilesForAst ?? 200;
    const filesToAnalyse = files.slice(0, maxFiles);
    const project = new Project({
      compilerOptions: {
        allowJs: true,
        noEmit: true
      },
      skipFileDependencyResolution: true
    });
    // NOTE(review): naive join; assumes projectPath has no trailing slash.
    const absPaths = filesToAnalyse.map((f) => `${ctx.projectPath}/${f}`);
    project.addSourceFilesAtPaths(absPaths);
    let tryCatchFileCount = 0;
    let tryCatchTotalCount = 0;
    let customErrorClassCount = 0;
    // Evidence lists are capped at 5 files each.
    const tryCatchEvidence = [];
    const customErrorEvidence = [];
    for (const sf of project.getSourceFiles()) {
      // Strip the project prefix to report repo-relative evidence paths.
      const relPath = sf.getFilePath().replace(ctx.projectPath + "/", "");
      const tryStatements = sf.getDescendantsOfKind(SyntaxKind.TryStatement);
      if (tryStatements.length > 0) {
        tryCatchFileCount++;
        tryCatchTotalCount += tryStatements.length;
        if (tryCatchEvidence.length < 5) tryCatchEvidence.push(relPath);
      }
      for (const cls of sf.getClasses()) {
        const heritage = cls.getExtends();
        // Word-boundary match: catches `extends Error`, `extends TypeError`?
        // No — \bError\b matches "Error" as a whole word inside the heritage
        // text, so `extends HttpError` does NOT match but `extends Error` does.
        if (heritage && /\bError\b/.test(heritage.getText())) {
          customErrorClassCount++;
          if (customErrorEvidence.length < 5) customErrorEvidence.push(relPath);
        }
      }
    }
    const entries = [];
    // files.length > 0 was checked above, so totalFiles >= 1 (no div-by-zero).
    const totalFiles = filesToAnalyse.length;
    // Require at least 2 try statements before claiming a convention.
    if (tryCatchTotalCount >= 2) {
      const fileSpread = tryCatchFileCount / totalFiles;
      // Up to +0.2 for raw density (0.05 per try statement).
      const densityBoost = Math.min(.2, tryCatchTotalCount * .05);
      const confidence = Math.min(.95, .5 + fileSpread * .35 + densityBoost);
      entries.push({
        category: "error_handling",
        pattern: "try/catch imperative error handling",
        confidence,
        evidence: tryCatchEvidence.map((file) => ({
          file,
          line: null
        })),
        metadata: {
          style: "try-catch",
          fileCount: tryCatchFileCount,
          totalCount: tryCatchTotalCount
        }
      });
    }
    // A single custom Error subclass is enough to report the pattern.
    if (customErrorClassCount >= 1) {
      const confidence = Math.min(.95, .5 + customErrorClassCount / totalFiles * .45);
      entries.push({
        category: "error_handling",
        pattern: "custom error class hierarchy",
        confidence,
        evidence: customErrorEvidence.map((file) => ({
          file,
          line: null
        })),
        metadata: {
          style: "custom-error-class",
          classCount: customErrorClassCount
        }
      });
    }
    return entries;
  }
};
|
|
1037
|
+
|
|
1038
|
+
//#endregion
|
|
1039
|
+
//#region src/core/ez-search-bridge.ts
|
|
1040
|
+
/**
|
|
1041
|
+
* ez-search bridge — thin adapter over @ez-corp/ez-search.
|
|
1042
|
+
*
|
|
1043
|
+
* This is the ONLY file that imports from @ez-corp/ez-search.
|
|
1044
|
+
* All other modules interact with ez-search via the EzSearchBridge interface.
|
|
1045
|
+
*/
|
|
1046
|
+
/**
 * Concrete EzSearchBridge backed by @ez-corp/ez-search.
 * Instances are bound to a project directory at construction time.
 */
var EzSearchBridgeImpl = class {
  constructor(projectPath) {
    // Project directory used by search(); see NOTE on hasIndex below.
    this.projectPath = projectPath;
  }
  // NOTE(review): takes projectPath as a parameter instead of using
  // this.projectPath — callers in this file always pass the bound path,
  // but the asymmetry with search() looks accidental; confirm intent.
  async hasIndex(projectPath) {
    // An index is assumed to exist iff the .ez-search directory exists.
    return existsSync(join(projectPath, ".ez-search"));
  }
  async ensureIndex(projectPath) {
    // Idempotent: only builds the index when none is present.
    if (await this.hasIndex(projectPath)) return;
    await index(projectPath);
  }
  /**
   * Run a semantic query and return up to k results sorted by score (desc).
   * Returns [] (instead of throwing) when no index exists; other
   * EzSearch failures propagate to the caller.
   */
  async search(searchQuery, options = {}) {
    const { k = 10 } = options;
    let raw;
    try {
      raw = await query(searchQuery, {
        topK: k,
        projectDir: this.projectPath,
        autoIndex: false
      });
    } catch (err) {
      // Missing index is an expected condition, not an error.
      if (err instanceof EzSearchError && err.code === "NO_INDEX") return [];
      throw err;
    }
    // Merge code hits and text hits into one ranked list.
    const results = [];
    for (const hit of raw.code) results.push({
      file: hit.file,
      chunk: hit.text,
      score: hit.score
    });
    for (const hit of raw.text) results.push({
      file: hit.file,
      chunk: hit.text,
      score: hit.score
    });
    results.sort((a, b) => b.score - a.score);
    // raw may contain up to 2*k hits (code + text), so re-truncate to k.
    return results.slice(0, k);
  }
  // Embedding is intentionally unimplemented; search() is the only
  // supported retrieval path for now.
  async embed(_text) {
    throw new Error("embed() is not yet supported by the ez-search bridge. Use search() for semantic retrieval.");
  }
};
|
|
1088
|
+
/**
 * Factory: construct an EzSearchBridge bound to the given project directory.
 * Async for interface symmetry with the rest of the bridge API.
 */
async function createBridge(projectPath) {
  const bridge = new EzSearchBridgeImpl(projectPath);
  return bridge;
}
|
|
1094
|
+
|
|
1095
|
+
//#endregion
|
|
1096
|
+
//#region src/extractors/semantic/error-handling.ts
|
|
1097
|
+
/**
 * Error-handling styles recognised by the semantic extractor.
 * Each entry pairs a metadata `style` tag and human-readable `pattern`
 * label with a content predicate run against concatenated search chunks.
 */
const PATTERNS = [
  {
    style: "try-catch",
    pattern: "try/catch imperative error handling",
    // Requires BOTH a try block opener and a catch clause somewhere in the content.
    test: (content) => /\btry\s*\{/.test(content) && /\bcatch\s*\(/.test(content)
  },
  {
    style: "result-type",
    pattern: "Result/Either functional error handling",
    // Matches Result<T> generics, Ok()/Err() constructors, isOk/isErr guards,
    // or any mention of the neverthrow library.
    test: (content) => /\bResult<|\bOk\(|\bErr\(|\bisOk\b|\bisErr\b|\bneverthrow\b/.test(content)
  },
  {
    style: "custom-error-class",
    pattern: "custom error class hierarchy",
    // Matches `class FooError` declarations or `new FooError(` constructions.
    test: (content) => /class\s+\w+Error\b|\bnew\s+\w+Error\(/.test(content)
  },
  {
    style: "error-boundary",
    pattern: "React error boundary components",
    // Matches the ErrorBoundary component name or the componentDidCatch lifecycle hook.
    test: (content) => /\bErrorBoundary\b|\bcomponentDidCatch\b/.test(content)
  }
];
|
|
1119
|
+
/**
 * Semantic error-handling extractor: uses the ez-search index to retrieve
 * error-related chunks, then classifies them against PATTERNS.
 * Returns [] when no search index exists.
 */
const errorHandlingExtractor = {
  name: "error-handling",
  async extract(ctx) {
    const bridge = await createBridge(ctx.projectPath);
    if (!await bridge.hasIndex(ctx.projectPath)) return [];
    // Three targeted queries run in parallel; result sets may overlap.
    const [general, resultType, customErrors] = await Promise.all([
      bridge.search("error handling try catch throw exception", { k: 30 }),
      bridge.search("Result Ok Err return error value", { k: 20 }),
      bridge.search("custom error class extends Error", { k: 20 })
    ]);
    // Concatenate all chunks per file so each PATTERNS predicate sees the
    // file's combined retrieved content.
    const fileContentMap = /* @__PURE__ */ new Map();
    for (const result of [
      ...general,
      ...resultType,
      ...customErrors
    ]) {
      const existing = fileContentMap.get(result.file) ?? "";
      fileContentMap.set(result.file, existing + "\n" + result.chunk);
    }
    const totalUniqueFiles = fileContentMap.size;
    if (totalUniqueFiles === 0) return [];
    const entries = [];
    for (const patternDef of PATTERNS) {
      const matchingFiles = [];
      for (const [file, content] of fileContentMap) if (patternDef.test(content)) matchingFiles.push(file);
      // Require at least 2 matching files before claiming a convention.
      if (matchingFiles.length < 2) continue;
      // 0.5 base + up to 0.45 proportional to match spread, capped at 0.95.
      const confidence = Math.min(.95, .5 + matchingFiles.length / totalUniqueFiles * .45);
      entries.push({
        category: "error_handling",
        pattern: patternDef.pattern,
        confidence,
        // Evidence capped at 5 files; no line numbers available from chunks.
        evidence: matchingFiles.slice(0, 5).map((file) => ({
          file,
          line: null
        })),
        metadata: {
          style: patternDef.style,
          fileCount: matchingFiles.length
        }
      });
    }
    return entries;
  }
};
|
|
1163
|
+
|
|
1164
|
+
//#endregion
|
|
1165
|
+
//#region src/extractors/semantic/architecture.ts
|
|
1166
|
+
/**
 * Derive the set of top-level source directories from a list of
 * repo-relative file paths. Files under src/ contribute "src/<child>"
 * (only when nested at least one level deeper); files elsewhere
 * contribute their first path segment. Bare filenames are ignored.
 */
function extractSourceDirs(files) {
  const dirs = new Set();
  for (const filePath of files) {
    const segments = filePath.split("/");
    // A file with no directory component tells us nothing.
    if (segments.length < 2) continue;
    const [head, second] = segments;
    if (head === "src") {
      // "src/foo.ts" is skipped; "src/foo/bar.ts" yields "src/foo".
      if (segments.length >= 3) dirs.add(`src/${second}`);
    } else {
      dirs.add(head);
    }
  }
  return dirs;
}
|
|
1177
|
+
/** Lower-cased final path segment of a directory path, for name matching. */
function normalise(dir) {
  const segments = dir.split("/");
  const lastSegment = segments[segments.length - 1];
  return lastSegment.toLowerCase();
}
|
|
1181
|
+
/**
 * Detect an MVC layout: the source dirs must contain at least two DISTINCT
 * MVC roles (model/view/controller/route, singular or plural — plurals are
 * collapsed before counting). Returns the matching dirs, or [] if fewer
 * than two roles are present.
 */
function detectMVC(sourceDirs) {
  const mvcNames = new Set([
    "model",
    "models",
    "view",
    "views",
    "controller",
    "controllers",
    "route",
    "routes"
  ]);
  // Lower-cased final path segment (same normalisation as `normalise`).
  const lastSegmentLower = (dir) => dir.split("/").pop().toLowerCase();
  const found = [];
  for (const dir of sourceDirs) {
    if (mvcNames.has(lastSegmentLower(dir))) found.push(dir);
  }
  // Collapse plural/singular ("models" -> "model") so "model" + "models"
  // counts as one role, not two.
  const distinctRoles = new Set(found.map((d) => lastSegmentLower(d).replace(/s$/, "")));
  return distinctRoles.size >= 2 ? found : [];
}
|
|
1197
|
+
/**
 * Detect a feature-based layout: at least 5 files must live under a
 * features/, modules/, or pages/ directory (any nesting depth, with a
 * parent directory before it). Returns the distinct feature-root paths
 * (e.g. "src/features"), or [] below the threshold.
 */
function detectFeatureBased(files) {
  const containsFeatureDir = /\/(features?|modules?|pages?)\//i;
  // Captures everything up to and including the first feature-dir segment.
  const leadingFeaturePath = /^(.*?\/(features?|modules?|pages?))\//i;
  const featureDirs = new Set();
  let matchingFileCount = 0;
  for (const filePath of files) {
    if (!containsFeatureDir.test(filePath)) continue;
    matchingFileCount++;
    const match = leadingFeaturePath.exec(filePath);
    if (match) featureDirs.add(match[1]);
  }
  if (matchingFileCount < 5) return [];
  return [...featureDirs];
}
|
|
1209
|
+
/**
 * Detect a layer-based (DDD / hexagonal / clean-architecture) layout:
 * at least two DISTINCT layer names must appear among the source dirs.
 * Unlike detectMVC, plural and singular forms count as separate names.
 * Returns the matching dirs, or [] below the threshold.
 */
function detectLayerBased(sourceDirs) {
  const layerNames = new Set([
    "domain",
    "application",
    "infrastructure",
    "service",
    "services",
    "repository",
    "repositories",
    "handler",
    "handlers",
    "usecase",
    "usecases",
    "core",
    "data",
    "presentation"
  ]);
  // Lower-cased final path segment (same normalisation as `normalise`).
  const lastSegmentLower = (dir) => dir.split("/").pop().toLowerCase();
  const found = [];
  for (const dir of sourceDirs) {
    if (layerNames.has(lastSegmentLower(dir))) found.push(dir);
  }
  const distinctNames = new Set(found.map((d) => lastSegmentLower(d)));
  return distinctNames.size >= 2 ? found : [];
}
|
|
1231
|
+
/**
 * Architecture extractor: classifies the repo layout via directory-name
 * heuristics (MVC > feature-based > layer-based, in that priority order)
 * and raises confidence when the ez-search index corroborates the finding.
 */
const architectureExtractor = {
  name: "architecture",
  async extract(ctx) {
    const files = await listProjectFiles({
      cwd: ctx.projectPath,
      extensions: [
        "ts",
        "js",
        "tsx",
        "jsx",
        "py",
        "rb",
        "go",
        "rs"
      ]
    });
    const sourceDirs = extractSourceDirs(files);
    const mvcDirs = detectMVC(sourceDirs);
    const featureDirs = detectFeatureBased(files);
    const layerDirs = detectLayerBased(sourceDirs);
    let detectedPattern = null;
    let detectedLayers = [];
    // Priority: MVC wins over feature-based, which wins over layer-based.
    if (mvcDirs.length > 0) {
      detectedPattern = "MVC";
      detectedLayers = mvcDirs;
    } else if (featureDirs.length > 0) {
      detectedPattern = "feature-based";
      detectedLayers = featureDirs;
    } else if (layerDirs.length > 0) {
      detectedPattern = "layer-based";
      detectedLayers = layerDirs;
    }
    if (!detectedPattern) return [];
    const bridge = await createBridge(ctx.projectPath);
    const hasIdx = await bridge.hasIndex(ctx.projectPath);
    // Base confidence from directory heuristics alone.
    let confidence = .7;
    const evidence = detectedLayers.slice(0, 3).map((dir) => ({
      file: dir,
      line: null
    }));
    if (hasIdx) {
      // Semantic corroboration: any hit bumps confidence to 0.85 and
      // contributes up to 2 extra evidence files.
      const searchResults = await bridge.search("model view controller route handler service repository", { k: 20 });
      if (searchResults.length > 0) {
        confidence = .85;
        for (const r of searchResults.slice(0, 2)) evidence.push({
          file: r.file,
          line: null
        });
      }
    }
    // Drop duplicate evidence files (search hits may repeat layer dirs).
    const seen = /* @__PURE__ */ new Set();
    const deduped = evidence.filter(({ file }) => {
      if (seen.has(file)) return false;
      seen.add(file);
      return true;
    });
    return [{
      category: "architecture",
      pattern: patternLabel(detectedPattern),
      confidence,
      evidence: deduped.slice(0, 5),
      metadata: {
        architecturePattern: detectedPattern,
        layers: detectedLayers
      }
    }];
  }
};
|
|
1299
|
+
/**
 * Human-readable label for a detected architecture pattern id.
 * Unknown ids yield undefined (callers only pass the three known ids).
 */
function patternLabel(p) {
  const labels = new Map([
    ["MVC", "MVC architecture pattern"],
    ["feature-based", "Feature-based architecture"],
    ["layer-based", "Layer-based architecture"]
  ]);
  return labels.get(p);
}
|
|
1306
|
+
|
|
1307
|
+
//#endregion
|
|
1308
|
+
//#region src/core/pipeline.ts
|
|
1309
|
+
/**
 * All extractors run by the pipeline, grouped roughly as:
 * manifest/config extractors first, then structure/naming, then the
 * AST- and search-based code extractors. Array order matters downstream:
 * populateStackInfo/populateArchitectureInfo use first-match-wins over
 * the conventions these produce.
 */
const ALL_EXTRACTORS = [
  packageJsonExtractor,
  lockfileExtractor,
  tsconfigExtractor,
  goModExtractor,
  cargoTomlExtractor,
  ciExtractor,
  projectStructureExtractor,
  namingExtractor,
  importsExtractor,
  staticErrorHandlingExtractor,
  errorHandlingExtractor,
  architectureExtractor
];
|
|
1323
|
+
/**
 * Remove duplicate evidence refs, keyed on file + line (null lines keyed
 * as the string "null"). The first occurrence of each key is kept and
 * original order is preserved.
 */
function deduplicateEvidence(evidence) {
  const byKey = new Map();
  for (const ref of evidence) {
    const key = `${ref.file}:${ref.line ?? "null"}`;
    // Map insertion order preserves first-seen ordering.
    if (!byKey.has(key)) byKey.set(key, ref);
  }
  return [...byKey.values()];
}
|
|
1335
|
+
/**
 * Collapse conventions sharing the same category+pattern key.
 * The duplicate with the higher confidence supplies the entry fields
 * (strictly-greater wins; ties keep the earlier entry), and the evidence
 * arrays of both are merged and deduplicated (earlier entry's first).
 */
function deduplicateConventions(conventions) {
  const grouped = new Map();
  for (const entry of conventions) {
    const key = `${entry.category}:${entry.pattern}`;
    const existing = grouped.get(key);
    if (!existing) {
      grouped.set(key, entry);
      continue;
    }
    const winner = entry.confidence > existing.confidence ? entry : existing;
    const mergedEvidence = deduplicateEvidence([...existing.evidence, ...entry.evidence]);
    grouped.set(key, {
      ...winner,
      evidence: mergedEvidence
    });
  }
  return [...grouped.values()];
}
|
|
1357
|
+
/**
 * Fill empty StackInfo fields from convention metadata in a pure
 * post-extraction pass. Conventions are scanned in array order and the
 * first usable value wins per field; already-populated fields are never
 * overwritten. Returns a new registry (the input is not mutated).
 */
function populateStackInfo(registry) {
  const stack = { ...registry.stack };
  const isString = (value) => typeof value === "string";
  for (const entry of registry.conventions) {
    const meta = entry.metadata ?? {};
    if (entry.category === "testing") {
      // Testing conventions may only contribute the test runner.
      if (!stack.testRunner && isString(meta.testRunner)) stack.testRunner = meta.testRunner;
      continue;
    }
    if (entry.category !== "stack") continue;
    // "unknown" is the sentinel for an unset language.
    const languageUnset = !stack.language || stack.language === "unknown";
    if (languageUnset && isString(meta.language)) stack.language = meta.language;
    if (!stack.framework && isString(meta.framework)) stack.framework = meta.framework;
    if (!stack.testRunner && isString(meta.testRunner)) stack.testRunner = meta.testRunner;
    if (!stack.packageManager && isString(meta.packageManager)) stack.packageManager = meta.packageManager;
    if (!stack.buildTool) {
      if (isString(meta.buildTool)) {
        stack.buildTool = meta.buildTool;
      } else if (meta.scriptName === "build" && isString(meta.command)) {
        // Fall back to the executable of the "build" script's command.
        stack.buildTool = meta.command.split(" ")[0];
      }
    }
  }
  return {
    ...registry,
    stack
  };
}
|
|
1386
|
+
/**
 * Fill empty ArchitectureInfo fields from architecture-category
 * conventions. The first convention carrying the relevant metadata wins;
 * existing non-empty values are never overwritten. Returns a new
 * registry (the input is not mutated).
 */
function populateArchitectureInfo(registry) {
  const architecture = { ...registry.architecture };
  for (const entry of registry.conventions) {
    if (entry.category !== "architecture") continue;
    const meta = entry.metadata;
    if (!architecture.pattern && typeof meta?.architecturePattern === "string") {
      architecture.pattern = meta.architecturePattern;
    }
    if (architecture.layers.length === 0 && Array.isArray(meta?.layers)) {
      architecture.layers = meta.layers;
    }
  }
  return {
    ...registry,
    architecture
  };
}
|
|
1401
|
+
/**
 * Run the full extraction pipeline against the given project path.
 *
 * 1. Runs all extractors (runExtractors is a sibling helper — presumably
 *    it settles each extractor independently; TODO confirm its semantics)
 * 2. Deduplicates conventions by category+pattern (higher confidence wins)
 * 3. Populates StackInfo from convention metadata
 * 4. Populates ArchitectureInfo from convention metadata
 * 5. Validates the result against ConventionRegistrySchema and returns it
 *
 * @param projectPath absolute path of the project to analyse
 * @param options     extractor options (e.g. maxFilesForAst)
 * @throws ZodError if the assembled registry fails schema validation
 */
async function extractConventions(projectPath, options) {
  const populated = await runExtractors(ALL_EXTRACTORS, {
    projectPath,
    options
  }, createRegistry(projectPath));
  // Dedupe first so the populate passes see one entry per convention key.
  const withArch = populateArchitectureInfo(populateStackInfo({
    ...populated,
    conventions: deduplicateConventions(populated.conventions)
  }));
  return ConventionRegistrySchema.parse(withArch);
}
|
|
1421
|
+
|
|
1422
|
+
//#endregion
|
|
1423
|
+
//#region src/emitters/writer.ts
|
|
1424
|
+
const MARKER_START = "<!-- ez-context:start -->";
const MARKER_END = "<!-- ez-context:end -->";
/**
 * Write content into the marker-delimited section of filePath.
 *
 * Three paths:
 * 1. File does not exist: creates it with markers wrapping content.
 * 2. File exists but markers are missing, incomplete, or out of order:
 *    appends a fresh marker section at the end, preserving the file.
 * 3. File exists with a well-formed marker pair: splices new content
 *    between the markers, preserving everything outside them.
 *
 * @param filePath path of the file to create or update
 * @param content  text to place between the markers (written as-is)
 */
async function writeWithMarkers(filePath, content) {
  const wrapped = `${MARKER_START}\n${content}\n${MARKER_END}`;
  if (!existsSync(filePath)) {
    await writeFile(filePath, wrapped + "\n", "utf-8");
    return;
  }
  const existing = await readFile(filePath, "utf-8");
  const startIdx = existing.indexOf(MARKER_START);
  const endIdx = existing.indexOf(MARKER_END);
  // Treat reversed markers (endIdx < startIdx) the same as missing ones:
  // splicing across them would corrupt the file.
  if (startIdx === -1 || endIdx === -1 || endIdx < startIdx) {
    // Keep exactly one blank line between the existing content and the section.
    await writeFile(filePath, existing + (existing.endsWith("\n") ? "\n" : "\n\n") + wrapped + "\n", "utf-8");
    return;
  }
  const before = existing.slice(0, startIdx);
  // Use MARKER_END.length rather than a hard-coded offset (was the magic
  // number 23) so the splice stays correct if the marker text changes.
  const after = existing.slice(endIdx + MARKER_END.length);
  await writeFile(filePath, before + wrapped + after, "utf-8");
}
|
|
1452
|
+
|
|
1453
|
+
//#endregion
|
|
1454
|
+
//#region src/emitters/render-helpers.ts
|
|
1455
|
+
/**
 * Pull script commands out of convention entries: every entry whose
 * metadata carries both a string scriptName and a string command yields
 * a { scriptName, command } pair, in input order.
 * Shared by the agents-md emitter and renderConventionsBody.
 */
function extractCommands(filtered) {
  const commands = [];
  for (const { metadata } of filtered) {
    if (!metadata) continue;
    const { scriptName, command } = metadata;
    if (typeof scriptName !== "string" || typeof command !== "string") continue;
    commands.push({ scriptName, command });
  }
  return commands;
}
|
|
1470
|
+
/**
 * Decide whether a stack-category convention deserves a line in the
 * Conventions section. Entries whose metadata mirrors an already-populated
 * StackInfo field (language/framework/buildTool/packageManager), or that
 * describe a script, are suppressed as redundant; everything else
 * (e.g. "TypeScript strict mode") is worth showing.
 */
function isStackConventionWorthy(entry, stack) {
  if (entry.category !== "stack") return false;
  const meta = entry.metadata ?? {};
  const hasString = (key) => typeof meta[key] === "string";
  // Script entries are always covered by the Commands section.
  if (hasString("scriptName")) return false;
  // Suppress only when the corresponding StackInfo field is populated.
  if (hasString("language") && stack.language !== "unknown") return false;
  if (hasString("framework") && stack.framework) return false;
  if (hasString("buildTool") && stack.buildTool) return false;
  if (hasString("packageManager") && stack.packageManager) return false;
  return true;
}
|
|
1485
|
+
/**
 * True when a convention entry duplicates information already rendered
 * elsewhere: "Test runner: X" testing entries duplicate Stack > Test
 * Runner, and entries carrying metadata.scriptName duplicate Commands.
 */
function isRedundantConvention(entry) {
  const echoesTestRunner = entry.category === "testing" && entry.pattern.startsWith("Test runner:");
  const echoesCommand = typeof entry.metadata?.["scriptName"] === "string";
  return echoesTestRunner || echoesCommand;
}
|
|
1495
|
+
/**
 * Render the shared conventions body as an array of markdown lines:
 * Stack, Architecture, Conventions, Commands — each section emitted only
 * when it has content, and each followed by one blank line.
 * Used by cursor-mdc, skill-md, rulesync-md, ruler-md, copilot-md.
 *
 * @param registry            the populated ConventionRegistry
 * @param confidenceThreshold minimum confidence for a convention to appear
 * @returns markdown lines (no trailing join; caller assembles the file)
 */
function renderConventionsBody(registry, confidenceThreshold) {
  const filtered = registry.conventions.filter((c) => c.confidence >= confidenceThreshold);
  const lines = [];
  const s = registry.stack;
  // Stack section: emitted if any StackInfo field is populated.
  if (s.language !== "unknown" || Boolean(s.framework) || Boolean(s.buildTool) || Boolean(s.packageManager) || Boolean(s.testRunner)) {
    lines.push("## Stack");
    if (s.language !== "unknown") lines.push(`- Language: ${s.language}`);
    if (s.framework) lines.push(`- Framework: ${s.framework}`);
    if (s.buildTool) lines.push(`- Build: ${s.buildTool}`);
    if (s.packageManager) lines.push(`- Package Manager: ${s.packageManager}`);
    if (s.testRunner) lines.push(`- Test Runner: ${s.testRunner}`);
    lines.push("");
  }
  const a = registry.architecture;
  // Architecture section: emitted if a pattern or any layers are known.
  if (Boolean(a.pattern) || (a.layers?.length ?? 0) > 0) {
    lines.push("## Architecture");
    if (a.pattern) lines.push(`- Pattern: ${a.pattern}`);
    if (a.layers && a.layers.length > 0) lines.push(`- Layers: ${a.layers.join(", ")}`);
    lines.push("");
  }
  // Group the remaining conventions by category, skipping entries already
  // represented by the Stack/Architecture/Commands sections.
  const categoryMap = /* @__PURE__ */ new Map();
  for (const entry of filtered) {
    if (entry.category === "architecture") continue;
    if (entry.category === "stack" && !isStackConventionWorthy(entry, s)) continue;
    if (isRedundantConvention(entry)) continue;
    const list = categoryMap.get(entry.category) ?? [];
    list.push(entry.pattern);
    categoryMap.set(entry.category, list);
  }
  if (categoryMap.size > 0) {
    lines.push("## Conventions");
    for (const [category, patterns] of categoryMap) for (const pattern of patterns) lines.push(`- **${category}**: ${pattern}`);
    lines.push("");
  }
  // Commands section: script entries filtered out above reappear here.
  const commands = extractCommands(filtered);
  if (commands.length > 0) {
    lines.push("## Commands");
    for (const cmd of commands) lines.push(`- \`${cmd.scriptName}\`: \`${cmd.command}\``);
    lines.push("");
  }
  return lines;
}
|
|
1541
|
+
|
|
1542
|
+
//#endregion
|
|
1543
|
+
//#region src/emitters/claude-md.ts
|
|
1544
|
+
/**
 * Prepare template data for the CLAUDE.md emitter: conventions above the
 * confidence threshold, grouped by category, with stack/architecture
 * entries and scripts dropped (they render in their own sections).
 */
function prepData$1(registry, confidenceThreshold) {
  const filtered = registry.conventions.filter((c) => c.confidence >= confidenceThreshold);
  const categoryMap = /* @__PURE__ */ new Map();
  for (const entry of filtered) {
    // Architecture entries render in the dedicated Architecture section.
    if (entry.category === "architecture") continue;
    if (entry.category === "stack") {
      // Unlike isStackConventionWorthy, this drops ALL field-mapped stack
      // entries regardless of whether StackInfo is populated.
      const meta = entry.metadata ?? {};
      if (typeof meta["language"] === "string") continue;
      if (typeof meta["framework"] === "string") continue;
      if (typeof meta["buildTool"] === "string") continue;
      if (typeof meta["packageManager"] === "string") continue;
      if (typeof meta["scriptName"] === "string") continue;
    }
    if (isRedundantConvention(entry)) continue;
    const list = categoryMap.get(entry.category) ?? [];
    list.push(entry);
    categoryMap.set(entry.category, list);
  }
  const conventionGroups = [];
  for (const [category, entries] of categoryMap) conventionGroups.push({
    category,
    entries
  });
  // Section flags let the renderer skip empty headings.
  const hasStack = registry.stack.language !== "unknown" || Boolean(registry.stack.framework) || Boolean(registry.stack.buildTool) || Boolean(registry.stack.packageManager) || Boolean(registry.stack.testRunner);
  const hasArchitecture = Boolean(registry.architecture.pattern) || (registry.architecture.layers?.length ?? 0) > 0;
  return {
    stack: registry.stack,
    architecture: registry.architecture,
    conventionGroups,
    hasStack,
    hasArchitecture
  };
}
|
|
1577
|
+
/**
 * Render the CLAUDE.md document: "# Project Context" header followed by
 * Stack, Conventions, and Architecture sections (each only when it has
 * content). Returns the full markdown text with a trailing newline.
 */
function renderClaudeMd(registry, confidenceThreshold) {
  const data = prepData$1(registry, confidenceThreshold);
  const lines = [];
  lines.push("# Project Context");
  if (data.hasStack) {
    lines.push("");
    lines.push("## Stack");
    if (data.stack.language !== "unknown") lines.push(`- Language: ${data.stack.language}`);
    if (data.stack.framework) lines.push(`- Framework: ${data.stack.framework}`);
    if (data.stack.buildTool) lines.push(`- Build: ${data.stack.buildTool}`);
    if (data.stack.packageManager) lines.push(`- Package Manager: ${data.stack.packageManager}`);
    if (data.stack.testRunner) lines.push(`- Test Runner: ${data.stack.testRunner}`);
  }
  if (data.conventionGroups.length > 0) {
    lines.push("");
    lines.push("## Conventions");
    // One bullet per convention, prefixed with its bolded category.
    for (const group of data.conventionGroups) for (const entry of group.entries) lines.push(`- **${group.category}**: ${entry.pattern}`);
  }
  if (data.hasArchitecture) {
    lines.push("");
    lines.push("## Architecture");
    if (data.architecture.pattern) lines.push(`- Pattern: ${data.architecture.pattern}`);
    if (data.architecture.layers && data.architecture.layers.length > 0) lines.push(`- Layers: ${data.architecture.layers.join(", ")}`);
  }
  return lines.join("\n") + "\n";
}
|
|
1603
|
+
|
|
1604
|
+
//#endregion
|
|
1605
|
+
//#region src/emitters/agents-md.ts
|
|
1606
|
+
/**
 * Prepare template data for the AGENTS.md emitter: script commands,
 * testing/naming/import conventions, git-related conventions, and flags
 * saying which sections have content.
 */
function prepData(registry, confidenceThreshold) {
  const filtered = registry.conventions.filter((c) => c.confidence >= confidenceThreshold);
  const commands = extractCommands(filtered);
  const byCategory = (cat) => filtered.filter((c) => c.category === cat);
  const testingConventions = byCategory("testing");
  const namingConventions = byCategory("naming");
  const importConventions = byCategory("imports");
  // Git-related: any pattern mentioning "git", plus "other"-category
  // patterns mentioning "commit" (&& binds tighter than ||, so the
  // "git" test applies to every category).
  const gitConventions = filtered.filter((c) => c.pattern.toLowerCase().includes("git") || c.category === "other" && c.pattern.toLowerCase().includes("commit"));
  const hasTesting = Boolean(registry.stack.testRunner) || testingConventions.length > 0;
  const hasProjectStructure = Boolean(registry.architecture.pattern) || (registry.architecture.layers?.length ?? 0) > 0;
  const hasCodeStyle = namingConventions.length > 0 || importConventions.length > 0;
  return {
    commands,
    testRunner: registry.stack.testRunner ?? null,
    testingConventions,
    namingConventions,
    importConventions,
    gitConventions,
    architecture: registry.architecture,
    hasTesting,
    hasProjectStructure,
    hasCodeStyle
  };
}
|
|
1630
|
+
/**
 * Render AGENTS.md content from a convention registry.
 * Sections with no qualifying content are skipped; the Boundaries
 * section is always emitted.
 */
function renderAgentsMd(registry, confidenceThreshold) {
	const view = prepData(registry, confidenceThreshold);
	const out = ["# AGENTS.md"];
	if (view.commands.length > 0) {
		out.push("", "## Commands");
		for (const { scriptName, command } of view.commands) {
			out.push(`- \`${scriptName}\`: \`${command}\``);
		}
	}
	if (view.hasTesting) {
		out.push("", "## Testing");
		if (view.testRunner) out.push(`- Test runner: ${view.testRunner}`);
		for (const conv of view.testingConventions) out.push(`- ${conv.pattern}`);
	}
	if (view.hasProjectStructure) {
		out.push("", "## Project Structure");
		const { pattern, layers } = view.architecture;
		if (pattern) out.push(`- Architecture: ${pattern}`);
		if (layers && layers.length > 0) out.push(`- Layers: ${layers.join(", ")}`);
	}
	if (view.hasCodeStyle) {
		out.push("", "## Code Style");
		for (const conv of view.namingConventions) out.push(`- **naming**: ${conv.pattern}`);
		for (const conv of view.importConventions) out.push(`- **imports**: ${conv.pattern}`);
	}
	if (view.gitConventions.length > 0) {
		out.push("", "## Git Workflow");
		for (const conv of view.gitConventions) out.push(`- ${conv.pattern}`);
	}
	out.push("", "## Boundaries", "- Do not modify auto-generated sections between ez-context markers");
	return out.join("\n") + "\n";
}
|
|
1667
|
+
|
|
1668
|
+
//#endregion
|
|
1669
|
+
//#region src/emitters/cursor-mdc.ts
|
|
1670
|
+
/**
 * Render a Cursor MDC rule file (.cursor/rules/ez-context.mdc).
 *
 * Output shape: YAML frontmatter followed by a markdown body.
 * - description: surfaced in the Cursor UI
 * - globs: empty string (not null/omitted), per Cursor docs
 * - alwaysApply: true, so the conventions are always in context
 */
function renderCursorMdc(registry, confidenceThreshold) {
	const header = yaml.dump({
		description: "Project conventions extracted by ez-context",
		globs: "",
		alwaysApply: true
	}).trimEnd();
	const body = renderConventionsBody(registry, confidenceThreshold);
	// Strip trailing blank lines so the file ends with exactly one newline.
	while (body[body.length - 1] === "") body.pop();
	return ["---", header, "---", "", body.join("\n")].join("\n") + "\n";
}
|
|
1688
|
+
|
|
1689
|
+
//#endregion
|
|
1690
|
+
//#region src/emitters/copilot-md.ts
|
|
1691
|
+
/**
 * Render a GitHub Copilot instructions file (.github/copilot-instructions.md).
 *
 * Output shape: plain markdown with no YAML frontmatter. Copilot reads the
 * whole file; the HTML comment at the top pairs with writeWithMarkers for
 * idempotent regeneration.
 */
function renderCopilotMd(registry, confidenceThreshold) {
	const body = renderConventionsBody(registry, confidenceThreshold);
	// Strip trailing blank lines so the file ends with exactly one newline.
	while (body[body.length - 1] === "") body.pop();
	const out = [
		"<!-- Generated by ez-context. Do not edit between markers. -->",
		"",
		"# Copilot Instructions",
		"",
		...body
	];
	return out.join("\n") + "\n";
}
|
|
1709
|
+
|
|
1710
|
+
//#endregion
|
|
1711
|
+
//#region src/emitters/skill-md.ts
|
|
1712
|
+
/**
 * Render a SKILL.md module file (.skills/ez-context/SKILL.md).
 *
 * Output shape: YAML frontmatter followed by a markdown body.
 * - name: must equal the containing directory name ("ez-context")
 * - description: max 1024 chars; states both what the skill is and when to use it
 * Body is kept under 5000 tokens (~3750 words) per the SKILL.md spec.
 */
function renderSkillMd(registry, confidenceThreshold) {
	const header = yaml.dump({
		name: "ez-context",
		description: "Project conventions and coding standards for this codebase. Use when writing new code, reviewing patterns, or understanding project architecture."
	}).trimEnd();
	const body = renderConventionsBody(registry, confidenceThreshold);
	// Strip trailing blank lines so the file ends with exactly one newline.
	while (body[body.length - 1] === "") body.pop();
	return ["---", header, "---", "", body.join("\n")].join("\n") + "\n";
}
|
|
1729
|
+
|
|
1730
|
+
//#endregion
|
|
1731
|
+
//#region src/emitters/rulesync-md.ts
|
|
1732
|
+
/**
 * Render a Rulesync rule file (.rulesync/rules/ez-context.md).
 *
 * Output shape: YAML frontmatter followed by a markdown body.
 * - targets: the AI tools Rulesync should distribute this rule to.
 * ez-context only writes INTO .rulesync/rules/; Rulesync handles fan-out.
 */
function renderRulesyncMd(registry, confidenceThreshold) {
	const header = yaml.dump({
		description: "Project conventions extracted by ez-context",
		targets: ["cursor", "copilot", "windsurf"]
	}).trimEnd();
	const body = renderConventionsBody(registry, confidenceThreshold);
	// Strip trailing blank lines so the file ends with exactly one newline.
	while (body[body.length - 1] === "") body.pop();
	return ["---", header, "---", "", body.join("\n")].join("\n") + "\n";
}
|
|
1752
|
+
|
|
1753
|
+
//#endregion
|
|
1754
|
+
//#region src/emitters/ruler-md.ts
|
|
1755
|
+
/**
 * Render a Ruler rule file (.ruler/ez-context.md).
 *
 * Output shape: plain markdown, no YAML frontmatter. Ruler recursively
 * discovers every .md file under .ruler/ and distributes it; ez-context
 * contributes a single additive conventions file.
 */
function renderRulerMd(registry, confidenceThreshold) {
	const body = renderConventionsBody(registry, confidenceThreshold);
	// Strip trailing blank lines so the file ends with exactly one newline.
	while (body[body.length - 1] === "") body.pop();
	return ["# Project Conventions (ez-context)", "", ...body].join("\n") + "\n";
}
|
|
1771
|
+
|
|
1772
|
+
//#endregion
|
|
1773
|
+
//#region src/emitters/index.ts
|
|
1774
|
+
// Registry of every supported output format: its renderer, the file it
// produces (relative to the output dir), and the write strategy used by emit():
// - "markers": written via writeWithMarkers — presumably merges content into
//   an existing file between ez-context markers; confirm against writeWithMarkers.
// - "direct": the file is overwritten wholesale with writeFile.
const FORMAT_EMITTER_MAP = {
	// CLAUDE.md at the output root; marker-merged so user content survives.
	claude: {
		render: renderClaudeMd,
		filename: "CLAUDE.md",
		strategy: "markers"
	},
	// AGENTS.md at the output root; marker-merged.
	agents: {
		render: renderAgentsMd,
		filename: "AGENTS.md",
		strategy: "markers"
	},
	// Cursor rule file; ez-context owns the whole file.
	cursor: {
		render: renderCursorMdc,
		filename: path.join(".cursor", "rules", "ez-context.mdc"),
		strategy: "direct"
	},
	// GitHub Copilot instructions; marker-merged since users may add their own text.
	copilot: {
		render: renderCopilotMd,
		filename: path.join(".github", "copilot-instructions.md"),
		strategy: "markers"
	},
	// SKILL.md module; ez-context owns the whole file.
	skills: {
		render: renderSkillMd,
		filename: path.join(".skills", "ez-context", "SKILL.md"),
		strategy: "direct"
	},
	// Rulesync rule file; marker-merged.
	rulesync: {
		render: renderRulesyncMd,
		filename: path.join(".rulesync", "rules", "ez-context.md"),
		strategy: "markers"
	},
	// Ruler rule file; ez-context owns the whole file.
	ruler: {
		render: renderRulerMd,
		filename: path.join(".ruler", "ez-context.md"),
		strategy: "direct"
	}
};
|
|
1811
|
+
/**
 * Emit output files for the given ConventionRegistry.
 *
 * @param registry - the ConventionRegistry to render.
 * @param options - { confidenceThreshold?, formats?, outputDir, dryRun? }.
 *   formats defaults to ["claude", "agents"] for backward compatibility;
 *   confidenceThreshold defaults to 0.7.
 * @returns { rendered, claudeMd, agentsMd, filesWritten }. In dryRun mode
 *   the rendered content is returned and no files are written.
 * @throws {Error} if a requested format is not in FORMAT_EMITTER_MAP.
 */
async function emit(registry, options) {
	const threshold = options.confidenceThreshold ?? 0.7;
	const formats = options.formats ?? ["claude", "agents"];
	const rendered = {};
	for (const format of formats) {
		const entry = FORMAT_EMITTER_MAP[format];
		// Fail fast with a descriptive error instead of the opaque TypeError
		// that dereferencing `undefined.render` would otherwise produce.
		if (!entry) throw new Error(`Unknown output format: ${String(format)}. Supported formats: ${Object.keys(FORMAT_EMITTER_MAP).join(", ")}`);
		rendered[format] = entry.render(registry, threshold);
	}
	// claudeMd/agentsMd are kept on the result for backward compatibility;
	// they are empty strings when the corresponding format was not requested.
	const claudeMd = rendered["claude"] ?? "";
	const agentsMd = rendered["agents"] ?? "";
	if (options.dryRun) return {
		rendered,
		claudeMd,
		agentsMd,
		filesWritten: []
	};
	const filesWritten = [];
	for (const format of formats) {
		const entry = FORMAT_EMITTER_MAP[format];
		const filePath = path.join(options.outputDir, entry.filename);
		// Some formats live in nested directories (.cursor/rules/, .github/, ...).
		await mkdir(path.dirname(filePath), { recursive: true });
		if (entry.strategy === "direct") await writeFile(filePath, rendered[format], "utf-8");
		else await writeWithMarkers(filePath, rendered[format]);
		filesWritten.push(filePath);
	}
	return {
		rendered,
		claudeMd,
		agentsMd,
		filesWritten
	};
}
|
|
1846
|
+
|
|
1847
|
+
//#endregion
|
|
1848
|
+
export { EvidenceRefSchema as _, writeWithMarkers as a, ALWAYS_SKIP as c, addConvention as d, createRegistry as f, ConventionRegistrySchema as g, ConventionEntrySchema as h, MARKER_START as i, listProjectFiles as l, ConventionCategorySchema as m, emit as n, extractConventions as o, ArchitectureInfoSchema as p, MARKER_END as r, createBridge as s, FORMAT_EMITTER_MAP as t, runExtractors as u, StackInfoSchema as v };
|
|
1849
|
+
//# sourceMappingURL=emitters-D6bP4xWs.js.map
|