ai-spec-dev 0.46.0 → 0.55.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +60 -30
  2. package/cli/commands/config.ts +129 -1
  3. package/cli/commands/create.ts +14 -0
  4. package/cli/commands/fix-history.ts +176 -0
  5. package/cli/commands/init.ts +36 -1
  6. package/cli/index.ts +2 -6
  7. package/cli/pipeline/helpers.ts +6 -0
  8. package/cli/pipeline/multi-repo.ts +291 -26
  9. package/cli/pipeline/single-repo.ts +103 -2
  10. package/cli/utils.ts +23 -0
  11. package/core/code-generator.ts +63 -14
  12. package/core/cross-stack-verifier.ts +395 -0
  13. package/core/fix-history.ts +333 -0
  14. package/core/import-fixer.ts +827 -0
  15. package/core/import-verifier.ts +569 -0
  16. package/core/knowledge-memory.ts +55 -6
  17. package/core/self-evaluator.ts +44 -7
  18. package/core/spec-generator.ts +3 -3
  19. package/core/types-generator.ts +2 -2
  20. package/dist/cli/index.js +3759 -2207
  21. package/dist/cli/index.js.map +1 -1
  22. package/dist/cli/index.mjs +3747 -2195
  23. package/dist/cli/index.mjs.map +1 -1
  24. package/dist/index.d.mts +14 -0
  25. package/dist/index.d.ts +14 -0
  26. package/dist/index.js +249 -128
  27. package/dist/index.js.map +1 -1
  28. package/dist/index.mjs +249 -128
  29. package/dist/index.mjs.map +1 -1
  30. package/package.json +2 -2
  31. package/tests/cross-stack-verifier.test.ts +301 -0
  32. package/tests/fix-history.test.ts +335 -0
  33. package/tests/import-fixer.test.ts +944 -0
  34. package/tests/import-verifier.test.ts +420 -0
  35. package/tests/knowledge-memory.test.ts +40 -0
  36. package/tests/self-evaluator.test.ts +97 -0
  37. package/cli/commands/model.ts +0 -152
  38. package/cli/commands/scan.ts +0 -99
  39. package/cli/commands/workspace.ts +0 -219
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-spec-dev",
3
- "version": "0.46.0",
3
+ "version": "0.55.0",
4
4
  "description": "AI-driven Development Orchestrator SDK & CLI",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -20,7 +20,7 @@
20
20
  "gemini",
21
21
  "claude"
22
22
  ],
23
- "author": "",
23
+ "author": "hongzhong",
24
24
  "license": "MIT",
25
25
  "dependencies": {
26
26
  "@anthropic-ai/sdk": "^0.38.0",
package/tests/cross-stack-verifier.test.ts ADDED
@@ -0,0 +1,301 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
2
+ import * as fs from "fs-extra";
3
+ import * as path from "path";
4
+ import * as os from "os";
5
+ import {
6
+ extractApiCallsFromSource,
7
+ normalizePathSegments,
8
+ pathsMatch,
9
+ verifyCrossStackContract,
10
+ } from "../core/cross-stack-verifier";
11
+ import type { SpecDSL } from "../core/dsl-types";
12
+
13
+ // ─── extractApiCallsFromSource ────────────────────────────────────────────────
14
+
15
+ describe("extractApiCallsFromSource", () => {
16
+ it("extracts axios.get calls", () => {
17
+ const src = `import axios from 'axios';\nconst r = await axios.get('/api/users');`;
18
+ const calls = extractApiCallsFromSource(src, "src/api/user.ts");
19
+ expect(calls).toHaveLength(1);
20
+ expect(calls[0]).toMatchObject({ method: "GET", path: "/api/users", file: "src/api/user.ts" });
21
+ });
22
+
23
+ it("extracts axios.post with template literal path", () => {
24
+ const src = "await axios.post(`/api/users/${id}/roles`, body);";
25
+ const calls = extractApiCallsFromSource(src, "a.ts");
26
+ expect(calls).toHaveLength(1);
27
+ expect(calls[0].method).toBe("POST");
28
+ expect(calls[0].path).toBe("/api/users/${id}/roles");
29
+ });
30
+
31
+ it("extracts fetch with inline method option", () => {
32
+ const src = `const r = await fetch('/api/orders', { method: 'POST', body });`;
33
+ const calls = extractApiCallsFromSource(src, "a.ts");
34
+ expect(calls).toHaveLength(1);
35
+ expect(calls[0]).toMatchObject({ method: "POST", path: "/api/orders" });
36
+ });
37
+
38
+ it("defaults fetch to GET when no method option", () => {
39
+ const src = `const r = await fetch('/api/orders');`;
40
+ const calls = extractApiCallsFromSource(src, "a.ts");
41
+ expect(calls[0].method).toBe("GET");
42
+ });
43
+
44
+ it("extracts useRequest calls with method option", () => {
45
+ const src = `const { data } = useRequest('/api/items', { method: 'DELETE' });`;
46
+ const calls = extractApiCallsFromSource(src, "a.ts");
47
+ expect(calls[0]).toMatchObject({ method: "DELETE", path: "/api/items" });
48
+ });
49
+
50
+ it("extracts generic request('/path', 'POST') helper", () => {
51
+ const src = `await request('/api/login', 'POST')`;
52
+ const calls = extractApiCallsFromSource(src, "a.ts");
53
+ expect(calls[0]).toMatchObject({ method: "POST", path: "/api/login" });
54
+ });
55
+
56
+ it("skips non-API string literals (CSS imports, assets)", () => {
57
+ const src = `import css from './style.css';\nconst logo = '/images/logo.png';`;
58
+ const calls = extractApiCallsFromSource(src, "a.ts");
59
+ expect(calls).toHaveLength(0);
60
+ });
61
+
62
+ it("finds multiple calls in one file with correct line numbers", () => {
63
+ const src = [
64
+ "// line 1",
65
+ "import axios from 'axios';",
66
+ "axios.get('/api/users');", // line 3
67
+ "",
68
+ "axios.post('/api/users', body);", // line 5
69
+ ].join("\n");
70
+ const calls = extractApiCallsFromSource(src, "x.ts");
71
+ expect(calls).toHaveLength(2);
72
+ expect(calls[0].line).toBe(3);
73
+ expect(calls[1].line).toBe(5);
74
+ });
75
+
76
+ it("marks pure request('/path') calls as UNKNOWN method", () => {
77
+ const src = `await request('/api/raw');`;
78
+ const calls = extractApiCallsFromSource(src, "a.ts");
79
+ expect(calls[0].method).toBe("UNKNOWN");
80
+ });
81
+ });
82
+
83
+ // ─── Path normalization & matching ────────────────────────────────────────────
84
+
85
+ describe("normalizePathSegments", () => {
86
+ it("wildcards :id segments", () => {
87
+ expect(normalizePathSegments("/api/users/:id")).toEqual(["api", "users", "*"]);
88
+ });
89
+
90
+ it("wildcards template literal slots", () => {
91
+ expect(normalizePathSegments("/api/users/${id}/roles")).toEqual(["api", "users", "*", "roles"]);
92
+ });
93
+
94
+ it("wildcards numeric id segments", () => {
95
+ expect(normalizePathSegments("/api/users/123")).toEqual(["api", "users", "*"]);
96
+ });
97
+
98
+ it("strips querystring", () => {
99
+ expect(normalizePathSegments("/api/search?q=foo")).toEqual(["api", "search"]);
100
+ });
101
+
102
+ it("preserves static segments lowercased", () => {
103
+ expect(normalizePathSegments("/API/Users")).toEqual(["api", "users"]);
104
+ });
105
+ });
106
+
107
+ describe("pathsMatch", () => {
108
+ it("matches DSL :id against frontend ${id}", () => {
109
+ expect(pathsMatch("/api/users/:id", "/api/users/${userId}")).toBe(true);
110
+ });
111
+
112
+ it("matches DSL :id against numeric literal", () => {
113
+ expect(pathsMatch("/api/users/:id", "/api/users/42")).toBe(true);
114
+ });
115
+
116
+ it("rejects different lengths", () => {
117
+ expect(pathsMatch("/api/users", "/api/users/:id")).toBe(false);
118
+ });
119
+
120
+ it("rejects different static segments", () => {
121
+ expect(pathsMatch("/api/users/:id", "/api/orders/:id")).toBe(false);
122
+ });
123
+
124
+ it("rejects singular vs plural", () => {
125
+ expect(pathsMatch("/api/users/:id", "/api/user/:id")).toBe(false);
126
+ });
127
+ });
128
+
129
+ // ─── verifyCrossStackContract (end-to-end with tmp dir) ───────────────────────
130
+
131
+ describe("verifyCrossStackContract", () => {
132
+ let tmpDir: string;
133
+
134
+ beforeEach(async () => {
135
+ tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "xstack-"));
136
+ });
137
+
138
+ afterEach(async () => {
139
+ await fs.remove(tmpDir);
140
+ });
141
+
142
+ const buildDsl = (endpoints: Array<{ id: string; method: string; path: string }>): SpecDSL => ({
143
+ version: "1.0",
144
+ feature: { id: "f", title: "T", description: "D" },
145
+ models: [],
146
+ endpoints: endpoints.map((e) => ({
147
+ id: e.id,
148
+ method: e.method as "GET" | "POST" | "PUT" | "PATCH" | "DELETE",
149
+ path: e.path,
150
+ description: "",
151
+ auth: false,
152
+ successStatus: 200,
153
+ successDescription: "ok",
154
+ })),
155
+ });
156
+
157
+ it("reports fully matched contract when frontend uses all endpoints correctly", async () => {
158
+ await fs.writeFile(
159
+ path.join(tmpDir, "api.ts"),
160
+ `axios.get('/api/users');\naxios.post('/api/users', body);`
161
+ );
162
+ const dsl = buildDsl([
163
+ { id: "EP-1", method: "GET", path: "/api/users" },
164
+ { id: "EP-2", method: "POST", path: "/api/users" },
165
+ ]);
166
+
167
+ const report = await verifyCrossStackContract(dsl, tmpDir);
168
+ expect(report.matched).toHaveLength(2);
169
+ expect(report.phantom).toHaveLength(0);
170
+ expect(report.methodMismatch).toHaveLength(0);
171
+ expect(report.unused).toHaveLength(0);
172
+ });
173
+
174
+ it("flags phantom endpoints when frontend calls a path not in DSL", async () => {
175
+ await fs.writeFile(
176
+ path.join(tmpDir, "api.ts"),
177
+ `axios.get('/api/ghost');\naxios.get('/api/users');`
178
+ );
179
+ const dsl = buildDsl([{ id: "EP-1", method: "GET", path: "/api/users" }]);
180
+
181
+ const report = await verifyCrossStackContract(dsl, tmpDir);
182
+ expect(report.phantom).toHaveLength(1);
183
+ expect(report.phantom[0].path).toBe("/api/ghost");
184
+ expect(report.matched).toHaveLength(1);
185
+ });
186
+
187
+ it("flags method mismatch when path matches but method differs", async () => {
188
+ await fs.writeFile(
189
+ path.join(tmpDir, "api.ts"),
190
+ `axios.get('/api/users');` // DSL says POST
191
+ );
192
+ const dsl = buildDsl([{ id: "EP-1", method: "POST", path: "/api/users" }]);
193
+
194
+ const report = await verifyCrossStackContract(dsl, tmpDir);
195
+ expect(report.methodMismatch).toHaveLength(1);
196
+ expect(report.methodMismatch[0].expectedMethod).toBe("POST");
197
+ expect(report.methodMismatch[0].call.method).toBe("GET");
198
+ expect(report.phantom).toHaveLength(0);
199
+ });
200
+
201
+ it("flags unused endpoints when DSL declares more than frontend consumes", async () => {
202
+ await fs.writeFile(
203
+ path.join(tmpDir, "api.ts"),
204
+ `axios.get('/api/users');`
205
+ );
206
+ const dsl = buildDsl([
207
+ { id: "EP-1", method: "GET", path: "/api/users" },
208
+ { id: "EP-2", method: "POST", path: "/api/users" },
209
+ { id: "EP-3", method: "DELETE", path: "/api/users/:id" },
210
+ ]);
211
+
212
+ const report = await verifyCrossStackContract(dsl, tmpDir);
213
+ expect(report.matched).toHaveLength(1);
214
+ expect(report.unused).toHaveLength(2);
215
+ expect(report.unused.map((u) => u.id).sort()).toEqual(["EP-2", "EP-3"]);
216
+ });
217
+
218
+ it("matches DSL :id endpoints against template-literal and numeric frontend calls", async () => {
219
+ await fs.writeFile(
220
+ path.join(tmpDir, "api.ts"),
221
+ "axios.get(`/api/users/${id}`);\naxios.delete('/api/users/42');"
222
+ );
223
+ const dsl = buildDsl([
224
+ { id: "EP-1", method: "GET", path: "/api/users/:id" },
225
+ { id: "EP-2", method: "DELETE", path: "/api/users/:id" },
226
+ ]);
227
+
228
+ const report = await verifyCrossStackContract(dsl, tmpDir);
229
+ expect(report.matched).toHaveLength(2);
230
+ expect(report.phantom).toHaveLength(0);
231
+ expect(report.unused).toHaveLength(0);
232
+ });
233
+
234
+ it("skips node_modules and dist folders", async () => {
235
+ await fs.ensureDir(path.join(tmpDir, "node_modules/foo"));
236
+ await fs.writeFile(
237
+ path.join(tmpDir, "node_modules/foo/index.ts"),
238
+ `axios.get('/api/should-be-ignored');`
239
+ );
240
+ await fs.writeFile(
241
+ path.join(tmpDir, "real.ts"),
242
+ `axios.get('/api/users');`
243
+ );
244
+ const dsl = buildDsl([{ id: "EP-1", method: "GET", path: "/api/users" }]);
245
+
246
+ const report = await verifyCrossStackContract(dsl, tmpDir);
247
+ expect(report.matched).toHaveLength(1);
248
+ expect(report.phantom).toHaveLength(0);
249
+ });
250
+
251
+ it("scopedFiles: only scans the listed files, ignoring pre-existing repo code", async () => {
252
+ // Pre-existing frontend code with unrelated API calls (simulates rushbuy case)
253
+ await fs.writeFile(
254
+ path.join(tmpDir, "legacy.ts"),
255
+ `axios.post('/api/youpin/deposit/service');`
256
+ );
257
+ await fs.writeFile(
258
+ path.join(tmpDir, "legacy2.ts"),
259
+ `axios.get('/api/refund/records/export');`
260
+ );
261
+ // Newly generated file (in scope) that correctly uses the DSL endpoint
262
+ const generated = path.join(tmpDir, "src/apis/task/index.ts");
263
+ await fs.ensureDir(path.dirname(generated));
264
+ await fs.writeFile(generated, `axios.get('/admin/tasks');`);
265
+
266
+ const dsl = buildDsl([{ id: "EP-1", method: "GET", path: "/admin/tasks" }]);
267
+
268
+ // Without scoping, the 2 legacy calls show as phantom
269
+ const unscoped = await verifyCrossStackContract(dsl, tmpDir);
270
+ expect(unscoped.phantom.length).toBeGreaterThanOrEqual(2);
271
+
272
+ // With scoping, only the generated file is checked — clean report
273
+ const scoped = await verifyCrossStackContract(dsl, tmpDir, {
274
+ scopedFiles: [generated],
275
+ });
276
+ expect(scoped.phantom).toHaveLength(0);
277
+ expect(scoped.matched).toHaveLength(1);
278
+ expect(scoped.totalScannedFiles).toBe(1);
279
+ });
280
+
281
+ it("scopedFiles: accepts relative paths resolved against frontendRoot", async () => {
282
+ await fs.ensureDir(path.join(tmpDir, "src"));
283
+ await fs.writeFile(path.join(tmpDir, "src/x.ts"), `axios.get('/api/users');`);
284
+ const dsl = buildDsl([{ id: "EP-1", method: "GET", path: "/api/users" }]);
285
+
286
+ const report = await verifyCrossStackContract(dsl, tmpDir, {
287
+ scopedFiles: ["src/x.ts"],
288
+ });
289
+ expect(report.matched).toHaveLength(1);
290
+ expect(report.totalScannedFiles).toBe(1);
291
+ });
292
+
293
+ it("scopedFiles: empty list falls back to full scan", async () => {
294
+ await fs.writeFile(path.join(tmpDir, "a.ts"), `axios.get('/api/users');`);
295
+ const dsl = buildDsl([{ id: "EP-1", method: "GET", path: "/api/users" }]);
296
+
297
+ const report = await verifyCrossStackContract(dsl, tmpDir, { scopedFiles: [] });
298
+ // Empty list is treated as "no scope" → walks whole tree
299
+ expect(report.matched).toHaveLength(1);
300
+ });
301
+ });
package/tests/fix-history.test.ts ADDED
@@ -0,0 +1,335 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
2
+ import * as fs from "fs-extra";
3
+ import * as path from "path";
4
+ import * as os from "os";
5
+ import {
6
+ FIX_HISTORY_FILE,
7
+ FIX_HISTORY_VERSION,
8
+ computePatternKey,
9
+ loadFixHistory,
10
+ appendFixEntry,
11
+ pruneFixHistory,
12
+ aggregateFixPatterns,
13
+ buildHallucinationAvoidanceSection,
14
+ detectPromotionCandidates,
15
+ computeFixHistoryStats,
16
+ FixHistoryEntry,
17
+ } from "../core/fix-history";
18
+
19
+ // ─── Helpers ──────────────────────────────────────────────────────────────────
20
+
21
+ let tmpDir: string;
22
+
23
+ beforeEach(async () => {
24
+ tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "fix-hist-"));
25
+ });
26
+
27
+ afterEach(async () => {
28
+ await fs.remove(tmpDir);
29
+ });
30
+
31
+ function makeEntry(overrides: Partial<FixHistoryEntry> = {}): Omit<FixHistoryEntry, "patternKey"> {
32
+ return {
33
+ ts: overrides.ts ?? new Date().toISOString(),
34
+ runId: overrides.runId ?? "run-001",
35
+ brokenImport: overrides.brokenImport ?? {
36
+ source: "@/apis/task/type",
37
+ names: ["Task"],
38
+ reason: "file_not_found",
39
+ file: "src/stores/task.ts",
40
+ line: 4,
41
+ },
42
+ fix: overrides.fix ?? {
43
+ kind: "create_file",
44
+ target: "src/apis/task/type.ts",
45
+ stage: "deterministic",
46
+ },
47
+ };
48
+ }
49
+
50
+ // ─── computePatternKey ────────────────────────────────────────────────────────
51
+
52
+ describe("computePatternKey", () => {
53
+ it("produces the same key for identical source + names", () => {
54
+ const a = computePatternKey("@/apis/task/type", ["Task"]);
55
+ const b = computePatternKey("@/apis/task/type", ["Task"]);
56
+ expect(a).toBe(b);
57
+ expect(a).toHaveLength(12);
58
+ });
59
+
60
+ it("is order-independent for names", () => {
61
+ const a = computePatternKey("@/apis/foo", ["A", "B"]);
62
+ const b = computePatternKey("@/apis/foo", ["B", "A"]);
63
+ expect(a).toBe(b);
64
+ });
65
+
66
+ it("differs when source differs", () => {
67
+ const a = computePatternKey("@/apis/task/type", ["Task"]);
68
+ const b = computePatternKey("@/apis/task/types", ["Task"]);
69
+ expect(a).not.toBe(b);
70
+ });
71
+
72
+ it("differs when names differ", () => {
73
+ const a = computePatternKey("@/x", ["A"]);
74
+ const b = computePatternKey("@/x", ["B"]);
75
+ expect(a).not.toBe(b);
76
+ });
77
+ });
78
+
79
+ // ─── loadFixHistory / appendFixEntry ──────────────────────────────────────────
80
+
81
+ describe("loadFixHistory + appendFixEntry", () => {
82
+ it("returns empty history when file does not exist", async () => {
83
+ const history = await loadFixHistory(tmpDir);
84
+ expect(history.version).toBe(FIX_HISTORY_VERSION);
85
+ expect(history.entries).toHaveLength(0);
86
+ });
87
+
88
+ it("appends and reads back a single entry", async () => {
89
+ await appendFixEntry(tmpDir, makeEntry());
90
+ const history = await loadFixHistory(tmpDir);
91
+ expect(history.entries).toHaveLength(1);
92
+ expect(history.entries[0].patternKey).toBeDefined();
93
+ expect(history.entries[0].brokenImport.source).toBe("@/apis/task/type");
94
+ });
95
+
96
+ it("computes a stable patternKey for each entry", async () => {
97
+ const result = await appendFixEntry(tmpDir, makeEntry());
98
+ const expected = computePatternKey("@/apis/task/type", ["Task"]);
99
+ expect(result.patternKey).toBe(expected);
100
+ });
101
+
102
+ it("preserves append order across multiple entries", async () => {
103
+ await appendFixEntry(tmpDir, makeEntry({ ts: "2026-04-07T10:00:00.000Z", runId: "r1" }));
104
+ await appendFixEntry(tmpDir, makeEntry({ ts: "2026-04-07T11:00:00.000Z", runId: "r2" }));
105
+ const history = await loadFixHistory(tmpDir);
106
+ expect(history.entries).toHaveLength(2);
107
+ expect(history.entries[0].runId).toBe("r1");
108
+ expect(history.entries[1].runId).toBe("r2");
109
+ });
110
+
111
+ it("gracefully handles corrupted ledger file", async () => {
112
+ await fs.writeFile(path.join(tmpDir, FIX_HISTORY_FILE), "not valid json{{{");
113
+ const history = await loadFixHistory(tmpDir);
114
+ expect(history.entries).toHaveLength(0);
115
+ });
116
+
117
+ it("handles ledger file missing `entries` array", async () => {
118
+ await fs.writeJson(path.join(tmpDir, FIX_HISTORY_FILE), { version: "1.0" });
119
+ const history = await loadFixHistory(tmpDir);
120
+ expect(history.entries).toHaveLength(0);
121
+ });
122
+ });
123
+
124
+ // ─── pruneFixHistory ──────────────────────────────────────────────────────────
125
+
126
+ describe("pruneFixHistory", () => {
127
+ it("removes entries older than maxAgeDays", async () => {
128
+ const oldTs = new Date(Date.now() - 40 * 24 * 60 * 60 * 1000).toISOString();
129
+ const freshTs = new Date().toISOString();
130
+ await appendFixEntry(tmpDir, makeEntry({ ts: oldTs, runId: "old" }));
131
+ await appendFixEntry(tmpDir, makeEntry({ ts: freshTs, runId: "new" }));
132
+
133
+ const removed = await pruneFixHistory(tmpDir, 30);
134
+ expect(removed).toBe(1);
135
+
136
+ const history = await loadFixHistory(tmpDir);
137
+ expect(history.entries).toHaveLength(1);
138
+ expect(history.entries[0].runId).toBe("new");
139
+ });
140
+
141
+ it("returns 0 when nothing is old enough to prune", async () => {
142
+ await appendFixEntry(tmpDir, makeEntry());
143
+ const removed = await pruneFixHistory(tmpDir, 30);
144
+ expect(removed).toBe(0);
145
+ });
146
+
147
+ it("returns 0 on empty history", async () => {
148
+ const removed = await pruneFixHistory(tmpDir, 30);
149
+ expect(removed).toBe(0);
150
+ });
151
+ });
152
+
153
+ // ─── aggregateFixPatterns ─────────────────────────────────────────────────────
154
+
155
+ describe("aggregateFixPatterns", () => {
156
+ it("groups entries by patternKey", async () => {
157
+ // Same pattern, seen in 3 different runs
158
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r1", ts: "2026-04-01T10:00:00.000Z" }));
159
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r2", ts: "2026-04-02T10:00:00.000Z" }));
160
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r3", ts: "2026-04-03T10:00:00.000Z" }));
161
+
162
+ const history = await loadFixHistory(tmpDir);
163
+ const patterns = aggregateFixPatterns(history);
164
+
165
+ expect(patterns).toHaveLength(1);
166
+ expect(patterns[0].count).toBe(3);
167
+ expect(patterns[0].uniqueRunIds).toBe(3);
168
+ expect(patterns[0].firstSeen).toBe("2026-04-01T10:00:00.000Z");
169
+ expect(patterns[0].lastSeen).toBe("2026-04-03T10:00:00.000Z");
170
+ });
171
+
172
+ it("sorts by count descending", async () => {
173
+ // Pattern A: 3 hits
174
+ for (let i = 0; i < 3; i++) {
175
+ await appendFixEntry(tmpDir, makeEntry({
176
+ runId: `a${i}`,
177
+ brokenImport: { source: "@/a", names: ["A"], reason: "file_not_found", file: "x", line: 1 },
178
+ }));
179
+ }
180
+ // Pattern B: 1 hit
181
+ await appendFixEntry(tmpDir, makeEntry({
182
+ runId: "b1",
183
+ brokenImport: { source: "@/b", names: ["B"], reason: "file_not_found", file: "y", line: 1 },
184
+ }));
185
+
186
+ const history = await loadFixHistory(tmpDir);
187
+ const patterns = aggregateFixPatterns(history);
188
+ expect(patterns).toHaveLength(2);
189
+ expect(patterns[0].source).toBe("@/a");
190
+ expect(patterns[0].count).toBe(3);
191
+ expect(patterns[1].source).toBe("@/b");
192
+ });
193
+
194
+ it("counts unique runIds correctly when same run has multiple entries", async () => {
195
+ // One run, same pattern hit in 2 files
196
+ await appendFixEntry(tmpDir, makeEntry({ runId: "same-run", brokenImport: { source: "@/x", names: ["X"], reason: "file_not_found", file: "a.ts", line: 1 } }));
197
+ await appendFixEntry(tmpDir, makeEntry({ runId: "same-run", brokenImport: { source: "@/x", names: ["X"], reason: "file_not_found", file: "b.ts", line: 1 } }));
198
+
199
+ const history = await loadFixHistory(tmpDir);
200
+ const patterns = aggregateFixPatterns(history);
201
+ expect(patterns).toHaveLength(1);
202
+ expect(patterns[0].count).toBe(2);
203
+ expect(patterns[0].uniqueRunIds).toBe(1);
204
+ });
205
+ });
206
+
207
+ // ─── buildHallucinationAvoidanceSection ───────────────────────────────────────
208
+
209
+ describe("buildHallucinationAvoidanceSection", () => {
210
+ it("returns null for empty history", async () => {
211
+ const history = await loadFixHistory(tmpDir);
212
+ expect(buildHallucinationAvoidanceSection(history)).toBeNull();
213
+ });
214
+
215
+ it("produces a section with the broken import and frequency", async () => {
216
+ await appendFixEntry(tmpDir, makeEntry({
217
+ ts: "2026-04-07T10:00:00.000Z",
218
+ brokenImport: { source: "@/apis/task/type", names: ["Task"], reason: "file_not_found", file: "s", line: 1 },
219
+ }));
220
+ await appendFixEntry(tmpDir, makeEntry({
221
+ ts: "2026-04-07T11:00:00.000Z",
222
+ brokenImport: { source: "@/apis/task/type", names: ["Task"], reason: "file_not_found", file: "s", line: 1 },
223
+ }));
224
+
225
+ const history = await loadFixHistory(tmpDir);
226
+ const section = buildHallucinationAvoidanceSection(history);
227
+
228
+ expect(section).toBeTruthy();
229
+ expect(section).toContain("DO NOT REPEAT");
230
+ expect(section).toContain("@/apis/task/type");
231
+ expect(section).toContain("Task");
232
+ expect(section).toContain("seen 2x");
233
+ expect(section).toContain("2026-04-07");
234
+ });
235
+
236
+ it("respects minCount filter", async () => {
237
+ // Only 1 entry → below minCount of 2 → should return null
238
+ await appendFixEntry(tmpDir, makeEntry());
239
+ const history = await loadFixHistory(tmpDir);
240
+ const section = buildHallucinationAvoidanceSection(history, { minCount: 2 });
241
+ expect(section).toBeNull();
242
+ });
243
+
244
+ it("respects maxItems cap", async () => {
245
+ // Create 15 distinct patterns
246
+ for (let i = 0; i < 15; i++) {
247
+ await appendFixEntry(tmpDir, makeEntry({
248
+ brokenImport: { source: `@/p${i}`, names: ["X"], reason: "file_not_found", file: "s", line: 1 },
249
+ }));
250
+ }
251
+ const history = await loadFixHistory(tmpDir);
252
+ const section = buildHallucinationAvoidanceSection(history, { maxItems: 5 });
253
+ expect(section).toBeTruthy();
254
+ expect(section).toContain("10 more pattern(s) hidden");
255
+ // Should only contain 5 DO NOT lines
256
+ const notCount = (section!.match(/❌ Do NOT/g) ?? []).length;
257
+ expect(notCount).toBe(5);
258
+ });
259
+ });
260
+
261
+ // ─── detectPromotionCandidates ────────────────────────────────────────────────
262
+
263
+ describe("detectPromotionCandidates", () => {
264
+ it("returns empty when no pattern meets threshold", async () => {
265
+ for (let i = 0; i < 3; i++) {
266
+ await appendFixEntry(tmpDir, makeEntry({ runId: `r${i}` }));
267
+ }
268
+ const history = await loadFixHistory(tmpDir);
269
+ const candidates = detectPromotionCandidates(history, 5);
270
+ expect(candidates).toHaveLength(0);
271
+ });
272
+
273
+ it("returns patterns above threshold with lesson text", async () => {
274
+ for (let i = 0; i < 6; i++) {
275
+ await appendFixEntry(tmpDir, makeEntry({ runId: `r${i}` }));
276
+ }
277
+ const history = await loadFixHistory(tmpDir);
278
+ const candidates = detectPromotionCandidates(history, 5);
279
+ expect(candidates).toHaveLength(1);
280
+ expect(candidates[0].aggregate.count).toBe(6);
281
+ expect(candidates[0].lessonText).toContain("@/apis/task/type");
282
+ expect(candidates[0].lessonText).toContain("6 次");
283
+ });
284
+
285
+ it("distinguishes file_not_found vs missing_export in lesson text", async () => {
286
+ for (let i = 0; i < 5; i++) {
287
+ await appendFixEntry(tmpDir, makeEntry({
288
+ runId: `r${i}`,
289
+ brokenImport: { source: "@/a", names: ["X"], reason: "missing_export", file: "s", line: 1 },
290
+ }));
291
+ }
292
+ const history = await loadFixHistory(tmpDir);
293
+ const candidates = detectPromotionCandidates(history, 5);
294
+ expect(candidates[0].lessonText).toContain("未导出");
295
+ });
296
+ });
297
+
298
+ // ─── computeFixHistoryStats ───────────────────────────────────────────────────
299
+
300
+ describe("computeFixHistoryStats", () => {
301
+ it("counts entries, patterns, runs, and stage/reason breakdown", async () => {
302
+ await appendFixEntry(tmpDir, makeEntry({
303
+ runId: "r1",
304
+ fix: { kind: "create_file", target: "a.ts", stage: "deterministic" },
305
+ }));
306
+ await appendFixEntry(tmpDir, makeEntry({
307
+ runId: "r1",
308
+ brokenImport: { source: "@/b", names: ["B"], reason: "missing_export", file: "x", line: 1 },
309
+ fix: { kind: "create_file", target: "b.ts", stage: "ai" },
310
+ }));
311
+ await appendFixEntry(tmpDir, makeEntry({
312
+ runId: "r2",
313
+ fix: { kind: "create_file", target: "a.ts", stage: "deterministic" },
314
+ }));
315
+
316
+ const history = await loadFixHistory(tmpDir);
317
+ const stats = computeFixHistoryStats(history);
318
+
319
+ expect(stats.totalEntries).toBe(3);
320
+ expect(stats.uniquePatterns).toBe(2);
321
+ expect(stats.uniqueRunIds).toBe(2);
322
+ expect(stats.byStage.deterministic).toBe(2);
323
+ expect(stats.byStage.ai).toBe(1);
324
+ expect(stats.byReason.file_not_found).toBe(2);
325
+ expect(stats.byReason.missing_export).toBe(1);
326
+ });
327
+
328
+ it("returns zeros for empty history", async () => {
329
+ const history = await loadFixHistory(tmpDir);
330
+ const stats = computeFixHistoryStats(history);
331
+ expect(stats.totalEntries).toBe(0);
332
+ expect(stats.uniquePatterns).toBe(0);
333
+ expect(stats.lastEntryTs).toBeUndefined();
334
+ });
335
+ });