ai-spec-dev 0.46.0 → 0.56.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +60 -30
  2. package/cli/commands/config.ts +129 -1
  3. package/cli/commands/create.ts +14 -0
  4. package/cli/commands/fix-history.ts +176 -0
  5. package/cli/commands/init.ts +36 -1
  6. package/cli/index.ts +2 -6
  7. package/cli/pipeline/helpers.ts +6 -0
  8. package/cli/pipeline/multi-repo.ts +300 -26
  9. package/cli/pipeline/single-repo.ts +103 -2
  10. package/cli/utils.ts +23 -0
  11. package/core/code-generator.ts +63 -14
  12. package/core/cross-stack-verifier.ts +482 -0
  13. package/core/fix-history.ts +333 -0
  14. package/core/import-fixer.ts +827 -0
  15. package/core/import-verifier.ts +569 -0
  16. package/core/knowledge-memory.ts +55 -6
  17. package/core/self-evaluator.ts +44 -7
  18. package/core/spec-generator.ts +3 -3
  19. package/core/types-generator.ts +2 -2
  20. package/dist/cli/index.js +3968 -2353
  21. package/dist/cli/index.js.map +1 -1
  22. package/dist/cli/index.mjs +3810 -2195
  23. package/dist/cli/index.mjs.map +1 -1
  24. package/dist/index.d.mts +14 -0
  25. package/dist/index.d.ts +14 -0
  26. package/dist/index.js +249 -128
  27. package/dist/index.js.map +1 -1
  28. package/dist/index.mjs +249 -128
  29. package/dist/index.mjs.map +1 -1
  30. package/package.json +2 -2
  31. package/tests/cross-stack-verifier.test.ts +402 -0
  32. package/tests/fix-history.test.ts +335 -0
  33. package/tests/import-fixer.test.ts +944 -0
  34. package/tests/import-verifier.test.ts +420 -0
  35. package/tests/knowledge-memory.test.ts +40 -0
  36. package/tests/self-evaluator.test.ts +97 -0
  37. package/.ai-spec-workspace.json +0 -17
  38. package/.ai-spec.json +0 -7
  39. package/cli/commands/model.ts +0 -152
  40. package/cli/commands/scan.ts +0 -99
  41. package/cli/commands/workspace.ts +0 -219
@@ -0,0 +1,335 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
2
+ import * as fs from "fs-extra";
3
+ import * as path from "path";
4
+ import * as os from "os";
5
+ import {
6
+ FIX_HISTORY_FILE,
7
+ FIX_HISTORY_VERSION,
8
+ computePatternKey,
9
+ loadFixHistory,
10
+ appendFixEntry,
11
+ pruneFixHistory,
12
+ aggregateFixPatterns,
13
+ buildHallucinationAvoidanceSection,
14
+ detectPromotionCandidates,
15
+ computeFixHistoryStats,
16
+ FixHistoryEntry,
17
+ } from "../core/fix-history";
18
+
19
+ // ─── Helpers ──────────────────────────────────────────────────────────────────
20
+
21
+ let tmpDir: string;
22
+
23
+ beforeEach(async () => {
24
+ tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "fix-hist-"));
25
+ });
26
+
27
+ afterEach(async () => {
28
+ await fs.remove(tmpDir);
29
+ });
30
+
31
+ function makeEntry(overrides: Partial<FixHistoryEntry> = {}): Omit<FixHistoryEntry, "patternKey"> {
32
+ return {
33
+ ts: overrides.ts ?? new Date().toISOString(),
34
+ runId: overrides.runId ?? "run-001",
35
+ brokenImport: overrides.brokenImport ?? {
36
+ source: "@/apis/task/type",
37
+ names: ["Task"],
38
+ reason: "file_not_found",
39
+ file: "src/stores/task.ts",
40
+ line: 4,
41
+ },
42
+ fix: overrides.fix ?? {
43
+ kind: "create_file",
44
+ target: "src/apis/task/type.ts",
45
+ stage: "deterministic",
46
+ },
47
+ };
48
+ }
49
+
50
+ // ─── computePatternKey ────────────────────────────────────────────────────────
51
+
52
+ describe("computePatternKey", () => {
53
+ it("produces the same key for identical source + names", () => {
54
+ const a = computePatternKey("@/apis/task/type", ["Task"]);
55
+ const b = computePatternKey("@/apis/task/type", ["Task"]);
56
+ expect(a).toBe(b);
57
+ expect(a).toHaveLength(12);
58
+ });
59
+
60
+ it("is order-independent for names", () => {
61
+ const a = computePatternKey("@/apis/foo", ["A", "B"]);
62
+ const b = computePatternKey("@/apis/foo", ["B", "A"]);
63
+ expect(a).toBe(b);
64
+ });
65
+
66
+ it("differs when source differs", () => {
67
+ const a = computePatternKey("@/apis/task/type", ["Task"]);
68
+ const b = computePatternKey("@/apis/task/types", ["Task"]);
69
+ expect(a).not.toBe(b);
70
+ });
71
+
72
+ it("differs when names differ", () => {
73
+ const a = computePatternKey("@/x", ["A"]);
74
+ const b = computePatternKey("@/x", ["B"]);
75
+ expect(a).not.toBe(b);
76
+ });
77
+ });
78
+
79
+ // ─── loadFixHistory / appendFixEntry ──────────────────────────────────────────
80
+
81
+ describe("loadFixHistory + appendFixEntry", () => {
82
+ it("returns empty history when file does not exist", async () => {
83
+ const history = await loadFixHistory(tmpDir);
84
+ expect(history.version).toBe(FIX_HISTORY_VERSION);
85
+ expect(history.entries).toHaveLength(0);
86
+ });
87
+
88
+ it("appends and reads back a single entry", async () => {
89
+ await appendFixEntry(tmpDir, makeEntry());
90
+ const history = await loadFixHistory(tmpDir);
91
+ expect(history.entries).toHaveLength(1);
92
+ expect(history.entries[0].patternKey).toBeDefined();
93
+ expect(history.entries[0].brokenImport.source).toBe("@/apis/task/type");
94
+ });
95
+
96
+ it("computes a stable patternKey for each entry", async () => {
97
+ const result = await appendFixEntry(tmpDir, makeEntry());
98
+ const expected = computePatternKey("@/apis/task/type", ["Task"]);
99
+ expect(result.patternKey).toBe(expected);
100
+ });
101
+
102
+ it("preserves append order across multiple entries", async () => {
103
+ await appendFixEntry(tmpDir, makeEntry({ ts: "2026-04-07T10:00:00.000Z", runId: "r1" }));
104
+ await appendFixEntry(tmpDir, makeEntry({ ts: "2026-04-07T11:00:00.000Z", runId: "r2" }));
105
+ const history = await loadFixHistory(tmpDir);
106
+ expect(history.entries).toHaveLength(2);
107
+ expect(history.entries[0].runId).toBe("r1");
108
+ expect(history.entries[1].runId).toBe("r2");
109
+ });
110
+
111
+ it("gracefully handles corrupted ledger file", async () => {
112
+ await fs.writeFile(path.join(tmpDir, FIX_HISTORY_FILE), "not valid json{{{");
113
+ const history = await loadFixHistory(tmpDir);
114
+ expect(history.entries).toHaveLength(0);
115
+ });
116
+
117
+ it("handles ledger file missing `entries` array", async () => {
118
+ await fs.writeJson(path.join(tmpDir, FIX_HISTORY_FILE), { version: "1.0" });
119
+ const history = await loadFixHistory(tmpDir);
120
+ expect(history.entries).toHaveLength(0);
121
+ });
122
+ });
123
+
124
+ // ─── pruneFixHistory ──────────────────────────────────────────────────────────
125
+
126
+ describe("pruneFixHistory", () => {
127
+ it("removes entries older than maxAgeDays", async () => {
128
+ const oldTs = new Date(Date.now() - 40 * 24 * 60 * 60 * 1000).toISOString();
129
+ const freshTs = new Date().toISOString();
130
+ await appendFixEntry(tmpDir, makeEntry({ ts: oldTs, runId: "old" }));
131
+ await appendFixEntry(tmpDir, makeEntry({ ts: freshTs, runId: "new" }));
132
+
133
+ const removed = await pruneFixHistory(tmpDir, 30);
134
+ expect(removed).toBe(1);
135
+
136
+ const history = await loadFixHistory(tmpDir);
137
+ expect(history.entries).toHaveLength(1);
138
+ expect(history.entries[0].runId).toBe("new");
139
+ });
140
+
141
+ it("returns 0 when nothing is old enough to prune", async () => {
142
+ await appendFixEntry(tmpDir, makeEntry());
143
+ const removed = await pruneFixHistory(tmpDir, 30);
144
+ expect(removed).toBe(0);
145
+ });
146
+
147
+ it("returns 0 on empty history", async () => {
148
+ const removed = await pruneFixHistory(tmpDir, 30);
149
+ expect(removed).toBe(0);
150
+ });
151
+ });
152
+
153
+ // ─── aggregateFixPatterns ─────────────────────────────────────────────────────
154
+
155
+ describe("aggregateFixPatterns", () => {
156
+ it("groups entries by patternKey", async () => {
157
+ // Same pattern, seen in 3 different runs
158
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r1", ts: "2026-04-01T10:00:00.000Z" }));
159
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r2", ts: "2026-04-02T10:00:00.000Z" }));
160
+ await appendFixEntry(tmpDir, makeEntry({ runId: "r3", ts: "2026-04-03T10:00:00.000Z" }));
161
+
162
+ const history = await loadFixHistory(tmpDir);
163
+ const patterns = aggregateFixPatterns(history);
164
+
165
+ expect(patterns).toHaveLength(1);
166
+ expect(patterns[0].count).toBe(3);
167
+ expect(patterns[0].uniqueRunIds).toBe(3);
168
+ expect(patterns[0].firstSeen).toBe("2026-04-01T10:00:00.000Z");
169
+ expect(patterns[0].lastSeen).toBe("2026-04-03T10:00:00.000Z");
170
+ });
171
+
172
+ it("sorts by count descending", async () => {
173
+ // Pattern A: 3 hits
174
+ for (let i = 0; i < 3; i++) {
175
+ await appendFixEntry(tmpDir, makeEntry({
176
+ runId: `a${i}`,
177
+ brokenImport: { source: "@/a", names: ["A"], reason: "file_not_found", file: "x", line: 1 },
178
+ }));
179
+ }
180
+ // Pattern B: 1 hit
181
+ await appendFixEntry(tmpDir, makeEntry({
182
+ runId: "b1",
183
+ brokenImport: { source: "@/b", names: ["B"], reason: "file_not_found", file: "y", line: 1 },
184
+ }));
185
+
186
+ const history = await loadFixHistory(tmpDir);
187
+ const patterns = aggregateFixPatterns(history);
188
+ expect(patterns).toHaveLength(2);
189
+ expect(patterns[0].source).toBe("@/a");
190
+ expect(patterns[0].count).toBe(3);
191
+ expect(patterns[1].source).toBe("@/b");
192
+ });
193
+
194
+ it("counts unique runIds correctly when same run has multiple entries", async () => {
195
+ // One run, same pattern hit in 2 files
196
+ await appendFixEntry(tmpDir, makeEntry({ runId: "same-run", brokenImport: { source: "@/x", names: ["X"], reason: "file_not_found", file: "a.ts", line: 1 } }));
197
+ await appendFixEntry(tmpDir, makeEntry({ runId: "same-run", brokenImport: { source: "@/x", names: ["X"], reason: "file_not_found", file: "b.ts", line: 1 } }));
198
+
199
+ const history = await loadFixHistory(tmpDir);
200
+ const patterns = aggregateFixPatterns(history);
201
+ expect(patterns).toHaveLength(1);
202
+ expect(patterns[0].count).toBe(2);
203
+ expect(patterns[0].uniqueRunIds).toBe(1);
204
+ });
205
+ });
206
+
207
+ // ─── buildHallucinationAvoidanceSection ───────────────────────────────────────
208
+
209
+ describe("buildHallucinationAvoidanceSection", () => {
210
+ it("returns null for empty history", async () => {
211
+ const history = await loadFixHistory(tmpDir);
212
+ expect(buildHallucinationAvoidanceSection(history)).toBeNull();
213
+ });
214
+
215
+ it("produces a section with the broken import and frequency", async () => {
216
+ await appendFixEntry(tmpDir, makeEntry({
217
+ ts: "2026-04-07T10:00:00.000Z",
218
+ brokenImport: { source: "@/apis/task/type", names: ["Task"], reason: "file_not_found", file: "s", line: 1 },
219
+ }));
220
+ await appendFixEntry(tmpDir, makeEntry({
221
+ ts: "2026-04-07T11:00:00.000Z",
222
+ brokenImport: { source: "@/apis/task/type", names: ["Task"], reason: "file_not_found", file: "s", line: 1 },
223
+ }));
224
+
225
+ const history = await loadFixHistory(tmpDir);
226
+ const section = buildHallucinationAvoidanceSection(history);
227
+
228
+ expect(section).toBeTruthy();
229
+ expect(section).toContain("DO NOT REPEAT");
230
+ expect(section).toContain("@/apis/task/type");
231
+ expect(section).toContain("Task");
232
+ expect(section).toContain("seen 2x");
233
+ expect(section).toContain("2026-04-07");
234
+ });
235
+
236
+ it("respects minCount filter", async () => {
237
+ // Only 1 entry → below minCount of 2 → should return null
238
+ await appendFixEntry(tmpDir, makeEntry());
239
+ const history = await loadFixHistory(tmpDir);
240
+ const section = buildHallucinationAvoidanceSection(history, { minCount: 2 });
241
+ expect(section).toBeNull();
242
+ });
243
+
244
+ it("respects maxItems cap", async () => {
245
+ // Create 15 distinct patterns
246
+ for (let i = 0; i < 15; i++) {
247
+ await appendFixEntry(tmpDir, makeEntry({
248
+ brokenImport: { source: `@/p${i}`, names: ["X"], reason: "file_not_found", file: "s", line: 1 },
249
+ }));
250
+ }
251
+ const history = await loadFixHistory(tmpDir);
252
+ const section = buildHallucinationAvoidanceSection(history, { maxItems: 5 });
253
+ expect(section).toBeTruthy();
254
+ expect(section).toContain("10 more pattern(s) hidden");
255
+ // Should only contain 5 DO NOT lines
256
+ const notCount = (section!.match(/❌ Do NOT/g) ?? []).length;
257
+ expect(notCount).toBe(5);
258
+ });
259
+ });
260
+
261
+ // ─── detectPromotionCandidates ────────────────────────────────────────────────
262
+
263
+ describe("detectPromotionCandidates", () => {
264
+ it("returns empty when no pattern meets threshold", async () => {
265
+ for (let i = 0; i < 3; i++) {
266
+ await appendFixEntry(tmpDir, makeEntry({ runId: `r${i}` }));
267
+ }
268
+ const history = await loadFixHistory(tmpDir);
269
+ const candidates = detectPromotionCandidates(history, 5);
270
+ expect(candidates).toHaveLength(0);
271
+ });
272
+
273
+ it("returns patterns above threshold with lesson text", async () => {
274
+ for (let i = 0; i < 6; i++) {
275
+ await appendFixEntry(tmpDir, makeEntry({ runId: `r${i}` }));
276
+ }
277
+ const history = await loadFixHistory(tmpDir);
278
+ const candidates = detectPromotionCandidates(history, 5);
279
+ expect(candidates).toHaveLength(1);
280
+ expect(candidates[0].aggregate.count).toBe(6);
281
+ expect(candidates[0].lessonText).toContain("@/apis/task/type");
282
+ expect(candidates[0].lessonText).toContain("6 次");
283
+ });
284
+
285
+ it("distinguishes file_not_found vs missing_export in lesson text", async () => {
286
+ for (let i = 0; i < 5; i++) {
287
+ await appendFixEntry(tmpDir, makeEntry({
288
+ runId: `r${i}`,
289
+ brokenImport: { source: "@/a", names: ["X"], reason: "missing_export", file: "s", line: 1 },
290
+ }));
291
+ }
292
+ const history = await loadFixHistory(tmpDir);
293
+ const candidates = detectPromotionCandidates(history, 5);
294
+ expect(candidates[0].lessonText).toContain("未导出");
295
+ });
296
+ });
297
+
298
+ // ─── computeFixHistoryStats ───────────────────────────────────────────────────
299
+
300
+ describe("computeFixHistoryStats", () => {
301
+ it("counts entries, patterns, runs, and stage/reason breakdown", async () => {
302
+ await appendFixEntry(tmpDir, makeEntry({
303
+ runId: "r1",
304
+ fix: { kind: "create_file", target: "a.ts", stage: "deterministic" },
305
+ }));
306
+ await appendFixEntry(tmpDir, makeEntry({
307
+ runId: "r1",
308
+ brokenImport: { source: "@/b", names: ["B"], reason: "missing_export", file: "x", line: 1 },
309
+ fix: { kind: "create_file", target: "b.ts", stage: "ai" },
310
+ }));
311
+ await appendFixEntry(tmpDir, makeEntry({
312
+ runId: "r2",
313
+ fix: { kind: "create_file", target: "a.ts", stage: "deterministic" },
314
+ }));
315
+
316
+ const history = await loadFixHistory(tmpDir);
317
+ const stats = computeFixHistoryStats(history);
318
+
319
+ expect(stats.totalEntries).toBe(3);
320
+ expect(stats.uniquePatterns).toBe(2);
321
+ expect(stats.uniqueRunIds).toBe(2);
322
+ expect(stats.byStage.deterministic).toBe(2);
323
+ expect(stats.byStage.ai).toBe(1);
324
+ expect(stats.byReason.file_not_found).toBe(2);
325
+ expect(stats.byReason.missing_export).toBe(1);
326
+ });
327
+
328
+ it("returns zeros for empty history", async () => {
329
+ const history = await loadFixHistory(tmpDir);
330
+ const stats = computeFixHistoryStats(history);
331
+ expect(stats.totalEntries).toBe(0);
332
+ expect(stats.uniquePatterns).toBe(0);
333
+ expect(stats.lastEntryTs).toBeUndefined();
334
+ });
335
+ });