ai-spec-dev 0.33.0 → 0.35.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/add-lesson.md +34 -0
- package/.claude/commands/check-layers.md +65 -0
- package/.claude/commands/installed-deps.md +35 -0
- package/.claude/commands/recall-lessons.md +40 -0
- package/.claude/commands/scan-singletons.md +45 -0
- package/.claude/commands/verify-imports.md +48 -0
- package/.claude/settings.local.json +11 -1
- package/README.md +531 -213
- package/RELEASE_LOG.md +305 -0
- package/cli/commands/create.ts +1233 -0
- package/cli/commands/dashboard.ts +62 -0
- package/cli/commands/init.ts +45 -8
- package/cli/commands/mock.ts +175 -0
- package/cli/commands/scan.ts +99 -0
- package/cli/commands/types.ts +69 -0
- package/cli/commands/vcr.ts +70 -0
- package/cli/index.ts +34 -2517
- package/core/combined-generator.ts +13 -3
- package/core/dashboard-generator.ts +340 -0
- package/core/design-dialogue.ts +124 -0
- package/core/dsl-feedback.ts +34 -4
- package/core/error-feedback.ts +46 -2
- package/core/project-index.ts +301 -0
- package/core/reviewer.ts +84 -6
- package/core/run-logger.ts +109 -3
- package/core/run-trend.ts +24 -4
- package/core/self-evaluator.ts +39 -11
- package/core/spec-generator.ts +14 -8
- package/core/task-generator.ts +17 -0
- package/core/types-generator.ts +219 -0
- package/core/vcr.ts +210 -0
- package/dist/cli/index.js +7297 -5640
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/index.mjs +8728 -7071
- package/dist/cli/index.mjs.map +1 -1
- package/dist/index.d.mts +19 -5
- package/dist/index.d.ts +19 -5
- package/dist/index.js +420 -224
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +418 -224
- package/dist/index.mjs.map +1 -1
- package/docs-assets/purpose/architecture-overview.svg +64 -0
- package/docs-assets/purpose/create-pipeline.svg +113 -0
- package/docs-assets/purpose/task-layering.svg +74 -0
- package/package.json +1 -1
- package/prompts/codegen.prompt.ts +97 -9
- package/prompts/design.prompt.ts +59 -0
- package/prompts/spec.prompt.ts +8 -1
- package/prompts/tasks.prompt.ts +27 -2
- package/purpose.md +600 -174
package/core/project-index.ts
ADDED
@@ -0,0 +1,301 @@
+/**
+ * project-index.ts — Persistent project discovery & index.
+ *
+ * Scans a root directory for sub-projects (any dir with a recognisable
+ * project manifest), and maintains an incremental JSON index file at
+ * .ai-spec-index.json in the scan root.
+ *
+ * Incremental rules:
+ *   - New project found → added with firstSeen = now
+ *   - Existing project → techStack / type / role / hasConstitution refreshed, lastSeen = now
+ *   - Previously indexed but directory gone → marked missing:true, NOT deleted
+ *
+ * The index is intentionally lightweight — no AI calls, pure filesystem scan.
+ */
+
+import * as fs from "fs-extra";
+import * as path from "path";
+import { detectRepoType, RepoType, RepoRole, WORKSPACE_CONFIG_FILE } from "./workspace-loader";
+import { CONSTITUTION_FILE } from "./constitution-generator";
+
+export const INDEX_FILE = ".ai-spec-index.json";
+
+// ─── Key dependency lists for tech-stack extraction ──────────────────────────
+
+const KEY_DEPS: string[] = [
+  // Frameworks
+  "express", "fastify", "koa", "@nestjs/core", "hapi",
+  "next", "react", "vue", "nuxt", "svelte",
+  "react-native", "expo",
+  // DB / ORM
+  "prisma", "@prisma/client", "mongoose", "typeorm", "sequelize", "drizzle-orm",
+  // Auth
+  "jsonwebtoken", "passport", "next-auth", "@clerk/nextjs",
+  // Build / Lang
+  "typescript", "vite", "webpack", "esbuild", "turbo",
+  // Testing
+  "jest", "vitest", "mocha", "cypress", "playwright",
+  // Infra
+  "redis", "bull", "socket.io", "graphql", "@trpc/server",
+];
+
+// ─── Types ────────────────────────────────────────────────────────────────────
+
+export interface ProjectEntry {
+  /** Directory name */
+  name: string;
+  /** Path relative to scanRoot */
+  path: string;
+  type: RepoType;
+  role: RepoRole;
+  /** Key dependencies detected (subset of package.json deps or language markers) */
+  techStack: string[];
+  /** Whether .ai-spec-constitution.md exists */
+  hasConstitution: boolean;
+  /** Whether .ai-spec-workspace.json exists (this repo is a workspace root) */
+  hasWorkspace: boolean;
+  /** ISO timestamp of first discovery */
+  firstSeen: string;
+  /** ISO timestamp of last successful scan */
+  lastSeen: string;
+  /** true when the directory no longer exists on disk */
+  missing?: boolean;
+}
+
+export interface ProjectIndex {
+  /** Absolute path of the directory that was scanned */
+  scanRoot: string;
+  /** ISO timestamp of last scan */
+  lastScanned: string;
+  projects: ProjectEntry[];
+}
+
+// ─── Helpers ──────────────────────────────────────────────────────────────────
+
+/** Directories to always skip during scan */
+const SKIP_DIRS = new Set([
+  "node_modules", ".git", ".svn", "dist", "build", "out", ".next",
+  ".nuxt", "coverage", ".turbo", ".cache", "__pycache__", "vendor",
+  ".ai-spec-vcr", ".ai-spec-logs", "specs",
+]);
+
+/** Manifest files that identify a directory as a project root */
+const MANIFEST_FILES = [
+  "package.json",
+  "go.mod",
+  "Cargo.toml",
+  "pom.xml",
+  "build.gradle",
+  "build.gradle.kts",
+  "requirements.txt",
+  "pyproject.toml",
+  "setup.py",
+  "composer.json",
+];
+
+async function isProjectRoot(absPath: string): Promise<boolean> {
+  for (const manifest of MANIFEST_FILES) {
+    if (await fs.pathExists(path.join(absPath, manifest))) return true;
+  }
+  return false;
+}
+
+async function extractTechStack(absPath: string, type: RepoType): Promise<string[]> {
+  const stack: string[] = [];
+
+  // Language marker for non-Node projects
+  if (type === "go") stack.push("go");
+  if (type === "rust") stack.push("rust");
+  if (type === "java") stack.push("java");
+  if (type === "python") stack.push("python");
+  if (type === "php") stack.push("php");
+
+  const pkgPath = path.join(absPath, "package.json");
+  if (!(await fs.pathExists(pkgPath))) return stack;
+
+  let pkg: Record<string, unknown> = {};
+  try { pkg = await fs.readJson(pkgPath); } catch { return stack; }
+
+  const allDeps = {
+    ...((pkg.dependencies as Record<string, string>) ?? {}),
+    ...((pkg.devDependencies as Record<string, string>) ?? {}),
+  };
+  const depKeys = new Set(Object.keys(allDeps));
+
+  for (const dep of KEY_DEPS) {
+    if (depKeys.has(dep)) stack.push(dep);
+  }
+
+  return stack;
+}
+
+// ─── Scan ─────────────────────────────────────────────────────────────────────
+
+/**
+ * Discover all project roots under `rootDir` up to `maxDepth` levels deep.
+ * Returns paths relative to rootDir.
+ */
+async function discoverProjects(
+  rootDir: string,
+  maxDepth: number
+): Promise<string[]> {
+  const found: string[] = [];
+
+  async function walk(absDir: string, depth: number): Promise<void> {
+    if (depth > maxDepth) return;
+
+    let entries: fs.Dirent[];
+    try {
+      entries = await fs.readdir(absDir, { withFileTypes: true });
+    } catch {
+      return;
+    }
+
+    for (const entry of entries) {
+      if (!entry.isDirectory()) continue;
+      if (SKIP_DIRS.has(entry.name) || entry.name.startsWith(".")) continue;
+
+      const childAbs = path.join(absDir, entry.name);
+
+      // Skip git worktrees — they have a .git *file* (not directory)
+      const gitPath = path.join(childAbs, ".git");
+      if (await fs.pathExists(gitPath)) {
+        const gitStat = await fs.stat(gitPath);
+        if (gitStat.isFile()) continue; // git worktree — skip
+      }
+
+      if (await isProjectRoot(childAbs)) {
+        found.push(path.relative(rootDir, childAbs));
+        // Don't recurse into a project root — avoids picking up nested node_modules etc.
+      } else {
+        await walk(childAbs, depth + 1);
+      }
+    }
+  }
+
+  await walk(rootDir, 0);
+  return found;
+}
+
+// ─── Index load / save ────────────────────────────────────────────────────────
+
+export async function loadIndex(scanRoot: string): Promise<ProjectIndex | null> {
+  const filePath = path.join(scanRoot, INDEX_FILE);
+  try {
+    return await fs.readJson(filePath);
+  } catch {
+    return null;
+  }
+}
+
+export async function saveIndex(scanRoot: string, index: ProjectIndex): Promise<string> {
+  const filePath = path.join(scanRoot, INDEX_FILE);
+  await fs.writeJson(filePath, index, { spaces: 2 });
+  return filePath;
+}
+
+// ─── Incremental merge ────────────────────────────────────────────────────────
+
+export interface ScanResult {
+  index: ProjectIndex;
+  added: ProjectEntry[];
+  updated: ProjectEntry[];
+  unchanged: ProjectEntry[];
+  nowMissing: ProjectEntry[];
+}
+
+/**
+ * Run an incremental scan of `scanRoot`, merge with the existing index, and
+ * return the updated index along with a change summary.
+ */
+export async function runScan(
+  scanRoot: string,
+  maxDepth = 2
+): Promise<ScanResult> {
+  const now = new Date().toISOString();
+  const existing = await loadIndex(scanRoot);
+  const existingMap = new Map<string, ProjectEntry>(
+    (existing?.projects ?? []).map((p) => [p.path, p])
+  );
+
+  const discoveredPaths = await discoverProjects(scanRoot, maxDepth);
+
+  const added: ProjectEntry[] = [];
+  const updated: ProjectEntry[] = [];
+  const unchanged: ProjectEntry[] = [];
+  const seenPaths = new Set<string>();
+
+  for (const relPath of discoveredPaths) {
+    const absPath = path.join(scanRoot, relPath);
+    seenPaths.add(relPath);
+
+    const { type, role } = await detectRepoType(absPath);
+    const techStack = await extractTechStack(absPath, type);
+    const hasConstitution = await fs.pathExists(path.join(absPath, CONSTITUTION_FILE));
+    const hasWorkspace = await fs.pathExists(path.join(absPath, WORKSPACE_CONFIG_FILE));
+    const name = path.basename(relPath);
+
+    const prev = existingMap.get(relPath);
+    if (!prev) {
+      const entry: ProjectEntry = {
+        name,
+        path: relPath,
+        type,
+        role,
+        techStack,
+        hasConstitution,
+        hasWorkspace,
+        firstSeen: now,
+        lastSeen: now,
+      };
+      added.push(entry);
+      existingMap.set(relPath, entry);
+    } else {
+      // Check if anything changed
+      const changed =
+        prev.type !== type ||
+        prev.role !== role ||
+        prev.hasConstitution !== hasConstitution ||
+        prev.hasWorkspace !== hasWorkspace ||
+        JSON.stringify(prev.techStack.sort()) !== JSON.stringify(techStack.sort());
+
+      const entry: ProjectEntry = {
+        ...prev,
+        type,
+        role,
+        techStack,
+        hasConstitution,
+        hasWorkspace,
+        lastSeen: now,
+        missing: undefined, // clear missing flag if it came back
+      };
+      existingMap.set(relPath, entry);
+
+      if (changed) {
+        updated.push(entry);
+      } else {
+        unchanged.push(entry);
+      }
+    }
+  }
+
+  // Mark previously known projects as missing if their directory is gone
+  const nowMissing: ProjectEntry[] = [];
+  for (const [relPath, entry] of existingMap) {
+    if (!seenPaths.has(relPath) && !entry.missing) {
+      const gone: ProjectEntry = { ...entry, missing: true };
+      existingMap.set(relPath, gone);
+      nowMissing.push(gone);
+    }
+  }
+
+  const projects = [...existingMap.values()].sort((a, b) => a.path.localeCompare(b.path));
+
+  const index: ProjectIndex = {
+    scanRoot,
+    lastScanned: now,
+    projects,
+  };
+
+  return { index, added, updated, unchanged, nowMissing };
+}
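For orientation, a minimal usage sketch of the new index module. The caller below is hypothetical (the real wiring lives in the new `cli/commands/scan.ts`, which this diff does not expand); it only uses the `runScan` and `saveIndex` exports shown above.

```ts
// Hypothetical caller, illustrative only; not part of the package.
import { runScan, saveIndex } from "./core/project-index";

async function refreshIndex(root: string): Promise<void> {
  // Incremental scan: new dirs get firstSeen = now, vanished dirs are flagged missing: true.
  const { index, added, updated, nowMissing } = await runScan(root, 2);
  console.log(`added ${added.length}, updated ${updated.length}, missing ${nowMissing.length}`);
  // Persist .ai-spec-index.json back into the scan root.
  await saveIndex(root, index);
}

refreshIndex(process.cwd()).catch((err) => {
  console.error(err);
  process.exit(1);
});
```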
package/core/reviewer.ts
CHANGED
@@ -4,10 +4,37 @@ import * as path from "path";
 import * as fs from "fs-extra";
 import { AIProvider } from "./spec-generator";
 import {
+  specComplianceSystemPrompt,
   reviewArchitectureSystemPrompt,
   reviewImplementationSystemPrompt,
   reviewImpactComplexitySystemPrompt,
 } from "../prompts/codegen.prompt";
+import { CONSTITUTION_FILE } from "./constitution-generator";
+
+// ─── Constitution Lessons Helper ──────────────────────────────────────────────
+
+/**
+ * Extract the §9 accumulated lessons section from a constitution file.
+ * Returns null if the section is absent or the file cannot be read.
+ */
+async function loadAccumulatedLessons(projectRoot: string): Promise<string | null> {
+  const constitutionPath = path.join(projectRoot, CONSTITUTION_FILE);
+  let content: string;
+  try {
+    content = await fs.readFile(constitutionPath, "utf-8");
+  } catch {
+    return null;
+  }
+  const marker = "## 9. 积累教训";
+  const idx = content.indexOf(marker);
+  if (idx === -1) return null;
+  // Extract from §9 header to end of file (or next top-level section)
+  const section = content.slice(idx);
+  const nextSection = section.slice(marker.length).match(/\n## \d/);
+  return nextSection
+    ? section.slice(0, marker.length + nextSection.index!)
+    : section;
+}

 // ─── Review History ────────────────────────────────────────────────────────────

@@ -15,6 +42,7 @@ interface ReviewHistoryEntry {
   date: string;
   specFile: string;
   score: number;
+  complianceScore?: number;
   topIssues: string[];
   impactLevel?: "低" | "中" | "高";
   complexityLevel?: "低" | "中" | "高";
@@ -55,6 +83,18 @@ function extractScore(reviewText: string): number {
   return match ? parseFloat(match[1]) : 0;
 }

+/** Extract compliance score from Pass 0 output (looks for "ComplianceScore: X/10") */
+export function extractComplianceScore(complianceText: string): number {
+  const match = complianceText.match(/ComplianceScore:\s*(\d+(?:\.\d+)?)\s*\/\s*10/i);
+  return match ? parseFloat(match[1]) : 0;
+}
+
+/** Count missing requirements from Pass 0 output */
+export function extractMissingCount(complianceText: string): number {
+  const summaryMatch = complianceText.match(/Missing:\s*(\d+)/i);
+  return summaryMatch ? parseInt(summaryMatch[1], 10) : 0;
+}
+
 /** Extract impact level from Pass 3 review ("影响等级:低/中/高") */
 function extractImpactLevel(reviewText: string): "低" | "中" | "高" | undefined {
   const match = reviewText.match(/影响等级[::]\s*(低|中|高)/);
@@ -126,8 +166,9 @@ export class CodeReviewer {
   }

   /**
-   *
-   * Pass
+   * Four-pass review:
+   * Pass 0 — spec compliance (exhaustive requirement coverage audit)
+   * Pass 1 — architecture (layer separation, contract design, auth posture)
    * Pass 2 — implementation details (validation, error handling, edge cases)
    *          + historical issue recurrence check
    * Pass 3 — impact assessment + code complexity
@@ -137,11 +178,43 @@ export class CodeReviewer {
     codeContext: string,
     specFile?: string
   ): Promise<string> {
-
+    // ── Pass 0: Spec Compliance (skip if no spec provided) ───────────────────
+    let complianceReview = "";
+    if (specContent && specContent.trim() && specContent !== "(No spec — review for general code quality)") {
+      console.log(chalk.gray(" Pass 0/3: Spec compliance check..."));
+      const compliancePrompt = `Check whether the implementation covers every requirement in the spec.

-
-
+=== Feature Spec ===
+${specContent}
+
+=== Code ===
+${codeContext}`;
+      complianceReview = await this.provider.generate(compliancePrompt, specComplianceSystemPrompt);
+
+      // Surface compliance score immediately
+      const complianceScore = extractComplianceScore(complianceReview);
+      const missingCount = extractMissingCount(complianceReview);
+      if (complianceScore > 0) {
+        const scoreColor = complianceScore >= 8 ? chalk.green : complianceScore >= 6 ? chalk.yellow : chalk.red;
+        console.log(
+          chalk.gray(" Pass 0 result: ") +
+          scoreColor(`ComplianceScore ${complianceScore}/10`) +
+          (missingCount > 0 ? chalk.red(` · ${missingCount} missing requirement(s)`) : chalk.green(" · all requirements covered"))
+        );
+      }
+    }

+    console.log(chalk.gray(` Pass 1/3: Architecture review...`));
+
+    // ── Pass 1: Architecture (+ §9 lessons cross-check) ──────────────────────
+    const accumulatedLessons = await loadAccumulatedLessons(this.projectRoot);
+    const archPrompt = `Review the architecture of this change.
+${complianceReview
+    ? `\n=== Spec Compliance Report (Pass 0 — already audited, do NOT re-audit missing requirements) ===\n${complianceReview}\n`
+    : ""}
+${accumulatedLessons
+    ? `\n=== §9 历史积累教训 (Accumulated Lessons — check if any are repeated in this code) ===\n${accumulatedLessons}\n`
+    : ""}
 === Feature Spec ===
 ${specContent || "(No spec — review for general code quality)"}

@@ -189,10 +262,14 @@ ${implReview}`;

     // ── Combine ───────────────────────────────────────────────────────────────
     const sep = "─".repeat(52);
-    const
+    const parts = complianceReview
+      ? [complianceReview, archReview, implReview, impactReview]
+      : [archReview, implReview, impactReview];
+    const combined = parts.join(`\n\n${sep}\n\n`);

     // ── Persist history ───────────────────────────────────────────────────────
     const score = extractScore(implReview) || extractScore(archReview);
+    const complianceScore = extractComplianceScore(complianceReview);
     const topIssues = extractTopIssues(implReview);
     const impactLevel = extractImpactLevel(impactReview);
     const complexityLevel = extractComplexityLevel(impactReview);
@@ -201,6 +278,7 @@ ${implReview}`;
       date: new Date().toISOString().slice(0, 10),
       specFile: path.relative(this.projectRoot, specFile),
       score,
+      ...(complianceScore > 0 ? { complianceScore } : {}),
       topIssues,
       ...(impactLevel ? { impactLevel } : {}),
       ...(complexityLevel ? { complexityLevel } : {}),
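The new Pass 0 plumbing depends on the model emitting two parseable tokens, which `extractComplianceScore` and `extractMissingCount` pull out with the regexes above. A small sketch of that contract; the report prose is invented, and only the `ComplianceScore: X/10` and `Missing: N` tokens matter to the regexes.

```ts
import { extractComplianceScore, extractMissingCount } from "./core/reviewer";

// Hypothetical Pass 0 output; the wording is made up, the two tokens are what the regexes match.
const pass0Report = [
  "Requirement coverage audit:",
  "- login rate limiting: covered",
  "- password reset email: NOT covered",
  "Missing: 1",
  "ComplianceScore: 7.5/10",
].join("\n");

console.log(extractComplianceScore(pass0Report)); // 7.5
console.log(extractMissingCount(pass0Report));    // 1
```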
package/core/run-logger.ts
CHANGED
@@ -4,6 +4,86 @@ import chalk from "chalk";

 const LOG_DIR = ".ai-spec-logs";

+// ─── JSONL helpers ────────────────────────────────────────────────────────────
+// Each event is synchronously appended as one JSON line to a `.jsonl` shadow
+// file alongside the full `.json`. If the process crashes mid-run the `.json`
+// may be empty or stale, but every line written to the `.jsonl` is durable.
+// `loadRunLogs` (run-trend.ts) can reconstruct a RunLog from orphan `.jsonl`
+// files for crash recovery.
+
+function appendJsonlLine(filePath: string, record: Record<string, unknown>): void {
+  try {
+    fs.appendFileSync(filePath, JSON.stringify(record) + "\n");
+  } catch {
+    // JSONL write must never crash the pipeline
+  }
+}
+
+/** Reconstruct a RunLog from a `.jsonl` file (crash recovery path). */
+export function reconstructRunLogFromJsonl(jsonlPath: string): RunLog | null {
+  let raw: string;
+  try {
+    raw = fs.readFileSync(jsonlPath, "utf-8");
+  } catch {
+    return null;
+  }
+
+  const log: Partial<RunLog> & { entries: LogEntry[]; filesWritten: string[]; errors: string[] } = {
+    entries: [],
+    filesWritten: [],
+    errors: [],
+    runId: "",
+    startedAt: "",
+    workingDir: "",
+  };
+
+  for (const line of raw.split("\n")) {
+    const trimmed = line.trim();
+    if (!trimmed) continue;
+    try {
+      const rec = JSON.parse(trimmed) as Record<string, unknown>;
+      switch (rec["type"]) {
+        case "header":
+          log.runId = rec["runId"] as string;
+          log.startedAt = rec["startedAt"] as string;
+          log.workingDir = rec["workingDir"] as string;
+          if (rec["provider"]) log.provider = rec["provider"] as string;
+          if (rec["model"]) log.model = rec["model"] as string;
+          if (rec["specPath"]) log.specPath = rec["specPath"] as string;
+          break;
+        case "meta":
+          if (rec["key"] === "promptHash") log.promptHash = rec["value"] as string;
+          if (rec["key"] === "harnessScore") log.harnessScore = rec["value"] as number;
+          break;
+        case "entry":
+          log.entries.push({
+            ts: rec["ts"] as string,
+            event: rec["event"] as string,
+            ...(rec["durationMs"] !== undefined ? { durationMs: rec["durationMs"] as number } : {}),
+            ...(rec["data"] ? { data: rec["data"] as Record<string, unknown> } : {}),
+          });
+          break;
+        case "file":
+          if (rec["path"]) log.filesWritten.push(rec["path"] as string);
+          break;
+        case "error":
+          if (rec["message"]) log.errors.push(rec["message"] as string);
+          break;
+        case "footer":
+          if (rec["endedAt"]) log.endedAt = rec["endedAt"] as string;
+          if (rec["totalDurationMs"]) log.totalDurationMs = rec["totalDurationMs"] as number;
+          if (rec["harnessScore"]) log.harnessScore = rec["harnessScore"] as number;
+          break;
+      }
+    } catch {
+      // corrupt line — skip
+    }
+  }
+
+  if (!log.runId || !log.startedAt) return null;
+  return log as RunLog;
+}
+
 // ─── Types ────────────────────────────────────────────────────────────────────

 export interface LogEntry {
@@ -42,6 +122,7 @@ export class RunLogger {
   private log: RunLog;
   private readonly startMs: number;
   private readonly logPath: string;
+  private readonly jsonlPath: string;
   private readonly stageStartMs = new Map<string, number>();

   constructor(
@@ -50,7 +131,8 @@ export class RunLogger {
     meta?: { provider?: string; model?: string; specPath?: string }
   ) {
     this.startMs = Date.now();
-    this.logPath
+    this.logPath = path.join(workingDir, LOG_DIR, `${runId}.json`);
+    this.jsonlPath = path.join(workingDir, LOG_DIR, `${runId}.jsonl`);
     this.log = {
       runId,
       startedAt: new Date().toISOString(),
@@ -60,6 +142,16 @@ export class RunLogger {
       filesWritten: [],
       errors: [],
     };
+    // Write JSONL header immediately — ensures the file exists even on early crash
+    fs.ensureDir(path.dirname(this.jsonlPath)).then(() => {
+      appendJsonlLine(this.jsonlPath, {
+        type: "header",
+        runId,
+        startedAt: this.log.startedAt,
+        workingDir,
+        ...meta,
+      });
+    }).catch(() => {});
     this.flush();
   }

@@ -78,25 +170,30 @@ export class RunLogger {
     const start = this.stageStartMs.get(event);
     const durationMs = start !== undefined ? Date.now() - start : undefined;
     this.push(`${event}:failed`, { ...data, error, durationMs });
-
+    const errorMsg = `[${event}] ${error}`;
+    this.log.errors.push(errorMsg);
+    appendJsonlLine(this.jsonlPath, { type: "error", message: errorMsg });
     this.flush();
   }

   /** Record the prompt hash for this run (call once at run start). */
   setPromptHash(hash: string): void {
     this.log.promptHash = hash;
+    appendJsonlLine(this.jsonlPath, { type: "meta", key: "promptHash", value: hash });
     this.flush();
   }

   /** Record the harness self-eval score (call once at run end). */
   setHarnessScore(score: number): void {
     this.log.harnessScore = score;
+    appendJsonlLine(this.jsonlPath, { type: "meta", key: "harnessScore", value: score });
     this.flush();
   }

   fileWritten(filePath: string): void {
     if (!this.log.filesWritten.includes(filePath)) {
       this.log.filesWritten.push(filePath);
+      appendJsonlLine(this.jsonlPath, { type: "file", path: filePath });
       this.flush();
     }
   }
@@ -104,6 +201,12 @@ export class RunLogger {
   finish(): void {
     this.log.endedAt = new Date().toISOString();
     this.log.totalDurationMs = Date.now() - this.startMs;
+    appendJsonlLine(this.jsonlPath, {
+      type: "footer",
+      endedAt: this.log.endedAt,
+      totalDurationMs: this.log.totalDurationMs,
+      harnessScore: this.log.harnessScore,
+    });
     this.flush();
   }

@@ -123,7 +226,10 @@ export class RunLogger {
   }

   private push(event: string, data?: Record<string, unknown>): void {
-
+    const entry: LogEntry = { ts: new Date().toISOString(), event, ...(data ? { data } : {}) };
+    this.log.entries.push(entry);
+    // Append to JSONL synchronously — durable even on crash
+    appendJsonlLine(this.jsonlPath, { type: "entry", ...entry });
     this.flush();
   }

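To make the durability story concrete, here is a sketch of what a `.jsonl` shadow file might contain and how it is recovered. The record values are invented for illustration; the record shapes (`header`, `entry`, `file`, `error`, `footer`) follow the writer and reader above.

```ts
import { reconstructRunLogFromJsonl } from "./core/run-logger";

// Illustrative shadow-file contents, one JSON record per line (values are made up):
//   {"type":"header","runId":"run-001","startedAt":"2025-01-01T12:00:00.000Z","workingDir":"/repo"}
//   {"type":"entry","ts":"2025-01-01T12:00:05.000Z","event":"codegen:start"}
//   {"type":"file","path":"src/user.service.ts"}
//   {"type":"error","message":"[codegen] provider timeout"}
// No footer line, because the run crashed before finish() was called.

const recovered = reconstructRunLogFromJsonl(".ai-spec-logs/run-001.jsonl");
if (recovered) {
  // runId and startedAt come from the header; entries, files and errors are
  // whatever made it to disk before the crash.
  console.log(recovered.runId, recovered.entries.length, recovered.errors);
}
```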
package/core/run-trend.ts
CHANGED
@@ -1,7 +1,7 @@
 import * as fs from "fs-extra";
 import * as path from "path";
 import chalk from "chalk";
-import { RunLog } from "./run-logger";
+import { RunLog, reconstructRunLogFromJsonl } from "./run-logger";

 const LOG_DIR = ".ai-spec-logs";

@@ -49,20 +49,40 @@ export async function loadRunLogs(workingDir: string): Promise<RunLog[]> {
   if (!(await fs.pathExists(logDir))) return [];

   const files = await fs.readdir(logDir);
-  const jsonFiles
+  const jsonFiles = new Set(files.filter((f) => f.endsWith(".json")));
+  const jsonlFiles = files.filter((f) => f.endsWith(".jsonl")).sort().reverse();

   const logs: RunLog[] = [];
-
+  const seenRunIds = new Set<string>();
+
+  // Primary path: read complete .json files (newest-first)
+  for (const file of [...jsonFiles].sort().reverse()) {
     try {
       const log: RunLog = await fs.readJson(path.join(logDir, file));
-      // only include runs that have a startedAt (minimal validity check)
       if (log.runId && log.startedAt) {
         logs.push(log);
+        seenRunIds.add(log.runId);
       }
     } catch {
       // corrupt file — skip silently
     }
   }
+
+  // Crash-recovery path: reconstruct from orphan .jsonl files (no matching .json)
+  for (const file of jsonlFiles) {
+    const runId = file.replace(/\.jsonl$/, "");
+    if (seenRunIds.has(runId)) continue; // already loaded via .json
+    const correspondingJson = `${runId}.json`;
+    if (jsonFiles.has(correspondingJson)) continue; // .json exists, prefer it
+    const log = reconstructRunLogFromJsonl(path.join(logDir, file));
+    if (log) {
+      logs.push(log);
+      seenRunIds.add(log.runId);
+    }
+  }
+
+  // Sort newest-first by startedAt
+  logs.sort((a, b) => b.startedAt.localeCompare(a.startedAt));
   return logs;
 }

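Taken together with the run-logger change, `loadRunLogs` now surfaces crashed runs alongside completed ones. A hedged usage sketch follows; treating a missing `endedAt` as "recovered from a crash" is an inference from the recovery path above, not something the package documents.

```ts
import { loadRunLogs } from "./core/run-trend";

async function printRunHistory(workingDir: string): Promise<void> {
  // Completed runs come from .json files; crashed runs are rebuilt from orphan .jsonl shadows.
  const logs = await loadRunLogs(workingDir);
  for (const log of logs) {
    const recovered = !log.endedAt; // assumption: no footer record means the run never finished
    console.log(`${log.startedAt}  ${log.runId}${recovered ? "  (recovered from .jsonl)" : ""}`);
  }
}

printRunHistory(process.cwd()).catch(console.error);
```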