ai-spec-dev 0.1.0 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +18 -0
- package/README.md +1215 -146
- package/RELEASE_LOG.md +1489 -0
- package/cli/index.ts +1981 -0
- package/cli/welcome.ts +151 -0
- package/core/code-generator.ts +757 -0
- package/core/combined-generator.ts +63 -0
- package/core/constitution-consolidator.ts +141 -0
- package/core/constitution-generator.ts +89 -0
- package/core/context-loader.ts +453 -0
- package/core/contract-bridge.ts +217 -0
- package/core/dsl-extractor.ts +337 -0
- package/core/dsl-types.ts +166 -0
- package/core/dsl-validator.ts +450 -0
- package/core/error-feedback.ts +354 -0
- package/core/frontend-context-loader.ts +602 -0
- package/core/global-constitution.ts +88 -0
- package/core/key-store.ts +49 -0
- package/core/knowledge-memory.ts +171 -0
- package/core/mock-server-generator.ts +571 -0
- package/core/openapi-exporter.ts +361 -0
- package/core/requirement-decomposer.ts +198 -0
- package/core/reviewer.ts +259 -0
- package/core/spec-assessor.ts +99 -0
- package/core/spec-generator.ts +428 -0
- package/core/spec-refiner.ts +89 -0
- package/core/spec-updater.ts +227 -0
- package/core/spec-versioning.ts +213 -0
- package/core/task-generator.ts +174 -0
- package/core/test-generator.ts +273 -0
- package/core/workspace-loader.ts +256 -0
- package/dist/cli/index.js +6717 -672
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/index.mjs +6717 -670
- package/dist/cli/index.mjs.map +1 -1
- package/dist/index.d.mts +147 -27
- package/dist/index.d.ts +147 -27
- package/dist/index.js +2337 -286
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2329 -285
- package/dist/index.mjs.map +1 -1
- package/git/worktree.ts +109 -0
- package/index.ts +9 -0
- package/package.json +4 -28
- package/prompts/codegen.prompt.ts +259 -0
- package/prompts/consolidate.prompt.ts +73 -0
- package/prompts/constitution.prompt.ts +63 -0
- package/prompts/decompose.prompt.ts +168 -0
- package/prompts/dsl.prompt.ts +203 -0
- package/prompts/frontend-spec.prompt.ts +191 -0
- package/prompts/global-constitution.prompt.ts +61 -0
- package/prompts/spec-assess.prompt.ts +53 -0
- package/prompts/spec.prompt.ts +102 -0
- package/prompts/tasks.prompt.ts +35 -0
- package/prompts/testgen.prompt.ts +84 -0
- package/prompts/update.prompt.ts +131 -0
- package/purpose.docx +0 -0
- package/purpose.md +444 -0
- package/tsconfig.json +14 -0
- package/tsup.config.ts +10 -0
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
import chalk from "chalk";
|
|
2
|
+
import * as fs from "fs-extra";
|
|
3
|
+
import * as path from "path";
|
|
4
|
+
import { AIProvider } from "./spec-generator";
|
|
5
|
+
import { SpecDSL } from "./dsl-types";
|
|
6
|
+
import { testGenSystemPrompt, testGenFrontendSystemPrompt, tddTestGenSystemPrompt } from "../prompts/testgen.prompt";
|
|
7
|
+
import { loadFrontendContext, FrontendContext } from "./frontend-context-loader";
|
|
8
|
+
import { FRONTEND_FRAMEWORKS } from "./context-loader";
|
|
9
|
+
|
|
10
|
+
// ─── Prompt builders ──────────────────────────────────────────────────────────
|
|
11
|
+
|
|
12
|
+
function buildBackendTestGenPrompt(dsl: SpecDSL, testDir: string): string {
|
|
13
|
+
const lines: string[] = [
|
|
14
|
+
`Generate test skeleton files for the "${dsl.feature.title}" feature.`,
|
|
15
|
+
`Test files should be placed under: ${testDir}\n`,
|
|
16
|
+
];
|
|
17
|
+
|
|
18
|
+
if (dsl.models.length > 0) {
|
|
19
|
+
lines.push("=== Data Models ===");
|
|
20
|
+
for (const m of dsl.models) {
|
|
21
|
+
lines.push(`${m.name}:`);
|
|
22
|
+
for (const f of m.fields) {
|
|
23
|
+
const flags = [f.required ? "required" : "", f.unique ? "unique" : ""].filter(Boolean).join(", ");
|
|
24
|
+
lines.push(` ${f.name}: ${f.type}${flags ? ` (${flags})` : ""}`);
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
lines.push("");
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
if (dsl.endpoints.length > 0) {
|
|
31
|
+
lines.push("=== API Endpoints ===");
|
|
32
|
+
for (const ep of dsl.endpoints) {
|
|
33
|
+
lines.push(`${ep.id}: ${ep.method} ${ep.path} [auth: ${ep.auth}] → ${ep.successStatus}`);
|
|
34
|
+
lines.push(` ${ep.description}`);
|
|
35
|
+
if (ep.request?.body) {
|
|
36
|
+
const fields = Object.entries(ep.request.body).map(([k, v]) => `${k}: ${v}`).join(", ");
|
|
37
|
+
lines.push(` body: { ${fields} }`);
|
|
38
|
+
}
|
|
39
|
+
if (ep.errors && ep.errors.length > 0) {
|
|
40
|
+
lines.push(` errors: ${ep.errors.map((e) => `${e.status} ${e.code}`).join(", ")}`);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
lines.push("");
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (dsl.behaviors.length > 0) {
|
|
47
|
+
lines.push("=== Business Behaviors (include edge-case tests for these) ===");
|
|
48
|
+
for (const b of dsl.behaviors) {
|
|
49
|
+
lines.push(`- ${b.description}`);
|
|
50
|
+
if (b.constraints) lines.push(` rules: ${b.constraints.join("; ")}`);
|
|
51
|
+
}
|
|
52
|
+
lines.push("");
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
lines.push(
|
|
56
|
+
"Generate one test file per logical module (e.g. one for API routes, one for service/model tests).",
|
|
57
|
+
'Output a JSON array of {"file": "path", "content": "full test file source"}.'
|
|
58
|
+
);
|
|
59
|
+
|
|
60
|
+
return lines.join("\n");
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
function buildFrontendTestGenPrompt(dsl: SpecDSL, testDir: string, ctx: FrontendContext): string {
|
|
64
|
+
const lines: string[] = [
|
|
65
|
+
`Generate test skeleton files for the "${dsl.feature.title}" frontend feature.`,
|
|
66
|
+
`Test framework : ${ctx.testFramework}`,
|
|
67
|
+
`Test files should be placed under: ${testDir}\n`,
|
|
68
|
+
];
|
|
69
|
+
|
|
70
|
+
// Component specs (primary for frontend)
|
|
71
|
+
if (dsl.components && dsl.components.length > 0) {
|
|
72
|
+
lines.push("=== Component Specs ===");
|
|
73
|
+
for (const cmp of dsl.components) {
|
|
74
|
+
lines.push(`${cmp.id}: ${cmp.name} — ${cmp.description}`);
|
|
75
|
+
if (cmp.props.length > 0) {
|
|
76
|
+
lines.push(` props: ${cmp.props.map((p) => `${p.name}${p.required ? "" : "?"}: ${p.type}`).join(", ")}`);
|
|
77
|
+
}
|
|
78
|
+
if (cmp.events.length > 0) {
|
|
79
|
+
lines.push(` events: ${cmp.events.map((e) => `${e.name}(${e.payload ?? ""})`).join(", ")}`);
|
|
80
|
+
}
|
|
81
|
+
if (Object.keys(cmp.state).length > 0) {
|
|
82
|
+
lines.push(` state: ${Object.entries(cmp.state).map(([k, v]) => `${k}: ${v}`).join(", ")}`);
|
|
83
|
+
}
|
|
84
|
+
if (cmp.apiCalls.length > 0) {
|
|
85
|
+
lines.push(` api calls: ${cmp.apiCalls.join(", ")}`);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
lines.push("");
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Endpoints (for API hook tests)
|
|
92
|
+
if (dsl.endpoints.length > 0) {
|
|
93
|
+
lines.push("=== API Endpoints (write hook/service tests for these) ===");
|
|
94
|
+
for (const ep of dsl.endpoints) {
|
|
95
|
+
lines.push(`${ep.id}: ${ep.method} ${ep.path} [auth: ${ep.auth}] → ${ep.successStatus}`);
|
|
96
|
+
if (ep.errors && ep.errors.length > 0) {
|
|
97
|
+
lines.push(` errors: ${ep.errors.map((e) => `${e.status} ${e.code}`).join(", ")}`);
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
lines.push("");
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
// Existing hooks (to import from)
|
|
104
|
+
if (ctx.hookFiles.length > 0) {
|
|
105
|
+
lines.push("=== Existing custom hooks (import from these, don't create duplicates) ===");
|
|
106
|
+
ctx.hookFiles.forEach((f) => lines.push(` - ${f}`));
|
|
107
|
+
lines.push("");
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// Existing API wrappers
|
|
111
|
+
if (ctx.apiWrapperContent.length > 0) {
|
|
112
|
+
lines.push("=== Existing API wrappers (reference these call patterns) ===");
|
|
113
|
+
ctx.apiWrapperContent.forEach((c) => {
|
|
114
|
+
lines.push("```");
|
|
115
|
+
lines.push(c.slice(0, 400));
|
|
116
|
+
lines.push("```");
|
|
117
|
+
});
|
|
118
|
+
lines.push("");
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
lines.push(
|
|
122
|
+
"Generate test files: one per component (RTL) and one for API hooks/services.",
|
|
123
|
+
'Output a JSON array of {"file": "path", "content": "full source"}.'
|
|
124
|
+
);
|
|
125
|
+
|
|
126
|
+
return lines.join("\n");
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// ─── Parser ───────────────────────────────────────────────────────────────────
|
|
130
|
+
|
|
131
|
+
/** One generated test file, as parsed from the AI model's JSON output. */
interface TestFileResult {
  /** Path of the test file, relative to the working directory. */
  file: string;
  /** Full source text to write at that path. */
  content: string;
}
|
|
135
|
+
|
|
136
|
+
function parseTestFiles(raw: string): TestFileResult[] {
|
|
137
|
+
const fenced = raw.match(/```(?:json)?\n(\[[\s\S]*?\])\n```/);
|
|
138
|
+
const jsonStr = fenced ? fenced[1] : (raw.match(/\[[\s\S]*\]/)?.[0] ?? "");
|
|
139
|
+
try {
|
|
140
|
+
const parsed = JSON.parse(jsonStr);
|
|
141
|
+
if (Array.isArray(parsed)) return parsed as TestFileResult[];
|
|
142
|
+
} catch {
|
|
143
|
+
// fall through
|
|
144
|
+
}
|
|
145
|
+
return [];
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
// ─── Frontend detection ───────────────────────────────────────────────────────
|
|
149
|
+
|
|
150
|
+
async function isFrontendProject(workingDir: string): Promise<boolean> {
|
|
151
|
+
const pkgPath = path.join(workingDir, "package.json");
|
|
152
|
+
if (!(await fs.pathExists(pkgPath))) return false;
|
|
153
|
+
try {
|
|
154
|
+
const pkg = await fs.readJson(pkgPath);
|
|
155
|
+
const deps = { ...(pkg.dependencies ?? {}), ...(pkg.devDependencies ?? {}) };
|
|
156
|
+
const keys = Object.keys(deps);
|
|
157
|
+
return keys.some((k) => (FRONTEND_FRAMEWORKS as readonly string[]).includes(k));
|
|
158
|
+
} catch {
|
|
159
|
+
return false;
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
// ─── TestGenerator ────────────────────────────────────────────────────────────
|
|
164
|
+
|
|
165
|
+
export class TestGenerator {
|
|
166
|
+
constructor(private provider: AIProvider) {}
|
|
167
|
+
|
|
168
|
+
/**
|
|
169
|
+
* Generate test skeleton files from a validated DSL.
|
|
170
|
+
* Automatically detects frontend vs backend and uses the appropriate template.
|
|
171
|
+
* Returns the list of test file paths written.
|
|
172
|
+
*/
|
|
173
|
+
async generate(dsl: SpecDSL, workingDir: string): Promise<string[]> {
|
|
174
|
+
console.log(chalk.blue("\n─── Test Generation ─────────────────────────────"));
|
|
175
|
+
|
|
176
|
+
const testDir = await this.detectTestDir(workingDir);
|
|
177
|
+
const frontend = await isFrontendProject(workingDir);
|
|
178
|
+
|
|
179
|
+
let prompt: string;
|
|
180
|
+
let systemPrompt: string;
|
|
181
|
+
|
|
182
|
+
if (frontend) {
|
|
183
|
+
const ctx = await loadFrontendContext(workingDir);
|
|
184
|
+
console.log(chalk.gray(` Mode: frontend (${ctx.framework} / ${ctx.testFramework})`));
|
|
185
|
+
console.log(chalk.gray(` Test directory: ${testDir}`));
|
|
186
|
+
prompt = buildFrontendTestGenPrompt(dsl, testDir, ctx);
|
|
187
|
+
systemPrompt = testGenFrontendSystemPrompt;
|
|
188
|
+
} else {
|
|
189
|
+
console.log(chalk.gray(` Mode: backend`));
|
|
190
|
+
console.log(chalk.gray(` Test directory: ${testDir}`));
|
|
191
|
+
prompt = buildBackendTestGenPrompt(dsl, testDir);
|
|
192
|
+
systemPrompt = testGenSystemPrompt;
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
let rawOutput: string;
|
|
196
|
+
try {
|
|
197
|
+
rawOutput = await this.provider.generate(prompt, systemPrompt);
|
|
198
|
+
} catch (err) {
|
|
199
|
+
console.log(chalk.yellow(` ⚠ Test generation AI call failed: ${(err as Error).message}`));
|
|
200
|
+
return [];
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
const testFiles = parseTestFiles(rawOutput);
|
|
204
|
+
if (testFiles.length === 0) {
|
|
205
|
+
console.log(chalk.yellow(" ⚠ Could not parse test files from AI output. Skipping."));
|
|
206
|
+
return [];
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
const writtenFiles: string[] = [];
|
|
210
|
+
for (const tf of testFiles) {
|
|
211
|
+
const fullPath = path.join(workingDir, tf.file);
|
|
212
|
+
await fs.ensureDir(path.dirname(fullPath));
|
|
213
|
+
await fs.writeFile(fullPath, tf.content, "utf-8");
|
|
214
|
+
console.log(chalk.green(` + ${tf.file}`));
|
|
215
|
+
writtenFiles.push(tf.file);
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
console.log(chalk.green(` ✔ ${writtenFiles.length} test file(s) generated.`));
|
|
219
|
+
return writtenFiles;
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
/**
|
|
223
|
+
* TDD mode: generate test files with real assertions BEFORE the implementation exists.
|
|
224
|
+
* These tests will initially fail — the error feedback loop drives implementation to pass them.
|
|
225
|
+
* Only supports backend projects (uses supertest + DSL endpoints/models).
|
|
226
|
+
*/
|
|
227
|
+
async generateTdd(dsl: SpecDSL, workingDir: string): Promise<string[]> {
|
|
228
|
+
console.log(chalk.blue("\n─── TDD Test Generation (pre-implementation) ────"));
|
|
229
|
+
|
|
230
|
+
const testDir = await this.detectTestDir(workingDir);
|
|
231
|
+
console.log(chalk.gray(` Mode: TDD (real assertions — tests will fail until implementation is complete)`));
|
|
232
|
+
console.log(chalk.gray(` Test directory: ${testDir}`));
|
|
233
|
+
|
|
234
|
+
const prompt = buildBackendTestGenPrompt(dsl, testDir);
|
|
235
|
+
|
|
236
|
+
let rawOutput: string;
|
|
237
|
+
try {
|
|
238
|
+
rawOutput = await this.provider.generate(prompt, tddTestGenSystemPrompt);
|
|
239
|
+
} catch (err) {
|
|
240
|
+
console.log(chalk.yellow(` ⚠ TDD test generation failed: ${(err as Error).message}`));
|
|
241
|
+
return [];
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
const testFiles = parseTestFiles(rawOutput);
|
|
245
|
+
if (testFiles.length === 0) {
|
|
246
|
+
console.log(chalk.yellow(" ⚠ Could not parse TDD test files from AI output. Skipping."));
|
|
247
|
+
return [];
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
const writtenFiles: string[] = [];
|
|
251
|
+
for (const tf of testFiles) {
|
|
252
|
+
const fullPath = path.join(workingDir, tf.file);
|
|
253
|
+
await fs.ensureDir(path.dirname(fullPath));
|
|
254
|
+
await fs.writeFile(fullPath, tf.content, "utf-8");
|
|
255
|
+
console.log(chalk.green(` + ${tf.file}`));
|
|
256
|
+
writtenFiles.push(tf.file);
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
console.log(
|
|
260
|
+
chalk.green(` ✔ ${writtenFiles.length} TDD test file(s) written.`) +
|
|
261
|
+
chalk.gray(" (expected to fail — implementation will make them pass)")
|
|
262
|
+
);
|
|
263
|
+
return writtenFiles;
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
private async detectTestDir(workingDir: string): Promise<string> {
|
|
267
|
+
const candidates = ["tests", "test", "__tests__", "src/__tests__", "spec"];
|
|
268
|
+
for (const c of candidates) {
|
|
269
|
+
if (await fs.pathExists(path.join(workingDir, c))) return c;
|
|
270
|
+
}
|
|
271
|
+
return "tests";
|
|
272
|
+
}
|
|
273
|
+
}
|
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
import * as fs from "fs-extra";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import { glob } from "glob";
|
|
4
|
+
|
|
5
|
+
// ─── Types ────────────────────────────────────────────────────────────────────
|
|
6
|
+
|
|
7
|
+
/** Broad technology classification for a repo, detected from its manifest files. */
export type RepoType =
  | "node-express"
  | "node-koa"
  | "react"
  | "next"
  | "vue"
  | "react-native"
  | "go"
  | "python"
  | "java"
  | "rust"
  | "php"
  | "unknown";

/** Architectural role a repo plays within the workspace. */
export type RepoRole = "backend" | "frontend" | "mobile" | "shared";

/** One repository entry in the workspace config. */
export interface RepoConfig {
  /** Display/identifier name; autoDetect() uses the directory name. */
  name: string;
  /** Relative path from workspace root (the directory containing .ai-spec-workspace.json) */
  path: string;
  /** Detected or declared technology (see detectRepoType). */
  type: RepoType;
  /** Detected or declared role; drives getProcessingOrder(). */
  role: RepoRole;
  /** Contents of .ai-spec-constitution.md, loaded at runtime */
  constitution?: string;
}

/** On-disk shape of .ai-spec-workspace.json (constitutions are stripped on save). */
export interface WorkspaceConfig {
  /** Workspace display name. */
  name: string;
  /** The repos that make up this workspace; must be non-empty. */
  repos: RepoConfig[];
}

/** File name of the workspace config, expected at the workspace root. */
export const WORKSPACE_CONFIG_FILE = ".ai-spec-workspace.json";
|
|
39
|
+
|
|
40
|
+
// ─── Type Detection ───────────────────────────────────────────────────────────
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Detect the repo type and role from its package.json dependencies.
|
|
44
|
+
*/
|
|
45
|
+
export async function detectRepoType(
|
|
46
|
+
repoAbsPath: string
|
|
47
|
+
): Promise<{ type: RepoType; role: RepoRole }> {
|
|
48
|
+
// ── Non-Node language detection (check before package.json) ──────────────
|
|
49
|
+
if (await fs.pathExists(path.join(repoAbsPath, "go.mod"))) {
|
|
50
|
+
return { type: "go", role: "backend" };
|
|
51
|
+
}
|
|
52
|
+
if (await fs.pathExists(path.join(repoAbsPath, "composer.json"))) {
|
|
53
|
+
return { type: "php", role: "backend" };
|
|
54
|
+
}
|
|
55
|
+
if (await fs.pathExists(path.join(repoAbsPath, "Cargo.toml"))) {
|
|
56
|
+
return { type: "rust", role: "backend" };
|
|
57
|
+
}
|
|
58
|
+
if (
|
|
59
|
+
(await fs.pathExists(path.join(repoAbsPath, "pom.xml"))) ||
|
|
60
|
+
(await fs.pathExists(path.join(repoAbsPath, "build.gradle"))) ||
|
|
61
|
+
(await fs.pathExists(path.join(repoAbsPath, "build.gradle.kts")))
|
|
62
|
+
) {
|
|
63
|
+
return { type: "java", role: "backend" };
|
|
64
|
+
}
|
|
65
|
+
if (
|
|
66
|
+
(await fs.pathExists(path.join(repoAbsPath, "requirements.txt"))) ||
|
|
67
|
+
(await fs.pathExists(path.join(repoAbsPath, "pyproject.toml"))) ||
|
|
68
|
+
(await fs.pathExists(path.join(repoAbsPath, "setup.py")))
|
|
69
|
+
) {
|
|
70
|
+
return { type: "python", role: "backend" };
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// ── Node.js detection via package.json ────────────────────────────────────
|
|
74
|
+
const pkgPath = path.join(repoAbsPath, "package.json");
|
|
75
|
+
if (!(await fs.pathExists(pkgPath))) {
|
|
76
|
+
return { type: "unknown", role: "shared" };
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
let pkg: Record<string, unknown> = {};
|
|
80
|
+
try {
|
|
81
|
+
pkg = await fs.readJson(pkgPath);
|
|
82
|
+
} catch {
|
|
83
|
+
return { type: "unknown", role: "shared" };
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
const deps = {
|
|
87
|
+
...((pkg.dependencies as Record<string, string>) ?? {}),
|
|
88
|
+
...((pkg.devDependencies as Record<string, string>) ?? {}),
|
|
89
|
+
};
|
|
90
|
+
const depKeys = Object.keys(deps);
|
|
91
|
+
|
|
92
|
+
const has = (...names: string[]) => names.some((n) => depKeys.includes(n));
|
|
93
|
+
|
|
94
|
+
if (has("react-native", "expo")) {
|
|
95
|
+
return { type: "react-native", role: "mobile" };
|
|
96
|
+
}
|
|
97
|
+
if (has("next")) {
|
|
98
|
+
return { type: "next", role: "frontend" };
|
|
99
|
+
}
|
|
100
|
+
if (has("react")) {
|
|
101
|
+
return { type: "react", role: "frontend" };
|
|
102
|
+
}
|
|
103
|
+
if (has("vue", "@vue/cli-service")) {
|
|
104
|
+
return { type: "vue", role: "frontend" };
|
|
105
|
+
}
|
|
106
|
+
if (has("koa", "@koa/router")) {
|
|
107
|
+
return { type: "node-koa", role: "backend" };
|
|
108
|
+
}
|
|
109
|
+
if (
|
|
110
|
+
has("express", "@nestjs/core", "fastify", "hapi") ||
|
|
111
|
+
has("prisma", "@prisma/client", "mongoose", "typeorm", "sequelize")
|
|
112
|
+
) {
|
|
113
|
+
return { type: "node-express", role: "backend" };
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
return { type: "unknown", role: "shared" };
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// ─── WorkspaceLoader ─────────────────────────────────────────────────────────
|
|
120
|
+
|
|
121
|
+
/**
 * Loads, auto-detects, resolves, and saves the multi-repo workspace config
 * (.ai-spec-workspace.json) rooted at `workspaceRoot`.
 */
export class WorkspaceLoader {
  constructor(private workspaceRoot: string) {}

  /**
   * Load and validate .ai-spec-workspace.json from the workspace root.
   * Returns null if the file does not exist (graceful degradation).
   *
   * @throws Error when the file exists but is unparsable JSON, is missing
   *   the `name`/`repos` fields, or `repos` is not a non-empty array.
   */
  async load(): Promise<WorkspaceConfig | null> {
    const configPath = path.join(this.workspaceRoot, WORKSPACE_CONFIG_FILE);
    if (!(await fs.pathExists(configPath))) {
      return null;
    }

    let raw: unknown;
    try {
      raw = await fs.readJson(configPath);
    } catch (err) {
      throw new Error(
        `Failed to parse ${WORKSPACE_CONFIG_FILE}: ${(err as Error).message}`
      );
    }

    // Structural validation before the cast below; only presence of the
    // top-level fields is checked, not the shape of each repo entry.
    if (
      typeof raw !== "object" ||
      raw === null ||
      !("name" in raw) ||
      !("repos" in raw)
    ) {
      throw new Error(
        `${WORKSPACE_CONFIG_FILE} is missing required fields: name, repos`
      );
    }

    const config = raw as WorkspaceConfig;

    if (!Array.isArray(config.repos) || config.repos.length === 0) {
      throw new Error(`${WORKSPACE_CONFIG_FILE}: repos must be a non-empty array`);
    }

    // Load constitutions at runtime
    const resolvedRepos = await this.resolveRepoPaths(config);
    return { ...config, repos: resolvedRepos };
  }

  /**
   * Scan sibling directories for repos by looking for package.json.
   * Auto-detects type and role from dependencies.
   *
   * @param names - Optional allow-list; when given, only directories whose
   *   name appears in it are considered.
   */
  async autoDetect(names?: string[]): Promise<RepoConfig[]> {
    const entries = await fs.readdir(this.workspaceRoot);
    const repos: RepoConfig[] = [];

    for (const entry of entries) {
      const absPath = path.join(this.workspaceRoot, entry);
      // stat may fail on broken symlinks etc.; treat those as non-dirs.
      const stat = await fs.stat(absPath).catch(() => null);
      if (!stat || !stat.isDirectory()) continue;
      if (entry.startsWith(".") || entry === "node_modules") continue;
      if (names && !names.includes(entry)) continue;

      // Accept any recognizable project (package.json, go.mod, Cargo.toml, pom.xml, etc.)
      const hasManifest =
        (await fs.pathExists(path.join(absPath, "package.json"))) ||
        (await fs.pathExists(path.join(absPath, "go.mod"))) ||
        (await fs.pathExists(path.join(absPath, "Cargo.toml"))) ||
        (await fs.pathExists(path.join(absPath, "pom.xml"))) ||
        (await fs.pathExists(path.join(absPath, "build.gradle"))) ||
        (await fs.pathExists(path.join(absPath, "requirements.txt"))) ||
        (await fs.pathExists(path.join(absPath, "pyproject.toml"))) ||
        (await fs.pathExists(path.join(absPath, "composer.json")));
      if (!hasManifest) continue;

      const { type, role } = await detectRepoType(absPath);
      // Directory name doubles as both the repo name and its relative path.
      repos.push({ name: entry, path: entry, type, role });
    }

    return repos;
  }

  /**
   * Resolve relative paths to absolute and load constitutions.
   * Repos without a .ai-spec-constitution.md get constitution === undefined.
   */
  async resolveRepoPaths(config: WorkspaceConfig): Promise<RepoConfig[]> {
    const resolved: RepoConfig[] = [];

    for (const repo of config.repos) {
      const absPath = path.resolve(this.workspaceRoot, repo.path);
      let constitution: string | undefined;

      const constitutionFile = path.join(absPath, ".ai-spec-constitution.md");
      if (await fs.pathExists(constitutionFile)) {
        constitution = await fs.readFile(constitutionFile, "utf-8");
      }

      resolved.push({ ...repo, constitution });
    }

    return resolved;
  }

  /**
   * Save a workspace config to disk.
   *
   * @returns The absolute path of the written config file.
   */
  async save(config: WorkspaceConfig): Promise<string> {
    const configPath = path.join(this.workspaceRoot, WORKSPACE_CONFIG_FILE);
    // Strip runtime-loaded constitutions before saving
    const toSave: WorkspaceConfig = {
      name: config.name,
      repos: config.repos.map(({ constitution: _c, ...rest }) => rest),
    };
    await fs.writeJson(configPath, toSave, { spaces: 2 });
    return configPath;
  }

  /**
   * Resolve the absolute path of a repo given its config.
   */
  resolveAbsPath(repo: RepoConfig): string {
    return path.resolve(this.workspaceRoot, repo.path);
  }

  /**
   * Find which repos are backend (contract providers) and which depend on them.
   * Returns a new array; the input is not mutated.
   */
  static getProcessingOrder(repos: RepoConfig[]): RepoConfig[] {
    // Backends first, then shared libs, then frontends, then mobile
    // (per roleOrder below — backends provide the contracts the others consume).
    const roleOrder: Record<RepoRole, number> = {
      backend: 0,
      shared: 1,
      frontend: 2,
      mobile: 3,
    };
    return [...repos].sort(
      (a, b) => roleOrder[a.role] - roleOrder[b.role]
    );
  }
}
|