@shahmarasy/prodo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +157 -0
- package/bin/prodo.cjs +6 -0
- package/dist/agent-command-installer.d.ts +4 -0
- package/dist/agent-command-installer.js +158 -0
- package/dist/agents.d.ts +15 -0
- package/dist/agents.js +47 -0
- package/dist/artifact-registry.d.ts +11 -0
- package/dist/artifact-registry.js +49 -0
- package/dist/artifacts.d.ts +9 -0
- package/dist/artifacts.js +514 -0
- package/dist/cli.d.ts +9 -0
- package/dist/cli.js +305 -0
- package/dist/consistency.d.ts +8 -0
- package/dist/consistency.js +268 -0
- package/dist/constants.d.ts +7 -0
- package/dist/constants.js +64 -0
- package/dist/doctor.d.ts +1 -0
- package/dist/doctor.js +123 -0
- package/dist/errors.d.ts +3 -0
- package/dist/errors.js +10 -0
- package/dist/hook-executor.d.ts +1 -0
- package/dist/hook-executor.js +175 -0
- package/dist/init-tui.d.ts +21 -0
- package/dist/init-tui.js +161 -0
- package/dist/init.d.ts +10 -0
- package/dist/init.js +307 -0
- package/dist/markdown.d.ts +11 -0
- package/dist/markdown.js +66 -0
- package/dist/normalize.d.ts +7 -0
- package/dist/normalize.js +73 -0
- package/dist/normalized-brief.d.ts +39 -0
- package/dist/normalized-brief.js +170 -0
- package/dist/output-index.d.ts +13 -0
- package/dist/output-index.js +55 -0
- package/dist/paths.d.ts +16 -0
- package/dist/paths.js +76 -0
- package/dist/preset-loader.d.ts +4 -0
- package/dist/preset-loader.js +210 -0
- package/dist/project-config.d.ts +14 -0
- package/dist/project-config.js +69 -0
- package/dist/providers/index.d.ts +2 -0
- package/dist/providers/index.js +12 -0
- package/dist/providers/mock-provider.d.ts +7 -0
- package/dist/providers/mock-provider.js +168 -0
- package/dist/providers/openai-provider.d.ts +11 -0
- package/dist/providers/openai-provider.js +69 -0
- package/dist/registry.d.ts +13 -0
- package/dist/registry.js +115 -0
- package/dist/settings.d.ts +6 -0
- package/dist/settings.js +34 -0
- package/dist/template-resolver.d.ts +11 -0
- package/dist/template-resolver.js +28 -0
- package/dist/templates.d.ts +33 -0
- package/dist/templates.js +428 -0
- package/dist/types.d.ts +35 -0
- package/dist/types.js +5 -0
- package/dist/utils.d.ts +6 -0
- package/dist/utils.js +53 -0
- package/dist/validate.d.ts +9 -0
- package/dist/validate.js +226 -0
- package/dist/validator.d.ts +5 -0
- package/dist/validator.js +80 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +30 -0
- package/dist/workflow-commands.d.ts +7 -0
- package/dist/workflow-commands.js +28 -0
- package/package.json +45 -0
- package/presets/fintech/preset.json +1 -0
- package/presets/fintech/prompts/prd.md +3 -0
- package/presets/marketplace/preset.json +1 -0
- package/presets/marketplace/prompts/prd.md +3 -0
- package/presets/saas/preset.json +1 -0
- package/presets/saas/prompts/prd.md +3 -0
- package/src/agent-command-installer.ts +174 -0
- package/src/agents.ts +56 -0
- package/src/artifact-registry.ts +69 -0
- package/src/artifacts.ts +606 -0
- package/src/cli.ts +322 -0
- package/src/consistency.ts +303 -0
- package/src/constants.ts +72 -0
- package/src/doctor.ts +137 -0
- package/src/errors.ts +7 -0
- package/src/hook-executor.ts +196 -0
- package/src/init-tui.ts +193 -0
- package/src/init.ts +375 -0
- package/src/markdown.ts +73 -0
- package/src/normalize.ts +89 -0
- package/src/normalized-brief.ts +206 -0
- package/src/output-index.ts +59 -0
- package/src/paths.ts +72 -0
- package/src/preset-loader.ts +237 -0
- package/src/project-config.ts +78 -0
- package/src/providers/index.ts +12 -0
- package/src/providers/mock-provider.ts +188 -0
- package/src/providers/openai-provider.ts +87 -0
- package/src/registry.ts +119 -0
- package/src/settings.ts +34 -0
- package/src/template-resolver.ts +33 -0
- package/src/templates.ts +440 -0
- package/src/types.ts +46 -0
- package/src/utils.ts +50 -0
- package/src/validate.ts +246 -0
- package/src/validator.ts +96 -0
- package/src/version.ts +24 -0
- package/src/workflow-commands.ts +31 -0
- package/templates/artifacts/prd.md +219 -0
- package/templates/artifacts/stories.md +49 -0
- package/templates/artifacts/techspec.md +42 -0
- package/templates/artifacts/wireframe.html +260 -0
- package/templates/artifacts/wireframe.md +22 -0
- package/templates/artifacts/workflow.md +22 -0
- package/templates/artifacts/workflow.mmd +6 -0
- package/templates/commands/prodo-normalize.md +24 -0
- package/templates/commands/prodo-prd.md +24 -0
- package/templates/commands/prodo-stories.md +24 -0
- package/templates/commands/prodo-techspec.md +24 -0
- package/templates/commands/prodo-validate.md +24 -0
- package/templates/commands/prodo-wireframe.md +24 -0
- package/templates/commands/prodo-workflow.md +24 -0
package/src/cli.ts
ADDED
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
import { Command } from "commander";
|
|
2
|
+
import { createHash } from "node:crypto";
|
|
3
|
+
import fs from "node:fs/promises";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import { loadAgentCommandSet, resolveAgent } from "./agents";
|
|
6
|
+
import { resolveAi, type SupportedAi } from "./agent-command-installer";
|
|
7
|
+
import { listArtifactTypes } from "./artifact-registry";
|
|
8
|
+
import { generateArtifact } from "./artifacts";
|
|
9
|
+
import { runDoctor } from "./doctor";
|
|
10
|
+
import { UserError } from "./errors";
|
|
11
|
+
import { runHookPhase } from "./hook-executor";
|
|
12
|
+
import { runInit } from "./init";
|
|
13
|
+
import { finishInitInteractive, gatherInitSelections } from "./init-tui";
|
|
14
|
+
import { runNormalize } from "./normalize";
|
|
15
|
+
import { briefPath } from "./paths";
|
|
16
|
+
import { type ArtifactType } from "./types";
|
|
17
|
+
import { fileExists } from "./utils";
|
|
18
|
+
import { runValidate } from "./validate";
|
|
19
|
+
import { readCliVersion } from "./version";
|
|
20
|
+
|
|
21
|
+
/**
 * Options accepted by runCli. Everything is optional so tests and
 * wrapper binaries can inject their own process context instead of
 * relying on the real process globals.
 */
type RunOptions = {
  // Slash-command name (e.g. "prodo-prd") to dispatch directly instead
  // of parsing the subcommand out of argv; see mapForcedCommand.
  forcedCommand?: string;
  // Working directory override; defaults to process.cwd().
  cwd?: string;
  // Node-style argv (including the leading runtime/script entries);
  // defaults to process.argv.
  argv?: string[];
  // Standard-output sink; defaults to console.log.
  log?: (message: string) => void;
  // Error-output sink; defaults to console.error.
  error?: (message: string) => void;
};
|
|
28
|
+
|
|
29
|
+
// Dynamic import performed through the Function constructor so the call is
// opaque to the TypeScript compiler. NOTE(review): presumably this prevents a
// CommonJS-targeting compile from rewriting `import()` into `require()`
// (needed for ESM-only dependencies such as @clack/prompts) — confirm against
// the project's tsconfig module setting.
const dynamicImport = new Function("specifier", "return import(specifier)") as (
  specifier: string
) => Promise<unknown>;
|
|
32
|
+
|
|
33
|
+
function mapForcedCommand(forcedCommand: string): ArtifactType | "init" | "validate" | "normalize" | undefined {
|
|
34
|
+
if (forcedCommand === "prodo-init") return "init";
|
|
35
|
+
if (forcedCommand === "prodo-validate") return "validate";
|
|
36
|
+
if (forcedCommand === "prodo-normalize") return "normalize";
|
|
37
|
+
if (forcedCommand === "prodo-prd") return "prd";
|
|
38
|
+
if (forcedCommand === "prodo-workflow") return "workflow";
|
|
39
|
+
if (forcedCommand === "prodo-wireframe") return "wireframe";
|
|
40
|
+
if (forcedCommand === "prodo-stories") return "stories";
|
|
41
|
+
if (forcedCommand === "prodo-techspec") return "techspec";
|
|
42
|
+
return undefined;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
async function runArtifactCommand(
|
|
46
|
+
type: ArtifactType,
|
|
47
|
+
opts: { from?: string; out?: string; agent?: string },
|
|
48
|
+
cwd: string,
|
|
49
|
+
log: (message: string) => void,
|
|
50
|
+
options?: { suggestValidate?: boolean }
|
|
51
|
+
): Promise<void> {
|
|
52
|
+
await runHookPhase(cwd, `before_${type}`, log);
|
|
53
|
+
const agent = resolveAgent(opts.agent);
|
|
54
|
+
const file = await generateArtifact({
|
|
55
|
+
artifactType: type,
|
|
56
|
+
cwd,
|
|
57
|
+
normalizedBriefOverride: opts.from,
|
|
58
|
+
outPath: opts.out,
|
|
59
|
+
agent
|
|
60
|
+
});
|
|
61
|
+
const agentMsg = agent ? ` [agent=${agent}]` : "";
|
|
62
|
+
log(`${type.toUpperCase()} generated${agentMsg}: ${file}`);
|
|
63
|
+
if (options?.suggestValidate !== false) {
|
|
64
|
+
log("Tip: run `prodo validate` to check cross-artifact consistency.");
|
|
65
|
+
}
|
|
66
|
+
await runHookPhase(cwd, `after_${type}`, log);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
 * Fingerprint of brief.md captured before and after a task so that
 * withBriefReadOnlyGuard can detect modification or removal.
 */
type BriefSnapshot = {
  // sha256 hex digest of the file contents.
  hash: string;
  // Modification timestamp (milliseconds) from fs.stat.
  mtimeMs: number;
  // File size in bytes from fs.stat.
  size: number;
};
|
|
74
|
+
|
|
75
|
+
async function snapshotBrief(cwd: string): Promise<BriefSnapshot | null> {
|
|
76
|
+
const file = briefPath(cwd);
|
|
77
|
+
if (!(await fileExists(file))) return null;
|
|
78
|
+
const [raw, stat] = await Promise.all([fs.readFile(file), fs.stat(file)]);
|
|
79
|
+
return {
|
|
80
|
+
hash: createHash("sha256").update(raw).digest("hex"),
|
|
81
|
+
mtimeMs: stat.mtimeMs,
|
|
82
|
+
size: stat.size
|
|
83
|
+
};
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
async function withBriefReadOnlyGuard(cwd: string, task: () => Promise<void>): Promise<void> {
|
|
87
|
+
const before = await snapshotBrief(cwd);
|
|
88
|
+
await task();
|
|
89
|
+
const after = await snapshotBrief(cwd);
|
|
90
|
+
if (!before) return;
|
|
91
|
+
if (!after) {
|
|
92
|
+
throw new UserError("Input file `brief.md` was removed during execution. Input files are read-only.");
|
|
93
|
+
}
|
|
94
|
+
if (before.hash !== after.hash || before.size !== after.size || before.mtimeMs !== after.mtimeMs) {
|
|
95
|
+
throw new UserError("Input file `brief.md` was modified during execution. Input files are read-only.");
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
 * Builds the commander program and executes one CLI invocation.
 *
 * Registers the primary commands (init, generate), advanced per-artifact
 * and utility commands, then dispatches either a forced slash-command or
 * the provided argv.
 *
 * @param options - injectable process context (cwd, argv, output sinks,
 *   optional forced command); see RunOptions.
 * @returns exit code: 0 on success, 1 when an error was caught.
 */
export async function runCli(options: RunOptions = {}): Promise<number> {
  // Fall back to the real process context for anything not injected.
  const cwd = options.cwd ?? process.cwd();
  const argv = options.argv ?? process.argv;
  const out = options.log ?? console.log;
  const err = options.error ?? console.error;
  const forced = options.forcedCommand ? mapForcedCommand(options.forcedCommand) : undefined;

  const program = new Command();
  // NOTE: readCliVersion/listArtifactTypes run before the try/catch below,
  // so failures here reject the returned promise instead of returning 1.
  const version = await readCliVersion(cwd);
  program
    .name("prodo")
    .description("CLI-first, prompt-powered product artifact kit")
    .version(`prodo ${version}`, "-v, --version", "Show Prodo version")
    .showHelpAfterError();
  const artifactTypes = await listArtifactTypes(cwd);

  // `prodo init [target]` — scaffold a workspace, optionally interactive.
  program
    .command("init [target]")
    .option("--ai <name>", "agent integration: codex | gemini-cli | claude-cli")
    .option("--lang <code>", "document language (e.g. en, tr)")
    .option("--preset <name>", "preset to install during initialization")
    .action(async (target, opts) => {
      const projectRoot = path.resolve(cwd, target ?? ".");
      const selected = await gatherInitSelections({
        projectRoot,
        aiInput: opts.ai,
        langInput: opts.lang
      });
      const selectedAi = selected.ai as SupportedAi | undefined;

      if (selected.interactive) {
        // Interactive path: show a spinner while scaffolding, then hand
        // off to the TUI for the closing flow.
        const clack = (await dynamicImport("@clack/prompts")) as typeof import("@clack/prompts");
        const s = clack.spinner();
        s.start("Scaffolding Prodo workspace...");
        const result = await runInit(projectRoot, {
          ai: selectedAi,
          lang: selected.lang,
          preset: opts.preset,
          script: selected.script
        });
        s.stop("Scaffold complete.");
        await finishInitInteractive({
          projectRoot,
          settingsPath: result.settingsPath,
          ai: selectedAi,
          script: selected.script,
          lang: selected.lang
        });
        return;
      }

      // Non-interactive path: scaffold, then print a plain-text summary.
      const result = await runInit(projectRoot, {
        ai: selectedAi,
        lang: selected.lang,
        preset: opts.preset,
        script: selected.script
      });
      out(`Initialized Prodo scaffold at ${path.join(projectRoot, ".prodo")}`);
      if (selectedAi) {
        out(`Agent command set installed for ${selectedAi}.`);
        out(`Installed ${result.installedAgentFiles.length} command files.`);
        out("Primary workflow: run `prodo generate` after editing brief.md.");
        out("Advanced: artifact-level slash commands remain available when needed.");
      } else {
        out("No agent selected. Use `prodo generate` for end-to-end generation.");
        out("Advanced commands are still available when needed.");
      }
      out(`Settings file: ${result.settingsPath}`);
      out("Next: edit brief.md, then run `prodo generate`.");
    });

  // `prodo generate` — full pipeline: normalize, every artifact, validate.
  program
    .command("generate")
    .description("Run end-to-end pipeline: normalize -> generate artifacts -> validate")
    .option("--agent <name>", "agent profile: codex | gemini-cli | claude-cli")
    .option("--strict", "treat validation warnings as errors")
    .option("--report <path>", "validation report output path")
    .action(async (opts) => {
      // Validate the agent name up front so bad input fails fast.
      if (opts.agent) resolveAgent(opts.agent);
      await withBriefReadOnlyGuard(cwd, async () => {
        await runHookPhase(cwd, "before_normalize", out);
        const normalizedPath = await runNormalize({ cwd });
        out(`Normalized brief written to: ${normalizedPath}`);
        await runHookPhase(cwd, "after_normalize", out);

        // Generate artifacts sequentially; the validate tip is suppressed
        // because the pipeline validates immediately afterwards.
        for (const type of artifactTypes) {
          await runArtifactCommand(type, { from: normalizedPath, agent: opts.agent }, cwd, out, {
            suggestValidate: false
          });
        }

        await runHookPhase(cwd, "before_validate", out);
        const result = await runValidate(cwd, {
          strict: Boolean(opts.strict),
          report: opts.report
        });
        out(`Validation report written to: ${result.reportPath}`);
        if (!result.pass) {
          throw new UserError("Validation failed. Review report and fix issues.");
        }
        out("Generation pipeline completed. Validation passed.");
        await runHookPhase(cwd, "after_validate", out);
      });
    });

  // `prodo normalize` — run only the brief-normalization step.
  program
    .command("normalize")
    .description("Advanced: normalize brief without full pipeline")
    .option("--brief <path>", "path to start brief markdown")
    .option("--out <path>", "output normalized brief json path")
    .option("--agent <name>", "agent profile: codex | gemini-cli | claude-cli")
    .action(async (opts: { brief?: string; out?: string; agent?: string }) => {
      if (opts.agent) resolveAgent(opts.agent);
      await withBriefReadOnlyGuard(cwd, async () => {
        await runHookPhase(cwd, "before_normalize", out);
        const outPath = await runNormalize({
          cwd,
          brief: opts.brief,
          out: opts.out
        });
        out(`Normalized brief written to: ${outPath}`);
        await runHookPhase(cwd, "after_normalize", out);
      });
    });

  // `prodo doctor` (alias `check`) — environment diagnostics.
  program
    .command("doctor")
    .alias("check")
    .description("Check local environment and toolchain readiness")
    .action(async () => {
      await runDoctor(cwd, out);
    });

  // One subcommand per registered artifact type (e.g. `prodo prd`).
  for (const type of artifactTypes) {
    program
      .command(type)
      .description(`Advanced: generate only ${type} artifact`)
      .option("--from <path>", "path to normalized-brief.json")
      .option("--out <path>", "output file path")
      .option("--agent <name>", "agent profile: codex | gemini-cli | claude-cli")
      .action(async (opts: { from?: string; out?: string; agent?: string }) => {
        await withBriefReadOnlyGuard(cwd, async () => {
          await runArtifactCommand(type, opts, cwd, out);
        });
      });
  }

  // `prodo agent-commands` — print the command set for a given agent.
  program
    .command("agent-commands")
    .requiredOption("--agent <name>", "agent profile: codex | gemini-cli | claude-cli")
    .action(async (opts) => {
      const agent = resolveAgent(opts.agent);
      if (!agent) throw new UserError("Agent is required.");
      const set = await loadAgentCommandSet(cwd, agent);
      out(`Agent: ${set.agent}`);
      if (set.description) out(`Description: ${set.description}`);
      out("");
      out("Recommended sequence:");
      for (const item of set.recommended_sequence ?? []) {
        out(`- ${item.command}: ${item.purpose}`);
      }
      if (set.artifact_shortcuts) {
        out("");
        out("Artifact shortcuts:");
        for (const [key, command] of Object.entries(set.artifact_shortcuts)) {
          out(`- ${key}: ${command}`);
        }
      }
    });

  // `prodo validate` — run only the validation step.
  program
    .command("validate")
    .description("Advanced: run validation only")
    .option("--strict", "treat warnings as errors")
    .option("--report <path>", "report output path")
    .option("--agent <name>", "agent profile: codex | gemini-cli | claude-cli")
    .action(async (opts: { strict?: boolean; report?: string; agent?: string }) => {
      if (opts.agent) resolveAgent(opts.agent);
      await withBriefReadOnlyGuard(cwd, async () => {
        await runHookPhase(cwd, "before_validate", out);
        const result = await runValidate(cwd, {
          strict: Boolean(opts.strict),
          report: opts.report
        });
        out(`Validation report written to: ${result.reportPath}`);
        if (!result.pass) {
          throw new UserError("Validation failed. Review report and fix issues.");
        }
        out("Validation passed.");
        await runHookPhase(cwd, "after_validate", out);
      });
    });

  try {
    if (forced) {
      // Forced slash-command: rebuild argv with the mapped subcommand in
      // front of the caller-supplied arguments.
      if (forced === "init") {
        await program.parseAsync(["node", "prodo", "init", ...argv.slice(2)]);
      } else if (forced === "normalize") {
        await program.parseAsync(["node", "prodo", "normalize", ...argv.slice(2)]);
      } else if (forced === "validate") {
        await program.parseAsync(["node", "prodo", "validate", ...argv.slice(2)]);
      } else {
        await program.parseAsync(["node", "prodo", forced, ...argv.slice(2)]);
      }
    } else {
      await program.parseAsync(argv);
    }
    return 0;
  } catch (error) {
    // UserError and unexpected errors are both reported on the error
    // sink and mapped to exit code 1.
    if (error instanceof UserError) {
      err(error.message);
      return 1;
    }
    const unknown = error as Error;
    err(unknown.message);
    return 1;
  }
}
|
|
317
|
+
|
|
318
|
+
if (require.main === module) {
|
|
319
|
+
runCli().then((code) => {
|
|
320
|
+
process.exitCode = code;
|
|
321
|
+
});
|
|
322
|
+
}
|
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { listArtifactTypes, getArtifactDefinition } from "./artifact-registry";
|
|
3
|
+
import { contractIds, parseNormalizedBriefOrThrow } from "./normalized-brief";
|
|
4
|
+
import { createProvider } from "./providers";
|
|
5
|
+
import type { ArtifactDoc, ArtifactType, ContractCoverage, ValidationIssue } from "./types";
|
|
6
|
+
|
|
7
|
+
/**
 * An artifact output loaded for validation: its registered type, the
 * path it was loaded from, and the parsed frontmatter/body document.
 */
type LoadedArtifact = {
  type: ArtifactType;
  file: string;
  doc: ArtifactDoc;
};
|
|
12
|
+
|
|
13
|
+
function asStringArray(value: unknown): string[] {
|
|
14
|
+
if (!Array.isArray(value)) return [];
|
|
15
|
+
return value.filter((item): item is string => typeof item === "string");
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
function readCoverage(frontmatter: Record<string, unknown>): ContractCoverage {
|
|
19
|
+
const coverage = frontmatter.contract_coverage as Partial<ContractCoverage> | undefined;
|
|
20
|
+
return {
|
|
21
|
+
goals: asStringArray(coverage?.goals),
|
|
22
|
+
core_features: asStringArray(coverage?.core_features),
|
|
23
|
+
constraints: asStringArray(coverage?.constraints)
|
|
24
|
+
};
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async function checkMissingArtifacts(cwd: string, loadedArtifacts: LoadedArtifact[]): Promise<ValidationIssue[]> {
|
|
28
|
+
const expectedTypes = await listArtifactTypes(cwd);
|
|
29
|
+
const present = new Set(loadedArtifacts.map((item) => item.type));
|
|
30
|
+
const missing = expectedTypes.filter((type) => !present.has(type));
|
|
31
|
+
if (missing.length === 0) return [];
|
|
32
|
+
return [
|
|
33
|
+
{
|
|
34
|
+
level: "warning",
|
|
35
|
+
code: "missing_artifacts",
|
|
36
|
+
check: "schema",
|
|
37
|
+
message: `Some artifacts are missing from outputs: ${missing.join(", ")}`,
|
|
38
|
+
suggestion: "Run the corresponding prodo-* commands before final validation."
|
|
39
|
+
}
|
|
40
|
+
];
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
async function checkContractCoverage(
|
|
44
|
+
cwd: string,
|
|
45
|
+
loaded: LoadedArtifact[],
|
|
46
|
+
normalizedBrief: Record<string, unknown>
|
|
47
|
+
): Promise<ValidationIssue[]> {
|
|
48
|
+
const issues: ValidationIssue[] = [];
|
|
49
|
+
const normalized = parseNormalizedBriefOrThrow(normalizedBrief);
|
|
50
|
+
const expected = contractIds(normalized.contracts);
|
|
51
|
+
|
|
52
|
+
for (const artifact of loaded) {
|
|
53
|
+
const def = await getArtifactDefinition(cwd, artifact.type);
|
|
54
|
+
const coverage = readCoverage(artifact.doc.frontmatter);
|
|
55
|
+
for (const key of def.required_contracts) {
|
|
56
|
+
const missing = expected[key].filter((id) => !coverage[key].includes(id));
|
|
57
|
+
if (missing.length === 0) continue;
|
|
58
|
+
issues.push({
|
|
59
|
+
level: "error",
|
|
60
|
+
code: "missing_contract_coverage",
|
|
61
|
+
check: "tag_coverage",
|
|
62
|
+
artifactType: artifact.type,
|
|
63
|
+
file: artifact.file,
|
|
64
|
+
field: `frontmatter.contract_coverage.${key}`,
|
|
65
|
+
message: `Artifact is missing required contract IDs for ${key}: ${missing.join(", ")}`,
|
|
66
|
+
suggestion: "Regenerate artifact and include explicit contract tags such as [G1], [F2], [C1]."
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
return issues;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
function checkUpstreamReferences(loaded: LoadedArtifact[]): ValidationIssue[] {
|
|
75
|
+
const issues: ValidationIssue[] = [];
|
|
76
|
+
const filesByName = new Set(loaded.map((item) => path.normalize(item.file)));
|
|
77
|
+
|
|
78
|
+
for (const artifact of loaded) {
|
|
79
|
+
const upstream = artifact.doc.frontmatter.upstream_artifacts;
|
|
80
|
+
if (!Array.isArray(upstream)) continue;
|
|
81
|
+
|
|
82
|
+
for (const rawItem of upstream) {
|
|
83
|
+
if (typeof rawItem !== "string") continue;
|
|
84
|
+
const resolved = path.normalize(path.resolve(path.dirname(artifact.file), rawItem));
|
|
85
|
+
if (!filesByName.has(resolved)) {
|
|
86
|
+
issues.push({
|
|
87
|
+
level: "error",
|
|
88
|
+
code: "broken_upstream_reference",
|
|
89
|
+
check: "schema",
|
|
90
|
+
artifactType: artifact.type,
|
|
91
|
+
file: artifact.file,
|
|
92
|
+
field: "frontmatter.upstream_artifacts",
|
|
93
|
+
message: `Referenced upstream artifact not found: ${rawItem}`,
|
|
94
|
+
suggestion: "Regenerate this artifact or update upstream_artifacts paths to existing outputs."
|
|
95
|
+
});
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
return issues;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
function taggedLinesByContract(body: string): Array<{ contractId: string; line: string }> {
|
|
104
|
+
const lines = body
|
|
105
|
+
.split(/\r?\n/)
|
|
106
|
+
.map((line) => line.trim())
|
|
107
|
+
.filter((line) => line.length > 0);
|
|
108
|
+
const tagged: Array<{ contractId: string; line: string }> = [];
|
|
109
|
+
for (const line of lines) {
|
|
110
|
+
const matches = line.match(/\[([GFC][0-9]+)\]/g) ?? [];
|
|
111
|
+
for (const match of matches) {
|
|
112
|
+
tagged.push({ contractId: match.slice(1, -1), line });
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
return tagged;
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
function parseJsonObject<T>(raw: string, fallback: T): T {
|
|
119
|
+
const trimmed = raw.trim();
|
|
120
|
+
const fenced = trimmed.match(/```(?:json)?\s*([\s\S]*?)```/i);
|
|
121
|
+
const candidate = fenced ? fenced[1] : trimmed;
|
|
122
|
+
try {
|
|
123
|
+
return JSON.parse(candidate) as T;
|
|
124
|
+
} catch {
|
|
125
|
+
return fallback;
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
function hasEnglishLeak(body: string): boolean {
|
|
130
|
+
const markers = [" the ", " and ", " with ", " user ", " should ", " must "];
|
|
131
|
+
const normalized = ` ${body.toLowerCase().replace(/\s+/g, " ")} `;
|
|
132
|
+
return markers.filter((item) => normalized.includes(item)).length >= 2;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
function checkLanguageConsistency(loaded: LoadedArtifact[]): ValidationIssue[] {
|
|
136
|
+
const issues: ValidationIssue[] = [];
|
|
137
|
+
const languages = new Set<string>();
|
|
138
|
+
for (const artifact of loaded) {
|
|
139
|
+
const lang = String((artifact.doc.frontmatter.language ?? "")).toLowerCase();
|
|
140
|
+
if (lang) languages.add(lang);
|
|
141
|
+
if (lang.startsWith("tr") && hasEnglishLeak(artifact.doc.body)) {
|
|
142
|
+
issues.push({
|
|
143
|
+
level: "error",
|
|
144
|
+
code: "language_mixed_content",
|
|
145
|
+
check: "schema",
|
|
146
|
+
artifactType: artifact.type,
|
|
147
|
+
file: artifact.file,
|
|
148
|
+
message: "Artifact contains mixed language content while target language is Turkish.",
|
|
149
|
+
suggestion: "Regenerate artifact with strict Turkish output."
|
|
150
|
+
});
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
if (languages.size > 1) {
|
|
154
|
+
issues.push({
|
|
155
|
+
level: "error",
|
|
156
|
+
code: "language_inconsistent_across_artifacts",
|
|
157
|
+
check: "schema",
|
|
158
|
+
message: "Artifacts have inconsistent language settings.",
|
|
159
|
+
suggestion: "Regenerate artifacts so all frontmatter.language values match."
|
|
160
|
+
});
|
|
161
|
+
}
|
|
162
|
+
return issues;
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
/**
 * Validates contract tags inside artifact bodies:
 *  - any tag whose ID is not in the normalized brief's contracts is an
 *    immediate "unknown_contract_tag" error;
 *  - for each known tag, asks the provider whether the tagged line is
 *    semantically relevant to the contract text, flagging
 *    "irrelevant_contract_tag_usage" when it is not.
 *
 * Provider calls are made sequentially, one per tag occurrence.
 */
async function checkContractRelevance(
  loaded: LoadedArtifact[],
  normalizedBrief: Record<string, unknown>
): Promise<ValidationIssue[]> {
  const normalized = parseNormalizedBriefOrThrow(normalizedBrief);
  // Flatten all contract groups into one id -> text lookup.
  const contractMap = new Map<string, string>();
  for (const item of normalized.contracts.goals) contractMap.set(item.id, item.text);
  for (const item of normalized.contracts.core_features) contractMap.set(item.id, item.text);
  for (const item of normalized.contracts.constraints) contractMap.set(item.id, item.text);

  const provider = createProvider();
  const issues: ValidationIssue[] = [];
  for (const artifact of loaded) {
    const taggedLines = taggedLinesByContract(artifact.doc.body);
    for (const tagged of taggedLines) {
      const contractText = contractMap.get(tagged.contractId);
      if (!contractText) {
        issues.push({
          level: "error",
          code: "unknown_contract_tag",
          check: "contract_relevance",
          artifactType: artifact.type,
          file: artifact.file,
          field: tagged.contractId,
          message: `Unknown contract tag used: ${tagged.contractId}`,
          suggestion: "Use only contract IDs that exist in normalized brief contracts."
        });
        continue;
      }

      // Ask the provider to judge semantic relevance of the tagged line.
      const response = await provider.generate(
        "Evaluate if tagged line semantically matches contract text.",
        {
          contract_id: tagged.contractId,
          contract_text: contractText,
          context_text: tagged.line
        },
        {
          artifactType: "contract_relevance",
          requiredHeadings: [],
          requiredContracts: []
        }
      );

      // Missing/unparseable verdicts default to {} and are treated as
      // not relevant (Boolean(undefined) === false).
      const verdict = parseJsonObject<{ relevant?: boolean; score?: number; reason?: string }>(response.body, {});
      const relevant = Boolean(verdict.relevant);
      if (!relevant) {
        issues.push({
          level: "error",
          code: "irrelevant_contract_tag_usage",
          check: "contract_relevance",
          artifactType: artifact.type,
          file: artifact.file,
          field: tagged.contractId,
          message: `Tag ${tagged.contractId} does not match nearby content semantically.`,
          suggestion: verdict.reason ?? "Rewrite the tagged sentence so it clearly addresses the referenced contract."
        });
      }
    }
  }
  return issues;
}
|
|
227
|
+
|
|
228
|
+
/**
 * Compares fixed artifact pairs (prd/stories, workflow/techspec,
 * workflow/wireframe) via the provider and converts any reported
 * contradictions into validation issues. Pairs where either side is
 * missing are skipped silently.
 */
async function checkSemanticPairs(loaded: LoadedArtifact[]): Promise<ValidationIssue[]> {
  const byType = new Map<ArtifactType, LoadedArtifact>();
  for (const artifact of loaded) byType.set(artifact.type, artifact);

  // Pairings that must agree with each other semantically.
  const pairs: Array<[ArtifactType, ArtifactType]> = [
    ["prd", "stories"],
    ["workflow", "techspec"],
    ["workflow", "wireframe"]
  ];
  const provider = createProvider();
  const issues: ValidationIssue[] = [];

  for (const [leftType, rightType] of pairs) {
    const left = byType.get(leftType);
    const right = byType.get(rightType);
    if (!left || !right) continue;

    const result = await provider.generate(
      "Compare paired artifacts semantically and return contradictions.",
      {
        pair: {
          left_type: leftType,
          left_file: left.file,
          left_coverage: readCoverage(left.doc.frontmatter),
          left_body: left.doc.body,
          right_type: rightType,
          right_file: right.file,
          right_coverage: readCoverage(right.doc.frontmatter),
          right_body: right.doc.body
        }
      },
      {
        artifactType: "semantic_consistency",
        requiredHeadings: [],
        requiredContracts: []
      }
    );

    // Every field from the provider is defensively type-checked, with
    // sensible defaults when a field is missing or has the wrong type.
    const parsed = parseJsonObject<{ issues?: Array<Record<string, unknown>> }>(result.body, { issues: [] });
    for (const item of parsed.issues ?? []) {
      issues.push({
        level: (item.level === "warning" ? "warning" : "error") as "error" | "warning",
        code: typeof item.code === "string" ? item.code : "semantic_inconsistency",
        check: "semantic_consistency",
        file: typeof item.file === "string" ? item.file : left.file,
        field: typeof item.contract_id === "string" ? item.contract_id : undefined,
        message:
          typeof item.message === "string"
            ? item.message
            : `Semantic mismatch between ${leftType} and ${rightType}.`,
        suggestion:
          typeof item.suggestion === "string"
            ? item.suggestion
            : `Align ${leftType} and ${rightType} decisions and regenerate.`
      });
    }
  }

  return issues;
}
|
|
288
|
+
|
|
289
|
+
export async function checkConsistency(
|
|
290
|
+
cwd: string,
|
|
291
|
+
loadedArtifacts: LoadedArtifact[],
|
|
292
|
+
normalizedBrief: Record<string, unknown>
|
|
293
|
+
): Promise<ValidationIssue[]> {
|
|
294
|
+
const baseIssues = [
|
|
295
|
+
...(await checkMissingArtifacts(cwd, loadedArtifacts)),
|
|
296
|
+
...(await checkContractCoverage(cwd, loadedArtifacts, normalizedBrief)),
|
|
297
|
+
...checkUpstreamReferences(loadedArtifacts),
|
|
298
|
+
...checkLanguageConsistency(loadedArtifacts)
|
|
299
|
+
];
|
|
300
|
+
const relevanceIssues = await checkContractRelevance(loadedArtifacts, normalizedBrief);
|
|
301
|
+
const semanticIssues = await checkSemanticPairs(loadedArtifacts);
|
|
302
|
+
return [...baseIssues, ...relevanceIssues, ...semanticIssues];
|
|
303
|
+
}
|
package/src/constants.ts
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import type { ContractCoverage, CoreArtifactType } from "./types";
|
|
2
|
+
|
|
3
|
+
// Root directory for Prodo workspace files inside a project.
export const PRODO_DIR = ".prodo";
// Lifecycle status assigned to freshly generated artifacts.
export const DEFAULT_STATUS = "draft";

// Output subdirectory for each core artifact type. Note that most are
// pluralized while "prd" and "techspec" keep the singular form.
const CORE_OUTPUT_DIR_BY_ARTIFACT: Record<CoreArtifactType, string> = {
  prd: "prd",
  workflow: "workflows",
  wireframe: "wireframes",
  stories: "stories",
  techspec: "techspec"
};

// Markdown headings each core artifact must contain.
const CORE_REQUIRED_HEADINGS: Record<CoreArtifactType, string[]> = {
  prd: ["## Problem", "## Goals", "## Scope", "## Requirements"],
  workflow: [
    "## Flow Purpose",
    "## Actors",
    "## Preconditions",
    "## Main Flow",
    "## Edge Cases",
    "## Postconditions"
  ],
  wireframe: [
    "## Screen Purpose",
    "## Primary Actor",
    "## Main Sections",
    "## Fields/Inputs",
    "## Actions/Buttons",
    "## States/Messages",
    "## Notes"
  ],
  stories: ["## User Stories", "## Acceptance Criteria"],
  techspec: ["## Architecture", "## Data Model", "## APIs", "## Risks"]
};

// Upstream artifact types each core artifact derives from; prd is the
// root of the dependency graph.
const CORE_UPSTREAM_BY_ARTIFACT: Record<CoreArtifactType, CoreArtifactType[]> = {
  prd: [],
  workflow: ["prd"],
  wireframe: ["prd", "workflow"],
  stories: ["prd"],
  techspec: ["prd", "stories"]
};

// Contract groups (goals / core_features / constraints) each core
// artifact is required to cover.
const CORE_REQUIRED_CONTRACTS_BY_ARTIFACT: Record<
  CoreArtifactType,
  Array<keyof ContractCoverage>
> = {
  prd: ["goals", "core_features"],
  workflow: ["core_features"],
  wireframe: ["core_features"],
  stories: ["core_features"],
  techspec: ["core_features", "constraints"]
};
|
|
55
|
+
|
|
56
|
+
export function defaultOutputDir(artifactType: string): string {
|
|
57
|
+
return CORE_OUTPUT_DIR_BY_ARTIFACT[artifactType as CoreArtifactType] ?? `${artifactType}s`;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export function defaultRequiredHeadings(artifactType: string): string[] {
|
|
61
|
+
return CORE_REQUIRED_HEADINGS[artifactType as CoreArtifactType] ?? ["## Summary", "## Details"];
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
export function defaultUpstreamByArtifact(artifactType: string): string[] {
|
|
65
|
+
return CORE_UPSTREAM_BY_ARTIFACT[artifactType as CoreArtifactType] ?? [];
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export function defaultRequiredContractsByArtifact(
|
|
69
|
+
artifactType: string
|
|
70
|
+
): Array<keyof ContractCoverage> {
|
|
71
|
+
return CORE_REQUIRED_CONTRACTS_BY_ARTIFACT[artifactType as CoreArtifactType] ?? ["core_features"];
|
|
72
|
+
}
|