@shahmarasy/prodo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +157 -0
- package/bin/prodo.cjs +6 -0
- package/dist/agent-command-installer.d.ts +4 -0
- package/dist/agent-command-installer.js +158 -0
- package/dist/agents.d.ts +15 -0
- package/dist/agents.js +47 -0
- package/dist/artifact-registry.d.ts +11 -0
- package/dist/artifact-registry.js +49 -0
- package/dist/artifacts.d.ts +9 -0
- package/dist/artifacts.js +514 -0
- package/dist/cli.d.ts +9 -0
- package/dist/cli.js +305 -0
- package/dist/consistency.d.ts +8 -0
- package/dist/consistency.js +268 -0
- package/dist/constants.d.ts +7 -0
- package/dist/constants.js +64 -0
- package/dist/doctor.d.ts +1 -0
- package/dist/doctor.js +123 -0
- package/dist/errors.d.ts +3 -0
- package/dist/errors.js +10 -0
- package/dist/hook-executor.d.ts +1 -0
- package/dist/hook-executor.js +175 -0
- package/dist/init-tui.d.ts +21 -0
- package/dist/init-tui.js +161 -0
- package/dist/init.d.ts +10 -0
- package/dist/init.js +307 -0
- package/dist/markdown.d.ts +11 -0
- package/dist/markdown.js +66 -0
- package/dist/normalize.d.ts +7 -0
- package/dist/normalize.js +73 -0
- package/dist/normalized-brief.d.ts +39 -0
- package/dist/normalized-brief.js +170 -0
- package/dist/output-index.d.ts +13 -0
- package/dist/output-index.js +55 -0
- package/dist/paths.d.ts +16 -0
- package/dist/paths.js +76 -0
- package/dist/preset-loader.d.ts +4 -0
- package/dist/preset-loader.js +210 -0
- package/dist/project-config.d.ts +14 -0
- package/dist/project-config.js +69 -0
- package/dist/providers/index.d.ts +2 -0
- package/dist/providers/index.js +12 -0
- package/dist/providers/mock-provider.d.ts +7 -0
- package/dist/providers/mock-provider.js +168 -0
- package/dist/providers/openai-provider.d.ts +11 -0
- package/dist/providers/openai-provider.js +69 -0
- package/dist/registry.d.ts +13 -0
- package/dist/registry.js +115 -0
- package/dist/settings.d.ts +6 -0
- package/dist/settings.js +34 -0
- package/dist/template-resolver.d.ts +11 -0
- package/dist/template-resolver.js +28 -0
- package/dist/templates.d.ts +33 -0
- package/dist/templates.js +428 -0
- package/dist/types.d.ts +35 -0
- package/dist/types.js +5 -0
- package/dist/utils.d.ts +6 -0
- package/dist/utils.js +53 -0
- package/dist/validate.d.ts +9 -0
- package/dist/validate.js +226 -0
- package/dist/validator.d.ts +5 -0
- package/dist/validator.js +80 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +30 -0
- package/dist/workflow-commands.d.ts +7 -0
- package/dist/workflow-commands.js +28 -0
- package/package.json +45 -0
- package/presets/fintech/preset.json +1 -0
- package/presets/fintech/prompts/prd.md +3 -0
- package/presets/marketplace/preset.json +1 -0
- package/presets/marketplace/prompts/prd.md +3 -0
- package/presets/saas/preset.json +1 -0
- package/presets/saas/prompts/prd.md +3 -0
- package/src/agent-command-installer.ts +174 -0
- package/src/agents.ts +56 -0
- package/src/artifact-registry.ts +69 -0
- package/src/artifacts.ts +606 -0
- package/src/cli.ts +322 -0
- package/src/consistency.ts +303 -0
- package/src/constants.ts +72 -0
- package/src/doctor.ts +137 -0
- package/src/errors.ts +7 -0
- package/src/hook-executor.ts +196 -0
- package/src/init-tui.ts +193 -0
- package/src/init.ts +375 -0
- package/src/markdown.ts +73 -0
- package/src/normalize.ts +89 -0
- package/src/normalized-brief.ts +206 -0
- package/src/output-index.ts +59 -0
- package/src/paths.ts +72 -0
- package/src/preset-loader.ts +237 -0
- package/src/project-config.ts +78 -0
- package/src/providers/index.ts +12 -0
- package/src/providers/mock-provider.ts +188 -0
- package/src/providers/openai-provider.ts +87 -0
- package/src/registry.ts +119 -0
- package/src/settings.ts +34 -0
- package/src/template-resolver.ts +33 -0
- package/src/templates.ts +440 -0
- package/src/types.ts +46 -0
- package/src/utils.ts +50 -0
- package/src/validate.ts +246 -0
- package/src/validator.ts +96 -0
- package/src/version.ts +24 -0
- package/src/workflow-commands.ts +31 -0
- package/templates/artifacts/prd.md +219 -0
- package/templates/artifacts/stories.md +49 -0
- package/templates/artifacts/techspec.md +42 -0
- package/templates/artifacts/wireframe.html +260 -0
- package/templates/artifacts/wireframe.md +22 -0
- package/templates/artifacts/workflow.md +22 -0
- package/templates/artifacts/workflow.mmd +6 -0
- package/templates/commands/prodo-normalize.md +24 -0
- package/templates/commands/prodo-prd.md +24 -0
- package/templates/commands/prodo-stories.md +24 -0
- package/templates/commands/prodo-techspec.md +24 -0
- package/templates/commands/prodo-validate.md +24 -0
- package/templates/commands/prodo-wireframe.md +24 -0
- package/templates/commands/prodo-workflow.md +24 -0
package/dist/validate.js
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.runValidate = runValidate;
|
|
7
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
8
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
9
|
+
const gray_matter_1 = __importDefault(require("gray-matter"));
|
|
10
|
+
const artifact_registry_1 = require("./artifact-registry");
|
|
11
|
+
const consistency_1 = require("./consistency");
|
|
12
|
+
const errors_1 = require("./errors");
|
|
13
|
+
const output_index_1 = require("./output-index");
|
|
14
|
+
const paths_1 = require("./paths");
|
|
15
|
+
const template_resolver_1 = require("./template-resolver");
|
|
16
|
+
const utils_1 = require("./utils");
|
|
17
|
+
const validator_1 = require("./validator");
|
|
18
|
+
/**
 * Derive the sidecar metadata path for an artifact file:
 * same directory, same base name, `.artifact.json` extension.
 */
function sidecarPath(filePath) {
    const { dir, name } = node_path_1.default.parse(filePath);
    return node_path_1.default.join(dir, name + ".artifact.json");
}
|
|
22
|
+
/**
 * Load an artifact document as { frontmatter, body }.
 * Prefers the `.artifact.json` sidecar when present; otherwise parses the
 * file's YAML frontmatter with gray-matter.
 */
async function loadArtifactDoc(filePath) {
    const sidecar = sidecarPath(filePath);
    const hasSidecar = await (0, utils_1.fileExists)(sidecar);
    if (hasSidecar) {
        const payload = await (0, utils_1.readJsonFile)(sidecar);
        const body = typeof payload.body === "string" ? payload.body : "";
        return { frontmatter: payload.frontmatter ?? {}, body };
    }
    const raw = await promises_1.default.readFile(filePath, "utf8");
    const { data, content } = (0, gray_matter_1.default)(raw);
    return { frontmatter: data, body: content };
}
|
|
38
|
+
/**
 * Collect the most recent file for every registered artifact type.
 * The output index's "active" path wins; otherwise fall back to the newest
 * file (by mtime) in the type's output directory. Types with no files are
 * skipped entirely.
 */
async function loadLatestArtifacts(cwd) {
    const definitions = await (0, artifact_registry_1.listArtifactDefinitions)(cwd);
    const results = [];
    for (const definition of definitions) {
        const type = definition.name;
        let latest = await (0, output_index_1.getActiveArtifactPath)(cwd, type);
        if (!latest) {
            const dir = (0, paths_1.outputDirPath)(cwd, type, definition.output_dir);
            const candidates = await (0, utils_1.listFilesSortedByMtime)(dir);
            latest = candidates[0];
        }
        if (!latest)
            continue;
        const doc = await loadArtifactDoc(latest);
        results.push({ type, file: latest, doc });
    }
    return results;
}
|
|
60
|
+
/**
 * List absolute paths of all `.html` files directly inside `dir`
 * (non-recursive). Returns [] when the directory does not exist.
 */
async function listHtmlFiles(dir) {
    const exists = await (0, utils_1.fileExists)(dir);
    if (!exists)
        return [];
    const entries = await promises_1.default.readdir(dir, { withFileTypes: true });
    const html = [];
    for (const entry of entries) {
        if (entry.isFile() && entry.name.toLowerCase().endsWith(".html")) {
            html.push(node_path_1.default.join(dir, entry.name));
        }
    }
    return html;
}
|
|
68
|
+
/**
 * Render a single validation issue as one markdown bullet:
 * `- [LEVEL] code [check] [field]: message (file)` with an optional
 * indented Suggestion line appended.
 */
function formatIssue(issue) {
    const parts = [`- [${issue.level.toUpperCase()}] ${issue.code}`];
    if (issue.check)
        parts.push(` [${issue.check}]`);
    if (issue.field)
        parts.push(` [${issue.field}]`);
    parts.push(`: ${issue.message}`);
    if (issue.file)
        parts.push(` (${issue.file})`);
    if (issue.suggestion)
        parts.push(`\n  Suggestion: ${issue.suggestion}`);
    return parts.join("");
}
|
|
75
|
+
/**
 * Render the validation report markdown and write it to `targetPath`,
 * creating parent directories as needed. Overall status and each gate's
 * status is FAIL when any (relevant) issue has level "error".
 */
async function writeReport(targetPath, issues) {
    const hasError = (set) => set.some((issue) => issue.level === "error");
    const gate = (set) => (hasError(set) ? "FAIL" : "PASS");
    const byCheck = (check) => issues.filter((issue) => issue.check === check);
    const lines = [
        "# Prodo Validation Report",
        "",
        `Status: **${hasError(issues) ? "FAIL" : "PASS"}**`,
        `Generated at: ${new Date().toISOString()}`,
        "",
        "## Gate Results",
        `- Schema pass: ${gate(byCheck("schema"))}`,
        `- Tag coverage pass: ${gate(byCheck("tag_coverage"))}`,
        `- Contract relevance pass: ${gate(byCheck("contract_relevance"))}`,
        `- Semantic consistency pass: ${gate(byCheck("semantic_consistency"))}`,
        "",
        "## Findings",
        issues.length === 0 ? "- No issues found." : issues.map(formatIssue).join("\n"),
        ""
    ];
    await (0, utils_1.ensureDir)(node_path_1.default.dirname(targetPath));
    await promises_1.default.writeFile(targetPath, lines.join("\n"), "utf8");
}
|
|
101
|
+
/**
 * Run the full validation pipeline over the latest artifacts.
 *
 * Steps:
 *   1. Require the normalized brief (hard error if missing).
 *   2. Per artifact: schema + required-heading validation, plus companion
 *      checks for workflow (.mmd pair) and wireframe (.html pair/screens).
 *   3. Cross-artifact consistency checks.
 *   4. Optionally escalate warnings to errors (options.strict).
 *   5. Write the markdown report (must live under product-docs/).
 *
 * Returns { pass, reportPath, issues }; pass is false when any issue is
 * level "error" after strict escalation.
 */
async function runValidate(cwd, options) {
    const normalizedPath = (0, paths_1.normalizedBriefPath)(cwd);
    if (!(await (0, utils_1.fileExists)(normalizedPath))) {
        throw new errors_1.UserError("Missing `.prodo/briefs/normalized-brief.json`. Run `prodo-init` and create it first.");
    }
    const normalizedBrief = await (0, utils_1.readJsonFile)(normalizedPath);
    const loaded = await loadLatestArtifacts(cwd);
    const issues = [];
    for (const artifact of loaded) {
        // Headings extracted from the resolved template take precedence over
        // the schema's own list (validateSchema falls back when empty).
        const template = await (0, template_resolver_1.resolveTemplate)({
            cwd,
            artifactType: artifact.type
        });
        const headings = template ? (0, template_resolver_1.extractRequiredHeadingsFromTemplate)(template.content) : [];
        const schemaCheck = await (0, validator_1.validateSchema)(cwd, artifact.type, artifact.doc, headings);
        // Tag each schema issue with the concrete file it came from.
        issues.push(...schemaCheck.issues.map((issue) => ({ ...issue, file: artifact.file })));
        if (artifact.type === "workflow") {
            // Workflow artifacts: a .md explanation paired with a same-named
            // .mmd Mermaid diagram.
            const ext = node_path_1.default.extname(artifact.file).toLowerCase();
            if (ext !== ".md") {
                issues.push({
                    level: "error",
                    code: "workflow_markdown_missing",
                    check: "schema",
                    artifactType: artifact.type,
                    file: artifact.file,
                    message: "Workflow explanation artifact must be Markdown (.md).",
                    suggestion: "Regenerate workflow so explanation is written to .md."
                });
            }
            const mmdPath = node_path_1.default.join(node_path_1.default.dirname(artifact.file), `${node_path_1.default.parse(artifact.file).name}.mmd`);
            if (!(await (0, utils_1.fileExists)(mmdPath))) {
                issues.push({
                    level: "error",
                    code: "workflow_mermaid_missing",
                    check: "schema",
                    artifactType: artifact.type,
                    file: artifact.file,
                    message: "Workflow Mermaid companion file (.mmd) is missing.",
                    suggestion: "Regenerate workflow so markdown and .mmd are produced as a pair."
                });
            }
            else {
                // Cheap sanity check: file should begin a flowchart/graph
                // diagram rather than being prose-only.
                const mmdRaw = await promises_1.default.readFile(mmdPath, "utf8");
                const mermaidLike = /(^|\n)\s*flowchart\s+/i.test(mmdRaw) || /(^|\n)\s*graph\s+/i.test(mmdRaw);
                if (!mermaidLike) {
                    issues.push({
                        level: "error",
                        code: "workflow_mermaid_invalid",
                        check: "schema",
                        artifactType: artifact.type,
                        file: mmdPath,
                        message: "Workflow Mermaid file is invalid or prose-only.",
                        suggestion: "Ensure .mmd file contains valid Mermaid diagram syntax."
                    });
                }
            }
        }
        if (artifact.type === "wireframe") {
            // Wireframe artifacts: a .md explanation paired with a same-named
            // .html screen, and at least one HTML screen in the directory.
            const ext = node_path_1.default.extname(artifact.file).toLowerCase();
            if (ext !== ".md") {
                issues.push({
                    level: "error",
                    code: "wireframe_markdown_missing",
                    check: "schema",
                    artifactType: artifact.type,
                    file: artifact.file,
                    message: "Wireframe explanation artifact must be Markdown (.md).",
                    suggestion: "Regenerate wireframe so explanation is written to .md."
                });
            }
            const htmlPath = node_path_1.default.join(node_path_1.default.dirname(artifact.file), `${node_path_1.default.parse(artifact.file).name}.html`);
            if (!(await (0, utils_1.fileExists)(htmlPath))) {
                issues.push({
                    level: "error",
                    code: "wireframe_html_missing",
                    check: "schema",
                    artifactType: artifact.type,
                    file: artifact.file,
                    message: "Wireframe HTML companion file is missing.",
                    suggestion: "Regenerate wireframe so markdown and .html are produced as a pair."
                });
            }
            else {
                // Accept either a doctype or an <html> tag as "looks like HTML".
                const htmlRaw = await promises_1.default.readFile(htmlPath, "utf8");
                const htmlLooksValid = /<!doctype html>/i.test(htmlRaw) || /<html[\s>]/i.test(htmlRaw);
                if (!htmlLooksValid) {
                    issues.push({
                        level: "error",
                        code: "wireframe_html_invalid",
                        check: "schema",
                        artifactType: artifact.type,
                        file: htmlPath,
                        message: "Wireframe output is not valid HTML content.",
                        suggestion: "Ensure wireframe companion HTML contains a valid document structure."
                    });
                }
            }
            const htmlFiles = await listHtmlFiles(node_path_1.default.dirname(artifact.file));
            if (htmlFiles.length < 1) {
                issues.push({
                    level: "error",
                    code: "wireframe_screens_missing",
                    check: "schema",
                    artifactType: artifact.type,
                    file: artifact.file,
                    message: "Wireframe must include at least one HTML screen artifact.",
                    suggestion: "Regenerate wireframe to create paired .md and .html screen files."
                });
            }
        }
    }
    issues.push(...(await (0, consistency_1.checkConsistency)(cwd, loaded, normalizedBrief)));
    if (options.strict) {
        // --strict mode: warnings count as failures.
        for (const issue of issues) {
            if (issue.level === "warning")
                issue.level = "error";
        }
    }
    const finalReportPath = options.report ? node_path_1.default.resolve(cwd, options.report) : (0, paths_1.reportPath)(cwd);
    // Guard against path escape via --report: the report must stay inside
    // the project's product-docs directory.
    if (!(0, utils_1.isPathInside)(node_path_1.default.join(cwd, "product-docs"), finalReportPath)) {
        throw new errors_1.UserError("Validation report must be inside `product-docs/`.");
    }
    await writeReport(finalReportPath, issues);
    const pass = !issues.some((issue) => issue.level === "error");
    return { pass, reportPath: finalReportPath, issues };
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { ArtifactDoc, ArtifactType, ValidationIssue } from "./types";
|
|
2
|
+
export declare function validateSchema(cwd: string, artifactType: ArtifactType, doc: ArtifactDoc, requiredHeadingsOverride?: string[]): Promise<{
|
|
3
|
+
issues: ValidationIssue[];
|
|
4
|
+
requiredHeadings: string[];
|
|
5
|
+
}>;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.validateSchema = validateSchema;
|
|
7
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
8
|
+
const _2020_1 = __importDefault(require("ajv/dist/2020"));
|
|
9
|
+
const ajv_formats_1 = __importDefault(require("ajv-formats"));
|
|
10
|
+
const js_yaml_1 = __importDefault(require("js-yaml"));
|
|
11
|
+
const markdown_1 = require("./markdown");
|
|
12
|
+
const paths_1 = require("./paths");
|
|
13
|
+
const ajv = new _2020_1.default({ allErrors: true, strict: false });
|
|
14
|
+
(0, ajv_formats_1.default)(ajv);
|
|
15
|
+
/**
 * Read the artifact type's JSON Schema (stored as YAML) from the
 * project's schema path and return the parsed object.
 */
async function resolveSchema(cwd, artifactType) {
    const schemaFile = (0, paths_1.schemaPath)(cwd, artifactType);
    const raw = await promises_1.default.readFile(schemaFile, "utf8");
    return js_yaml_1.default.load(raw);
}
|
|
19
|
+
/**
 * Extract the custom `x_required_headings` extension from a schema,
 * keeping only string entries. Returns [] when absent or not an array.
 */
function requiredHeadingsFromSchema(schema) {
    const headings = schema.x_required_headings;
    if (!Array.isArray(headings))
        return [];
    return headings.filter((item) => typeof item === "string");
}
|
|
24
|
+
/**
 * Validate an artifact document against its JSON Schema and its required
 * markdown headings.
 *
 * - Schema failures are reported one issue per Ajv error.
 * - Heading checks are skipped for Turkish-language documents (frontmatter
 *   language starting with "tr"), since required headings are template-based.
 * - Each required heading must be present and carry non-placeholder content
 *   of at least 20 characters.
 *
 * Returns { issues, requiredHeadings } (requiredHeadings is [] in tr mode).
 */
async function validateSchema(cwd, artifactType, doc, requiredHeadingsOverride) {
    const schema = await resolveSchema(cwd, artifactType);
    const requiredHeadings = requiredHeadingsOverride && requiredHeadingsOverride.length > 0
        ? requiredHeadingsOverride
        : requiredHeadingsFromSchema(schema);
    // Strip the custom x_required_headings extension before compiling so the
    // keyword is not handed to Ajv.
    const workingSchema = { ...schema };
    delete workingSchema.x_required_headings;
    const validate = ajv.compile(workingSchema);
    const valid = validate(doc);
    const issues = [];
    if (!valid && validate.errors) {
        for (const err of validate.errors) {
            issues.push({
                level: "error",
                code: "schema_validation_failed",
                check: "schema",
                artifactType,
                field: err.instancePath || err.schemaPath,
                message: `Schema validation error: ${err.message ?? "unknown error"}`,
                suggestion: "Adjust the generated content to satisfy schema requirements."
            });
        }
    }
    const trMode = String(doc.frontmatter.language ?? "").toLowerCase().startsWith("tr");
    if (trMode) {
        return { issues, requiredHeadings: [] };
    }
    // Hoisted below the trMode early-return: previously the markdown section
    // map was computed eagerly and then discarded for tr-mode documents.
    const sections = (0, markdown_1.sectionTextMap)(doc.body);
    for (const heading of requiredHeadings) {
        if (!doc.body.includes(heading)) {
            issues.push({
                level: "error",
                code: "missing_required_heading",
                check: "schema",
                artifactType,
                field: heading,
                message: `Required section missing: ${heading}`,
                suggestion: "Regenerate or manually edit the artifact to include all required headings."
            });
            continue;
        }
        const content = sections.get(heading) ?? "";
        // Placeholder text counts as weak even if it is long enough.
        const isPlaceholder = /(tbd|to be defined|i don't know|unknown|n\/a)/i.test(content);
        if (content.trim().length < 20 || isPlaceholder) {
            issues.push({
                level: "error",
                code: "weak_required_heading_content",
                check: "schema",
                artifactType,
                field: heading,
                message: `Section has weak or placeholder content: ${heading}`,
                suggestion: "Replace placeholders with concrete, actionable details."
            });
        }
    }
    return { issues, requiredHeadings };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function readCliVersion(cwd: string): Promise<string>;
|
package/dist/version.js
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.readCliVersion = readCliVersion;
|
|
7
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
8
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
9
|
+
const utils_1 = require("./utils");
|
|
10
|
+
/**
 * Resolve the CLI's own version string.
 *
 * Checks the package's own package.json (one level above the compiled
 * dist/ directory) first, then the caller's cwd as a fallback. Previously
 * cwd was checked first, so running the CLI inside any user project
 * reported that project's version instead of prodo's.
 *
 * Returns "0.0.0" when no readable package.json with a version is found.
 */
async function readCliVersion(cwd) {
    const candidates = [
        node_path_1.default.resolve(__dirname, "..", "package.json"),
        node_path_1.default.join(cwd, "package.json")
    ];
    for (const candidate of candidates) {
        if (!(await (0, utils_1.fileExists)(candidate)))
            continue;
        try {
            const raw = await promises_1.default.readFile(candidate, "utf8");
            const parsed = JSON.parse(raw);
            if (typeof parsed.version === "string" && parsed.version.trim().length > 0) {
                return parsed.version.trim();
            }
        }
        catch {
            // Unreadable or malformed package.json: try the next candidate.
        }
    }
    // Nothing usable found anywhere: sentinel fallback.
    return "0.0.0";
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.WORKFLOW_COMMANDS = void 0;
|
|
4
|
+
exports.buildWorkflowCommands = buildWorkflowCommands;
|
|
5
|
+
// Built-in workflow commands shipped with every Prodo project.
const BASE_WORKFLOW_COMMANDS = [
    { name: "prodo-normalize", cliSubcommand: "normalize", description: "Normalize start brief into normalized brief JSON." },
    { name: "prodo-prd", cliSubcommand: "prd", description: "Generate PRD artifact from normalized brief." },
    { name: "prodo-workflow", cliSubcommand: "workflow", description: "Generate workflow artifact." },
    { name: "prodo-wireframe", cliSubcommand: "wireframe", description: "Generate wireframe artifact." },
    { name: "prodo-stories", cliSubcommand: "stories", description: "Generate stories artifact." },
    { name: "prodo-techspec", cliSubcommand: "techspec", description: "Generate technical specification artifact." },
    { name: "prodo-validate", cliSubcommand: "validate", description: "Run schema and cross-artifact consistency validation." }
];
exports.WORKFLOW_COMMANDS = BASE_WORKFLOW_COMMANDS;
/**
 * Merge the built-in commands with one generated command per custom
 * artifact type. Built-ins win on name collisions; order is built-ins
 * first, then new custom types in input order.
 */
function buildWorkflowCommands(artifactTypes) {
    const merged = new Map(BASE_WORKFLOW_COMMANDS.map((command) => [command.name, command]));
    for (const type of artifactTypes) {
        const name = `prodo-${type}`;
        if (!merged.has(name)) {
            merged.set(name, {
                name,
                cliSubcommand: type,
                description: `Generate ${type} artifact from normalized brief.`
            });
        }
    }
    return [...merged.values()];
}
|
package/package.json
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@shahmarasy/prodo",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "CLI-first, prompt-powered product artifact kit",
|
|
5
|
+
"main": "dist/cli.js",
|
|
6
|
+
"types": "dist/cli.d.ts",
|
|
7
|
+
"bin": {
|
|
8
|
+
"prodo": "bin/prodo.cjs"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"bin/",
|
|
12
|
+
"dist/",
|
|
13
|
+
"templates/",
|
|
14
|
+
"presets/",
|
|
15
|
+
"src/",
|
|
16
|
+
"README.md"
|
|
17
|
+
],
|
|
18
|
+
"publishConfig": {
|
|
19
|
+
"access": "public"
|
|
20
|
+
},
|
|
21
|
+
"engines": {
|
|
22
|
+
"node": ">=20"
|
|
23
|
+
},
|
|
24
|
+
"scripts": {
|
|
25
|
+
"build": "tsc -p tsconfig.json",
|
|
26
|
+
"test": "npm run build && node --test tests/*.test.cjs",
|
|
27
|
+
"verify:release": "npm run build && node scripts/verify-release-build.cjs"
|
|
28
|
+
},
|
|
29
|
+
"keywords": [],
|
|
30
|
+
"author": "",
|
|
31
|
+
"license": "ISC",
|
|
32
|
+
"dependencies": {
|
|
33
|
+
"@clack/prompts": "^1.1.0",
|
|
34
|
+
"ajv": "^8.18.0",
|
|
35
|
+
"ajv-formats": "^3.0.1",
|
|
36
|
+
"commander": "^14.0.3",
|
|
37
|
+
"gray-matter": "^4.0.3",
|
|
38
|
+
"js-yaml": "^4.1.1"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@types/js-yaml": "^4.0.9",
|
|
42
|
+
"@types/node": "^25.5.0",
|
|
43
|
+
"typescript": "^5.9.3"
|
|
44
|
+
}
|
|
45
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"name":"fintech","priority":1,"min_prodo_version":"0.1.0","version":"1.0.0"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"name":"marketplace","priority":1,"min_prodo_version":"0.1.0","version":"1.0.0"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"name":"saas","priority":1,"min_prodo_version":"0.1.0","version":"1.0.0"}
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import yaml from "js-yaml";
|
|
4
|
+
import { UserError } from "./errors";
|
|
5
|
+
import { ensureDir, fileExists } from "./utils";
|
|
6
|
+
|
|
7
|
+
// Accepted --ai spellings mapped to canonical agent identifiers.
// Includes misspelled variants ("gemmini", "gemmini-cli") so common typos
// still resolve instead of failing.
export const AI_ALIASES: Record<string, "codex" | "gemini-cli" | "claude-cli"> = {
  codex: "codex",
  gemini: "gemini-cli",
  "gemmini-cli": "gemini-cli",
  "gemmini": "gemini-cli",
  "gemini-cli": "gemini-cli",
  claude: "claude-cli",
  "claude-cli": "claude-cli"
};
|
|
16
|
+
|
|
17
|
+
/** Canonical identifiers for the supported AI agent CLIs. */
export type SupportedAi = "codex" | "gemini-cli" | "claude-cli";

/** Per-agent install layout and prompt-argument conventions. */
type AgentConfig = {
  baseDir: string; // install directory, relative to the project root
  format: "markdown" | "toml" | "skill"; // output document flavor
  extension: string; // output file extension ("/SKILL.md" suffix for skills)
  argsPlaceholder: string; // token the agent substitutes with user arguments
};

// Where and how each supported agent expects its commands installed.
const AGENT_CONFIG: Record<SupportedAi, AgentConfig> = {
  "claude-cli": {
    baseDir: ".claude/commands",
    format: "markdown",
    extension: ".md",
    argsPlaceholder: "$ARGUMENTS"
  },
  "gemini-cli": {
    baseDir: ".gemini/commands",
    format: "toml",
    extension: ".toml",
    argsPlaceholder: "{{args}}"
  },
  codex: {
    baseDir: ".agents/skills",
    format: "skill",
    // Codex commands are per-command directories containing a SKILL.md.
    extension: "/SKILL.md",
    argsPlaceholder: "$ARGUMENTS"
  }
};

/** A command template split into YAML frontmatter and markdown body. */
type ParsedTemplate = {
  frontmatter: Record<string, unknown>;
  body: string;
};
|
|
51
|
+
|
|
52
|
+
function parseFrontmatter(content: string): ParsedTemplate {
|
|
53
|
+
if (!content.startsWith("---")) return { frontmatter: {}, body: content };
|
|
54
|
+
const end = content.indexOf("\n---", 4);
|
|
55
|
+
if (end === -1) return { frontmatter: {}, body: content };
|
|
56
|
+
const fmRaw = content.slice(3, end).trim();
|
|
57
|
+
const body = content.slice(end + 4).trimStart();
|
|
58
|
+
const frontmatter = (yaml.load(fmRaw) as Record<string, unknown>) ?? {};
|
|
59
|
+
return { frontmatter, body };
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
function renderFrontmatter(frontmatter: Record<string, unknown>): string {
|
|
63
|
+
if (Object.keys(frontmatter).length === 0) return "";
|
|
64
|
+
return `---\n${yaml.dump(frontmatter)}---\n`;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function resolveScriptBlock(frontmatter: Record<string, unknown>, argsPlaceholder: string): string {
|
|
68
|
+
const run = frontmatter.run as Record<string, unknown> | undefined;
|
|
69
|
+
const runAction = typeof run?.action === "string" ? run.action.trim() : "";
|
|
70
|
+
const runMode = typeof run?.mode === "string" ? run.mode.trim() : "";
|
|
71
|
+
if (runAction) {
|
|
72
|
+
const modeSuffix = runMode ? ` (${runMode})` : "";
|
|
73
|
+
return `- Internal action: ${runAction}${modeSuffix}`;
|
|
74
|
+
}
|
|
75
|
+
const runCommand = typeof run?.command === "string" ? run.command.replace("{ARGS}", argsPlaceholder) : "";
|
|
76
|
+
if (runCommand) {
|
|
77
|
+
return `- Command: ${runCommand}`;
|
|
78
|
+
}
|
|
79
|
+
const scripts = frontmatter.scripts as Record<string, unknown> | undefined;
|
|
80
|
+
const sh = typeof scripts?.sh === "string" ? scripts.sh.replace("{ARGS}", argsPlaceholder) : "";
|
|
81
|
+
const ps = typeof scripts?.ps === "string" ? scripts.ps.replace("{ARGS}", argsPlaceholder) : "";
|
|
82
|
+
return [`- Bash: ${sh}`, `- PowerShell: ${ps}`].filter((line) => line.length > 8).join("\n");
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
function toTomlPrompt(body: string, frontmatter: Record<string, unknown>, argsPlaceholder: string): string {
|
|
86
|
+
const description = String(frontmatter.description ?? "Prodo command");
|
|
87
|
+
const scriptsBlock = resolveScriptBlock(frontmatter, argsPlaceholder);
|
|
88
|
+
const promptBody = body.replaceAll("$ARGUMENTS", argsPlaceholder);
|
|
89
|
+
return `description = "${description.replace(/"/g, '\\"')}"
|
|
90
|
+
|
|
91
|
+
prompt = """
|
|
92
|
+
${promptBody}
|
|
93
|
+
|
|
94
|
+
Script options:
|
|
95
|
+
${scriptsBlock}
|
|
96
|
+
"""`;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
function toSkill(name: string, body: string, frontmatter: Record<string, unknown>): string {
|
|
100
|
+
const description = String(frontmatter.description ?? "Prodo workflow command");
|
|
101
|
+
return `---
|
|
102
|
+
name: ${name}
|
|
103
|
+
description: ${description}
|
|
104
|
+
compatibility: Requires Prodo project scaffold (.prodo)
|
|
105
|
+
metadata:
|
|
106
|
+
author: prodo
|
|
107
|
+
source: .prodo/commands/${name}.md
|
|
108
|
+
---
|
|
109
|
+
|
|
110
|
+
${body}`;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
export function resolveAi(ai?: string): SupportedAi | undefined {
|
|
114
|
+
if (!ai) return undefined;
|
|
115
|
+
const normalized = AI_ALIASES[ai.trim().toLowerCase()];
|
|
116
|
+
if (!normalized) {
|
|
117
|
+
throw new UserError("Unsupported --ai value. Use: codex | gemini-cli | claude-cli");
|
|
118
|
+
}
|
|
119
|
+
return normalized;
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
async function loadCommandTemplateNames(commandTemplatesDir: string): Promise<string[]> {
|
|
123
|
+
if (!(await fileExists(commandTemplatesDir))) {
|
|
124
|
+
throw new UserError(`Missing command templates directory: ${commandTemplatesDir}`);
|
|
125
|
+
}
|
|
126
|
+
const entries = await fs.readdir(commandTemplatesDir, { withFileTypes: true });
|
|
127
|
+
return entries
|
|
128
|
+
.filter((entry) => entry.isFile())
|
|
129
|
+
.map((entry) => entry.name)
|
|
130
|
+
.filter((name) => name.endsWith(".md") && name.startsWith("prodo-"))
|
|
131
|
+
.map((name) => name.replace(/\.md$/, ""))
|
|
132
|
+
.sort();
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
 * Materialize every `.prodo/commands/prodo-*.md` template into the
 * agent-specific command location for `ai`: Claude markdown commands,
 * Gemini TOML commands, or Codex skill directories. Returns the list of
 * written file paths. Throws UserError when the templates directory or an
 * individual template is missing.
 */
export async function installAgentCommands(projectRoot: string, ai: SupportedAi): Promise<string[]> {
  const cfg = AGENT_CONFIG[ai];
  const target = path.join(projectRoot, cfg.baseDir);
  const commandTemplatesDir = path.join(projectRoot, ".prodo", "commands");
  const commandNames = await loadCommandTemplateNames(commandTemplatesDir);
  await ensureDir(target);

  const written: string[] = [];
  for (const commandName of commandNames) {
    const templatePath = path.join(commandTemplatesDir, `${commandName}.md`);
    // Defensive re-check: the name list came from this directory, but the
    // file may have been removed between listing and reading.
    if (!(await fileExists(templatePath))) {
      throw new UserError(`Missing command template: ${templatePath}`);
    }
    const raw = await fs.readFile(templatePath, "utf8");
    const parsed = parseFrontmatter(raw);

    if (cfg.format === "skill") {
      // Codex skills live in a per-command directory as SKILL.md.
      const skillDir = path.join(target, commandName);
      await ensureDir(skillDir);
      const outPath = path.join(skillDir, "SKILL.md");
      const content = toSkill(commandName, parsed.body.replaceAll("$ARGUMENTS", cfg.argsPlaceholder), parsed.frontmatter);
      await fs.writeFile(outPath, content, "utf8");
      written.push(outPath);
      continue;
    }

    if (cfg.format === "toml") {
      // Gemini commands: a single TOML file with the prompt embedded.
      const outPath = path.join(target, `${commandName}${cfg.extension}`);
      await fs.writeFile(outPath, toTomlPrompt(parsed.body, parsed.frontmatter, cfg.argsPlaceholder), "utf8");
      written.push(outPath);
      continue;
    }

    // Markdown format (Claude): re-emit frontmatter + body with the
    // agent's argument placeholder substituted.
    const outPath = path.join(target, `${commandName}${cfg.extension}`);
    const replacedBody = parsed.body.replaceAll("$ARGUMENTS", cfg.argsPlaceholder);
    await fs.writeFile(outPath, `${renderFrontmatter(parsed.frontmatter)}\n${replacedBody}`, "utf8");
    written.push(outPath);
  }
  return written;
}
|