sdd-cli 0.1.18 → 0.1.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +44 -0
  2. package/dist/cli.js +67 -3
  3. package/dist/commands/ai-exec.js +3 -2
  4. package/dist/commands/ai-status.js +2 -1
  5. package/dist/commands/doctor.d.ts +1 -1
  6. package/dist/commands/doctor.js +218 -10
  7. package/dist/commands/gen-architecture.js +6 -5
  8. package/dist/commands/gen-best-practices.js +6 -5
  9. package/dist/commands/gen-functional-spec.js +6 -5
  10. package/dist/commands/gen-project-readme.js +6 -5
  11. package/dist/commands/gen-technical-spec.js +6 -5
  12. package/dist/commands/gen-utils.js +2 -1
  13. package/dist/commands/hello.js +19 -8
  14. package/dist/commands/import-issue.d.ts +1 -0
  15. package/dist/commands/import-issue.js +53 -0
  16. package/dist/commands/import-jira.d.ts +1 -0
  17. package/dist/commands/import-jira.js +127 -0
  18. package/dist/commands/learn-deliver.js +6 -5
  19. package/dist/commands/learn-refine.js +8 -7
  20. package/dist/commands/learn-start.js +3 -2
  21. package/dist/commands/pr-audit.js +6 -5
  22. package/dist/commands/pr-bridge-check.d.ts +1 -0
  23. package/dist/commands/pr-bridge-check.js +88 -0
  24. package/dist/commands/pr-bridge.d.ts +1 -0
  25. package/dist/commands/pr-bridge.js +124 -0
  26. package/dist/commands/pr-finish.js +6 -5
  27. package/dist/commands/pr-report.js +6 -5
  28. package/dist/commands/pr-respond.js +7 -6
  29. package/dist/commands/pr-risk.d.ts +1 -0
  30. package/dist/commands/pr-risk.js +112 -0
  31. package/dist/commands/pr-start.js +4 -3
  32. package/dist/commands/req-archive.js +4 -3
  33. package/dist/commands/req-create.js +8 -7
  34. package/dist/commands/req-export.js +4 -3
  35. package/dist/commands/req-finish.js +9 -8
  36. package/dist/commands/req-lint.js +16 -6
  37. package/dist/commands/req-list.js +4 -3
  38. package/dist/commands/req-plan.js +12 -11
  39. package/dist/commands/req-refine.js +9 -8
  40. package/dist/commands/req-report.js +10 -9
  41. package/dist/commands/req-start.js +10 -9
  42. package/dist/commands/req-status.js +4 -3
  43. package/dist/commands/scope-list.d.ts +1 -0
  44. package/dist/commands/scope-list.js +15 -0
  45. package/dist/commands/scope-status.d.ts +1 -0
  46. package/dist/commands/scope-status.js +33 -0
  47. package/dist/commands/status.js +15 -6
  48. package/dist/commands/test-plan.js +6 -5
  49. package/dist/context/flags.d.ts +2 -0
  50. package/dist/context/flags.js +9 -1
  51. package/dist/errors.d.ts +2 -0
  52. package/dist/errors.js +10 -0
  53. package/dist/telemetry/local-metrics.d.ts +2 -0
  54. package/dist/telemetry/local-metrics.js +85 -0
  55. package/dist/workspace/index.d.ts +4 -0
  56. package/dist/workspace/index.js +129 -27
  57. package/package.json +24 -2
package/README.md CHANGED
@@ -157,11 +157,18 @@ Use `--questions` when you want the manual question-by-question flow.
  - `sdd-cli init` -- create SDD workspace and config
  - `sdd-cli list` -- list flows, router flows, templates, prompt packs, and projects
  - `sdd-cli status --next` -- show current project state and exact next command
+ - `sdd-cli scope list` -- list monorepo workspace scopes
+ - `sdd-cli scope status <scope>` -- show status summary for one scope
  - `sdd-cli doctor` -- validate completeness and consistency
+ - `sdd-cli doctor --fix` -- apply safe remediations for missing requirement ops files

  ### Router
  - `sdd-cli route` -- classify user intent and route to the right flow

+ ### Imports
+ - `sdd-cli import issue <github-issue-url>` -- import issue context and bootstrap autopilot
+ - `sdd-cli import jira <ticket-or-browse-url>` -- import Jira context and bootstrap autopilot
+
  ### Requirement lifecycle
  - `sdd-cli req create`
  - `sdd-cli req refine`
@@ -185,10 +192,22 @@ Use `--questions` when you want the manual question-by-question flow.
  - `sdd-cli learn refine`
  - `sdd-cli learn deliver`

+ ### PR review
+ - `sdd-cli pr start`
+ - `sdd-cli pr audit`
+ - `sdd-cli pr respond`
+ - `sdd-cli pr finish`
+ - `sdd-cli pr report`
+ - `sdd-cli pr bridge`
+ - `sdd-cli pr risk`
+ - `sdd-cli pr bridge-check`
+
  ### Flags
  - `--approve` -- run without extra confirmations
  - `--improve` -- re-open and enhance existing docs
  - `--output <path>` -- override workspace output
+ - `--scope <name>` -- isolate artifacts by monorepo scope namespace
+ - `--metrics-local` -- record local opt-in telemetry snapshots in `workspace/metrics`
  - `--project <name>` -- set project name
  - `--parallel` -- generate in parallel
  - `--questions` -- use manual question-driven discovery flow
@@ -232,6 +251,8 @@ For a full onboarding walkthrough, see:

  - Adoption execution tracker: `AGENTS.md`
  - 90-day roadmap: `docs/ADOPTION_ROADMAP_90D.md`
+ - Value backlog: `docs/VALUE_BACKLOG.md`
+ - Error codes and remediation guide: `docs/ERROR_CODES.md`

  ## Where files are stored (clean repos)

@@ -244,6 +265,29 @@ By default, the tool writes to a dedicated workspace, not into your repo:
  Optional:
  - `--output ./docs/sdd` to keep SDD next to the repo
  - `--output ../_sdd/<project>` for a separate shared directory
+ - `--scope apps-payments` to isolate workspaces for one monorepo domain
+
+ ## Release notes automation
+
+ - Generate notes from conventional commits:
+ `npm run release:notes`
+ - Write notes to `docs/releases/<version>.md`:
+ `npm run release:notes -- --write --version v0.1.20`
+ - Generate post-release quality summary:
+ `npm run release:metrics`
+ - Promote `Unreleased` changelog entries into a version:
+ `npm run release:changelog -- --version v0.1.20`
+ - Verify tag/version consistency:
+ `npm run verify:release-tag -- --tag v0.1.20`
+ - Verify npm publish bundle before publishing:
+ `npm run verify:publish`
+
+ ## Local metrics (opt-in)
+
+ - Enable local snapshots:
+ `sdd-cli --metrics-local hello "your intent"`
+ - View summary from current workspace root:
+ `npm run metrics:summary -- <workspace-path>`

  ## Lifecycle folders

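The README additions above document the new scope, import, doctor `--fix`, and metrics surface. A rough Node script sketch of that flow follows; it assumes `sdd-cli` is installed and on PATH, and the issue URL and scope name below are placeholders, not values taken from this package.

```js
// Illustrative only: drives the commands documented in the README section above.
// Assumes sdd-cli is on PATH; the issue URL and scope name are hypothetical.
const { execFileSync } = require("node:child_process");

const run = (args) => execFileSync("sdd-cli", args, { stdio: "inherit" });

// New `import issue` command: pull GitHub issue context and bootstrap autopilot.
run(["import", "issue", "https://github.com/example/repo/issues/123"]);

// New global flags: target one monorepo scope and opt in to local metrics.
run(["--scope", "apps-payments", "--metrics-local", "status", "--next"]);

// New doctor remediation mode for missing requirement ops files.
run(["doctor", "--fix"]);
```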
package/dist/cli.js CHANGED
@@ -46,9 +46,12 @@ const route_1 = require("./commands/route");
  const doctor_1 = require("./commands/doctor");
  const quickstart_1 = require("./commands/quickstart");
  const status_1 = require("./commands/status");
+ const import_issue_1 = require("./commands/import-issue");
+ const import_jira_1 = require("./commands/import-jira");
  const paths_1 = require("./paths");
  const flags_1 = require("./context/flags");
  const prompt_1 = require("./ui/prompt");
+ const local_metrics_1 = require("./telemetry/local-metrics");
  const program = new commander_1.Command();
  function getVersion() {
  try {
@@ -72,7 +75,9 @@ program
  .option("--beginner", "Enable extra step-by-step guidance in hello flow")
  .option("--from-step <step>", "Resume or start autopilot from step: create|plan|start|test|finish")
  .option("--project <name>", "Select or name the project")
- .option("--output <path>", "Override workspace output root");
+ .option("--output <path>", "Override workspace output root")
+ .option("--scope <name>", "Target a monorepo scope namespace inside the workspace")
+ .option("--metrics-local", "Enable local opt-in telemetry snapshots in workspace/metrics");
  program.hook("preAction", (thisCommand, actionCommand) => {
  const opts = typeof actionCommand.optsWithGlobals === "function" ? actionCommand.optsWithGlobals() : thisCommand.opts();
  (0, flags_1.setFlags)({
@@ -84,8 +89,14 @@ program.hook("preAction", (thisCommand, actionCommand) => {
  beginner: Boolean(opts.beginner),
  fromStep: typeof opts.fromStep === "string" ? opts.fromStep : undefined,
  project: typeof opts.project === "string" ? opts.project : undefined,
- output: typeof opts.output === "string" ? opts.output : undefined
+ output: typeof opts.output === "string" ? opts.output : undefined,
+ scope: typeof opts.scope === "string" ? opts.scope : undefined,
+ metricsLocal: Boolean(opts.metricsLocal)
  });
+ const commandPath = typeof actionCommand.name === "function"
+ ? `${thisCommand.name()} ${actionCommand.name()}`.trim()
+ : thisCommand.name();
+ (0, local_metrics_1.recordCommandMetric)(commandPath);
  });
  program.hook("postAction", () => {
  (0, prompt_1.closePrompt)();
@@ -119,6 +130,22 @@ program
  .description("Show project requirement counts and next recommended command")
  .option("--next", "Print exact next command to run")
  .action((options) => (0, status_1.runStatus)(Boolean(options.next)));
+ const scopeCmd = program.command("scope").description("Monorepo scope workspace commands");
+ scopeCmd
+ .command("list")
+ .description("List known workspace scopes")
+ .action(async () => {
+ const { runScopeList } = await Promise.resolve().then(() => __importStar(require("./commands/scope-list")));
+ runScopeList();
+ });
+ scopeCmd
+ .command("status")
+ .description("Show project status summary for a scope")
+ .argument("[scope]", "Scope name")
+ .action(async (scope) => {
+ const { runScopeStatus } = await Promise.resolve().then(() => __importStar(require("./commands/scope-status")));
+ runScopeStatus(scope);
+ });
  const req = program.command("req").description("Requirement lifecycle commands");
  req
  .command("create")
@@ -234,6 +261,27 @@ pr
  const { runPrReport } = await Promise.resolve().then(() => __importStar(require("./commands/pr-report")));
  await runPrReport();
  });
+ pr
+ .command("bridge")
+ .description("Link PR review artifacts into a requirement")
+ .action(async () => {
+ const { runPrBridge } = await Promise.resolve().then(() => __importStar(require("./commands/pr-bridge")));
+ await runPrBridge();
+ });
+ pr
+ .command("risk")
+ .description("Generate PR risk severity rollup and unresolved summary")
+ .action(async () => {
+ const { runPrRisk } = await Promise.resolve().then(() => __importStar(require("./commands/pr-risk")));
+ await runPrRisk();
+ });
+ pr
+ .command("bridge-check")
+ .description("Validate PR bridge integrity for a requirement")
+ .action(async () => {
+ const { runPrBridgeCheck } = await Promise.resolve().then(() => __importStar(require("./commands/pr-bridge-check")));
+ await runPrBridgeCheck();
+ });
  const test = program.command("test").description("Test planning commands");
  test
  .command("plan")
@@ -317,7 +365,8 @@ program
  .description("Validate workspace artifacts and schemas")
  .argument("[project]", "Optional project name to validate")
  .argument("[requirementId]", "Optional requirement ID to validate")
- .action((project, requirementId) => (0, doctor_1.runDoctor)(project, requirementId));
+ .option("--fix", "Apply safe remediations (missing changelog/progress-log)")
+ .action((project, requirementId, options) => (0, doctor_1.runDoctor)(project, requirementId, Boolean(options.fix)));
  const ai = program.command("ai").description("Codex provider commands");
  ai
  .command("status")
@@ -334,4 +383,19 @@ ai
  const { runAiExec } = await Promise.resolve().then(() => __importStar(require("./commands/ai-exec")));
  await runAiExec(prompt.join(" ").trim());
  });
+ const importCmd = program.command("import").description("Import external work items into SDD flow");
+ importCmd
+ .command("issue")
+ .description("Import a GitHub issue URL and bootstrap autopilot")
+ .argument("<url>", "GitHub issue URL")
+ .action(async (url) => {
+ await (0, import_issue_1.runImportIssue)(url);
+ });
+ importCmd
+ .command("jira")
+ .description("Import a Jira ticket and bootstrap autopilot")
+ .argument("<ticket>", "Jira ticket key or browse URL")
+ .action(async (ticket) => {
+ await (0, import_jira_1.runImportJira)(ticket);
+ });
  program.parse(process.argv);
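The preAction hook above is where the new global flags meet the local metrics counter: each subcommand invocation resolves its merged options via optsWithGlobals, stores them as flags, and records a command-path metric. Below is a stripped-down sketch of that pattern; only the commander calls are real API, and recordCommandMetric here is a stand-in for the module in dist/telemetry/local-metrics.js.

```js
// Minimal sketch of the global-flag + preAction metric pattern used in cli.js.
const { Command } = require("commander");

const program = new Command()
  .name("sdd-cli")
  .option("--scope <name>", "Target a monorepo scope namespace")
  .option("--metrics-local", "Enable local opt-in telemetry snapshots");

// Stand-in for local_metrics_1.recordCommandMetric.
function recordCommandMetric(commandPath) {
  console.log(`metric recorded for: ${commandPath}`);
}

program.hook("preAction", (thisCommand, actionCommand) => {
  // optsWithGlobals merges root-level flags into the subcommand's options.
  const opts = actionCommand.optsWithGlobals();
  const commandPath = `${thisCommand.name()} ${actionCommand.name()}`.trim();
  // cli.js calls recordCommandMetric unconditionally and leaves the opt-in
  // decision to the telemetry module; the guard here keeps the sketch self-contained.
  if (opts.metricsLocal) {
    recordCommandMetric(commandPath);
  }
});

program.command("status").action(() => console.log("status output"));
program.parse(process.argv);
```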
package/dist/commands/ai-exec.js CHANGED
@@ -3,15 +3,16 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.runAiExec = runAiExec;
  const codex_1 = require("../providers/codex");
  const prompt_1 = require("../ui/prompt");
+ const errors_1 = require("../errors");
  async function runAiExec(promptArg) {
  const prompt = promptArg || (await (0, prompt_1.ask)("Prompt: "));
  if (!prompt) {
- console.log("Prompt is required.");
+ (0, errors_1.printError)("SDD-1501", "Prompt is required.");
  return;
  }
  const result = (0, codex_1.codexExec)(prompt);
  if (!result.ok) {
- console.log(`Codex error: ${result.error}`);
+ (0, errors_1.printError)("SDD-1502", `Codex error: ${result.error}`);
  return;
  }
  console.log(result.output);
package/dist/commands/ai-status.js CHANGED
@@ -2,10 +2,11 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.runAiStatus = runAiStatus;
  const codex_1 = require("../providers/codex");
+ const errors_1 = require("../errors");
  function runAiStatus() {
  const result = (0, codex_1.codexVersion)();
  if (!result.ok) {
- console.log(`Codex not available: ${result.error}`);
+ (0, errors_1.printError)("SDD-1503", `Codex not available: ${result.error}`);
  return;
  }
  console.log(`Codex available: ${result.output}`);
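Both commands above now report failures through the shared errors module with stable SDD-xxxx codes, which the README change points at docs/ERROR_CODES.md. The module's source is not shown in this diff; the following is a minimal sketch of the shape its call sites imply, mirroring the local printError helper added in doctor.js, and the real implementation may differ.

```js
// Hypothetical reconstruction of dist/errors.js based only on how
// errors_1.printError is called above; illustrative, not the published source.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.printError = printError;
function printError(code, message) {
    // Same `[SDD-xxxx] message` format as the local helper in doctor.js.
    console.log(`[${code}] ${message}`);
}
```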
package/dist/commands/doctor.d.ts CHANGED
@@ -1 +1 @@
- export declare function runDoctor(projectName?: string, reqId?: string): void;
+ export declare function runDoctor(projectName?: string, reqId?: string, autoFix?: boolean): void;
package/dist/commands/doctor.js CHANGED
@@ -44,15 +44,177 @@ function inferSchema(filePath) {
  return "project-readme.schema.json";
  return null;
  }
- function runDoctor(projectName, reqId) {
+ function printError(code, message) {
+ console.log(`[${code}] ${message}`);
+ }
+ function ensureOpsFiles(requirementDir) {
+ const fixed = [];
+ const changelog = path_1.default.join(requirementDir, "changelog.md");
+ const progressLog = path_1.default.join(requirementDir, "progress-log.md");
+ if (!fs_1.default.existsSync(changelog)) {
+ fs_1.default.writeFileSync(changelog, "# Changelog\n\n", "utf-8");
+ fixed.push(changelog);
+ }
+ if (!fs_1.default.existsSync(progressLog)) {
+ fs_1.default.writeFileSync(progressLog, "# Progress Log\n\n", "utf-8");
+ fixed.push(progressLog);
+ }
+ return fixed;
+ }
+ function ensureRequirementsLayout(projectRoot) {
+ const fixed = [];
+ const statuses = ["backlog", "wip", "in-progress", "done", "archived"];
+ const base = path_1.default.join(projectRoot, "requirements");
+ fs_1.default.mkdirSync(base, { recursive: true });
+ for (const status of statuses) {
+ const dir = path_1.default.join(base, status);
+ if (!fs_1.default.existsSync(dir)) {
+ fs_1.default.mkdirSync(dir, { recursive: true });
+ fixed.push(dir);
+ }
+ }
+ return fixed;
+ }
+ function inferRequirementStatus(requirementDir) {
+ const parts = requirementDir.split(path_1.default.sep);
+ const reqIdx = parts.lastIndexOf("requirements");
+ if (reqIdx >= 0 && parts[reqIdx + 1]) {
+ return parts[reqIdx + 1];
+ }
+ return "backlog";
+ }
+ function buildJsonSkeleton(fileName, reqId, status) {
+ if (fileName === "requirement.json") {
+ return {
+ id: reqId,
+ title: reqId,
+ objective: "Autofixed requirement placeholder",
+ status,
+ actors: ["user"],
+ scope: { in: ["core workflow"], out: ["to refine"] },
+ acceptanceCriteria: ["placeholder acceptance criteria"],
+ nfrs: {
+ security: "baseline",
+ performance: "baseline",
+ availability: "baseline"
+ },
+ constraints: [],
+ risks: [],
+ links: [],
+ updatedAt: new Date().toISOString()
+ };
+ }
+ if (fileName === "functional-spec.json") {
+ return {
+ overview: "autofixed functional overview",
+ actors: ["user"],
+ useCases: ["placeholder use case"],
+ flows: ["placeholder flow"],
+ rules: ["placeholder rule"],
+ errors: ["placeholder error"],
+ acceptanceCriteria: ["placeholder acceptance"]
+ };
+ }
+ if (fileName === "technical-spec.json") {
+ return {
+ stack: ["node"],
+ interfaces: ["cli"],
+ dataModel: ["requirement json"],
+ security: ["baseline"],
+ errors: ["handled"],
+ performance: ["baseline"],
+ observability: ["logs"]
+ };
+ }
+ if (fileName === "architecture.json") {
+ return {
+ context: "autofixed architecture context",
+ containers: ["cli runtime"],
+ components: ["commands"],
+ deployment: ["local"],
+ diagrams: ["context.mmd"]
+ };
+ }
+ if (fileName === "test-plan.json") {
+ return {
+ criticalPaths: ["placeholder path"],
+ edgeCases: ["placeholder edge case"],
+ coverageTarget: "80%",
+ acceptanceTests: ["placeholder acceptance test"],
+ regressions: ["placeholder regression"]
+ };
+ }
+ if (fileName === "quality.json") {
+ return {
+ rules: ["single-responsibility"],
+ thresholds: { coverage: "80%", complexity: "10" },
+ profiles: {}
+ };
+ }
+ return {};
+ }
+ function expectedJsonByStatus(status) {
+ if (status === "wip") {
+ return ["requirement.json", "functional-spec.json", "technical-spec.json", "architecture.json", "test-plan.json"];
+ }
+ if (status === "in-progress" || status === "done") {
+ return [
+ "requirement.json",
+ "functional-spec.json",
+ "technical-spec.json",
+ "architecture.json",
+ "test-plan.json",
+ "quality.json"
+ ];
+ }
+ return ["requirement.json"];
+ }
+ function ensureJsonSkeletons(requirementDir) {
+ const fixed = [];
+ const status = inferRequirementStatus(requirementDir);
+ const reqId = path_1.default.basename(requirementDir);
+ for (const fileName of expectedJsonByStatus(status)) {
+ const filePath = path_1.default.join(requirementDir, fileName);
+ if (fs_1.default.existsSync(filePath)) {
+ continue;
+ }
+ const payload = buildJsonSkeleton(fileName, reqId, status);
+ fs_1.default.writeFileSync(filePath, JSON.stringify(payload, null, 2), "utf-8");
+ fixed.push(filePath);
+ }
+ return fixed;
+ }
+ function collectRequirementDirs(root) {
+ const base = path_1.default.join(root, "requirements");
+ const statuses = ["backlog", "wip", "in-progress", "done", "archived"];
+ const dirs = [];
+ for (const status of statuses) {
+ const statusDir = path_1.default.join(base, status);
+ if (!fs_1.default.existsSync(statusDir)) {
+ continue;
+ }
+ const entries = fs_1.default.readdirSync(statusDir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.isDirectory()) {
+ dirs.push(path_1.default.join(statusDir, entry.name));
+ }
+ }
+ }
+ return dirs;
+ }
+ function runDoctor(projectName, reqId, autoFix) {
  const workspace = (0, index_1.getWorkspaceInfo)();
+ (0, index_1.ensureWorkspace)(workspace);
  let root = workspace.root;
+ let projectRootForFix = root;
  if (projectName) {
  try {
  root = (0, index_1.getProjectInfo)(workspace, projectName).root;
+ projectRootForFix = root;
  }
  catch (error) {
- console.log(error.message);
+ printError("SDD-2001", error.message);
+ process.exitCode = 1;
  return;
  }
  }
@@ -67,18 +229,44 @@ function runDoctor(projectName, reqId) {
  ];
  root = candidates.find((candidate) => fs_1.default.existsSync(candidate)) ?? root;
  }
+ const rootForFixReport = projectRootForFix;
  let failures = 0;
+ let fixes = 0;
+ const fixEntries = [];
  const promptResult = (0, validate_prompt_packs_1.validatePromptPacks)();
  if (!promptResult.valid) {
  failures += promptResult.errors.length;
- console.log("Prompt pack validation failed:");
- promptResult.errors.forEach((error) => console.log(`- ${error}`));
+ printError("SDD-2002", "Prompt pack validation failed:");
+ promptResult.errors.forEach((error) => printError("SDD-2002", error));
  }
  const templateResult = (0, validate_2.validateTemplates)();
  if (!templateResult.valid) {
  failures += templateResult.errors.length;
- console.log("Template validation failed:");
- templateResult.errors.forEach((error) => console.log(`- ${error}`));
+ printError("SDD-2003", "Template validation failed:");
+ templateResult.errors.forEach((error) => printError("SDD-2003", error));
+ }
+ if (autoFix) {
+ const layoutFixed = ensureRequirementsLayout(projectRootForFix);
+ fixes += layoutFixed.length;
+ layoutFixed.forEach((dir) => {
+ fixEntries.push(dir);
+ console.log(`[SDD-2009] Fixed: ${dir}`);
+ });
+ const requirementDirs = reqId ? [root] : collectRequirementDirs(root);
+ for (const dir of requirementDirs) {
+ const fixed = ensureOpsFiles(dir);
+ fixes += fixed.length;
+ fixed.forEach((filePath) => {
+ fixEntries.push(filePath);
+ console.log(`[SDD-2004] Fixed: ${filePath}`);
+ });
+ const jsonFixed = ensureJsonSkeletons(dir);
+ fixes += jsonFixed.length;
+ jsonFixed.forEach((filePath) => {
+ fixEntries.push(filePath);
+ console.log(`[SDD-2008] Fixed: ${filePath}`);
+ });
+ }
  }
  const jsonFiles = collectJsonFiles(root);
  if (jsonFiles.length === 0) {
@@ -89,17 +277,36 @@ function runDoctor(projectName, reqId) {
  if (!schema) {
  continue;
  }
- const data = JSON.parse(fs_1.default.readFileSync(filePath, "utf-8"));
+ let data;
+ try {
+ data = JSON.parse(fs_1.default.readFileSync(filePath, "utf-8"));
+ }
+ catch (error) {
+ failures += 1;
+ printError("SDD-2005", `Invalid JSON: ${filePath}`);
+ printError("SDD-2005", error.message);
+ continue;
+ }
  const result = (0, validate_1.validateJson)(schema, data);
  if (!result.valid) {
  failures += 1;
- console.log(`Invalid: ${filePath}`);
- result.errors.forEach((error) => console.log(`- ${error}`));
+ printError("SDD-2006", `Invalid: ${filePath}`);
+ result.errors.forEach((error) => printError("SDD-2006", error));
  }
  else {
  console.log(`Valid: ${filePath}`);
  }
  }
+ if (fixes > 0) {
+ console.log(`[SDD-2004] Applied fixes: ${fixes}`);
+ const report = {
+ generatedAt: new Date().toISOString(),
+ root: rootForFixReport,
+ fixes,
+ entries: fixEntries
+ };
+ fs_1.default.writeFileSync(path_1.default.join(rootForFixReport, "doctor-fix-report.json"), JSON.stringify(report, null, 2), "utf-8");
+ }
  if (failures === 0 && jsonFiles.length > 0) {
  console.log("All JSON artifacts are valid.");
  }
@@ -107,6 +314,7 @@ function runDoctor(projectName, reqId) {
  console.log("Prompt packs and templates are valid.");
  }
  else {
- console.log(`Validation failed for ${failures} artifact(s).`);
+ printError("SDD-2007", `Validation failed for ${failures} artifact(s).`);
+ process.exitCode = 1;
  }
  }
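When `--fix` applies any remediation, runDoctor now also writes a doctor-fix-report.json summary (generatedAt, root, fixes, entries) at the fix root. Below is a small consumer sketch; the directory argument is an assumption about where your workspace or project root lives, so pass whatever root doctor reported.

```js
// Reads the doctor-fix-report.json written by `sdd-cli doctor --fix`.
const fs = require("node:fs");
const path = require("node:path");

function summarizeFixReport(rootDir) {
  const reportPath = path.join(rootDir, "doctor-fix-report.json");
  if (!fs.existsSync(reportPath)) {
    return null;
  }
  const report = JSON.parse(fs.readFileSync(reportPath, "utf-8"));
  // Shape per the diff above: generatedAt, root, fixes, entries.
  console.log(`${report.fixes} fix(es) applied at ${report.generatedAt}`);
  report.entries.forEach((entry) => console.log(`- ${entry}`));
  return report;
}

summarizeFixReport(process.argv[2] ?? ".");
```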
package/dist/commands/gen-architecture.js CHANGED
@@ -13,11 +13,12 @@ const validate_1 = require("../validation/validate");
  const gen_utils_1 = require("./gen-utils");
  const flags_1 = require("../context/flags");
  const index_1 = require("../workspace/index");
+ const errors_1 = require("../errors");
  async function runGenArchitecture() {
  const projectName = await (0, prompt_1.askProjectName)();
  const reqId = await (0, prompt_1.ask)("Requirement ID (REQ-...): ");
  if (!projectName || !reqId) {
- console.log("Project name and requirement ID are required.");
+ (0, errors_1.printError)("SDD-1631", "Project name and requirement ID are required.");
  return;
  }
  const workspace = (0, index_1.getWorkspaceInfo)();
@@ -26,12 +27,12 @@ async function runGenArchitecture() {
  project = (0, index_1.getProjectInfo)(workspace, projectName);
  }
  catch (error) {
- console.log(error.message);
+ (0, errors_1.printError)("SDD-1632", error.message);
  return;
  }
  const requirementDir = (0, gen_utils_1.findRequirementDir)(project.name, reqId);
  if (!requirementDir) {
- console.log("Requirement not found.");
+ (0, errors_1.printError)("SDD-1633", "Requirement not found.");
  return;
  }
  const context = await (0, prompt_1.ask)("Architecture context: ");
@@ -50,8 +51,8 @@ async function runGenArchitecture() {
  };
  const validation = (0, validate_1.validateJson)("architecture.schema.json", architectureJson);
  if (!validation.valid) {
- console.log("Architecture validation failed:");
- validation.errors.forEach((error) => console.log(`- ${error}`));
+ (0, errors_1.printError)("SDD-1634", "Architecture validation failed.");
+ validation.errors.forEach((error) => (0, errors_1.printError)("SDD-1634", error));
  return;
  }
  const template = (0, render_1.loadTemplate)("architecture");
package/dist/commands/gen-best-practices.js CHANGED
@@ -11,6 +11,7 @@ const validate_1 = require("../validation/validate");
  const gen_utils_1 = require("./gen-utils");
  const flags_1 = require("../context/flags");
  const index_1 = require("../workspace/index");
+ const errors_1 = require("../errors");
  function renderQualityYaml(rules, coverage, complexity) {
  const ruleLines = rules.length > 0 ? rules.map((rule) => ` - ${rule}`).join("\n") : " - N/A";
  return [
@@ -27,7 +28,7 @@ async function runGenBestPractices() {
  const projectName = await (0, prompt_1.askProjectName)();
  const reqId = await (0, prompt_1.ask)("Requirement ID (REQ-...): ");
  if (!projectName || !reqId) {
- console.log("Project name and requirement ID are required.");
+ (0, errors_1.printError)("SDD-1641", "Project name and requirement ID are required.");
  return;
  }
  const workspace = (0, index_1.getWorkspaceInfo)();
@@ -36,12 +37,12 @@ async function runGenBestPractices() {
  project = (0, index_1.getProjectInfo)(workspace, projectName);
  }
  catch (error) {
- console.log(error.message);
+ (0, errors_1.printError)("SDD-1642", error.message);
  return;
  }
  const requirementDir = (0, gen_utils_1.findRequirementDir)(project.name, reqId);
  if (!requirementDir) {
- console.log("Requirement not found.");
+ (0, errors_1.printError)("SDD-1643", "Requirement not found.");
  return;
  }
  const rules = await (0, prompt_1.ask)("Quality rules - comma separated: ");
@@ -61,8 +62,8 @@ async function runGenBestPractices() {
  };
  const validation = (0, validate_1.validateJson)("quality.schema.json", qualityJson);
  if (!validation.valid) {
- console.log("Quality validation failed:");
- validation.errors.forEach((error) => console.log(`- ${error}`));
+ (0, errors_1.printError)("SDD-1644", "Quality validation failed.");
+ validation.errors.forEach((error) => (0, errors_1.printError)("SDD-1644", error));
  return;
  }
  const qualityYaml = renderQualityYaml(qualityJson.rules, qualityJson.thresholds.coverage, qualityJson.thresholds.complexity);
package/dist/commands/gen-functional-spec.js CHANGED
@@ -13,11 +13,12 @@ const validate_1 = require("../validation/validate");
  const gen_utils_1 = require("./gen-utils");
  const flags_1 = require("../context/flags");
  const index_1 = require("../workspace/index");
+ const errors_1 = require("../errors");
  async function runGenFunctionalSpec() {
  const projectName = await (0, prompt_1.askProjectName)();
  const reqId = await (0, prompt_1.ask)("Requirement ID (REQ-...): ");
  if (!projectName || !reqId) {
- console.log("Project name and requirement ID are required.");
+ (0, errors_1.printError)("SDD-1611", "Project name and requirement ID are required.");
  return;
  }
  const workspace = (0, index_1.getWorkspaceInfo)();
@@ -26,12 +27,12 @@ async function runGenFunctionalSpec() {
  project = (0, index_1.getProjectInfo)(workspace, projectName);
  }
  catch (error) {
- console.log(error.message);
+ (0, errors_1.printError)("SDD-1612", error.message);
  return;
  }
  const requirementDir = (0, gen_utils_1.findRequirementDir)(project.name, reqId);
  if (!requirementDir) {
- console.log("Requirement not found.");
+ (0, errors_1.printError)("SDD-1613", "Requirement not found.");
  return;
  }
  const overview = await (0, prompt_1.ask)("Functional overview: ");
@@ -54,8 +55,8 @@ async function runGenFunctionalSpec() {
  };
  const validation = (0, validate_1.validateJson)("functional-spec.schema.json", functionalJson);
  if (!validation.valid) {
- console.log("Functional spec validation failed:");
- validation.errors.forEach((error) => console.log(`- ${error}`));
+ (0, errors_1.printError)("SDD-1614", "Functional spec validation failed.");
+ validation.errors.forEach((error) => (0, errors_1.printError)("SDD-1614", error));
  return;
  }
  const template = (0, render_1.loadTemplate)("functional-spec");