@mcoda/core 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. package/CHANGELOG.md +7 -0
  2. package/LICENSE +21 -0
  3. package/README.md +9 -0
  4. package/dist/api/AgentsApi.d.ts +36 -0
  5. package/dist/api/AgentsApi.d.ts.map +1 -0
  6. package/dist/api/AgentsApi.js +176 -0
  7. package/dist/api/QaTasksApi.d.ts +8 -0
  8. package/dist/api/QaTasksApi.d.ts.map +1 -0
  9. package/dist/api/QaTasksApi.js +36 -0
  10. package/dist/api/TasksApi.d.ts +7 -0
  11. package/dist/api/TasksApi.d.ts.map +1 -0
  12. package/dist/api/TasksApi.js +34 -0
  13. package/dist/config/ConfigService.d.ts +3 -0
  14. package/dist/config/ConfigService.d.ts.map +1 -0
  15. package/dist/config/ConfigService.js +2 -0
  16. package/dist/domain/dependencies/Dependency.d.ts +3 -0
  17. package/dist/domain/dependencies/Dependency.d.ts.map +1 -0
  18. package/dist/domain/dependencies/Dependency.js +2 -0
  19. package/dist/domain/epics/Epic.d.ts +3 -0
  20. package/dist/domain/epics/Epic.d.ts.map +1 -0
  21. package/dist/domain/epics/Epic.js +2 -0
  22. package/dist/domain/projects/Project.d.ts +3 -0
  23. package/dist/domain/projects/Project.d.ts.map +1 -0
  24. package/dist/domain/projects/Project.js +2 -0
  25. package/dist/domain/tasks/Task.d.ts +3 -0
  26. package/dist/domain/tasks/Task.d.ts.map +1 -0
  27. package/dist/domain/tasks/Task.js +2 -0
  28. package/dist/domain/userStories/UserStory.d.ts +3 -0
  29. package/dist/domain/userStories/UserStory.d.ts.map +1 -0
  30. package/dist/domain/userStories/UserStory.js +2 -0
  31. package/dist/index.d.ts +28 -0
  32. package/dist/index.d.ts.map +1 -0
  33. package/dist/index.js +27 -0
  34. package/dist/prompts/PdrPrompts.d.ts +4 -0
  35. package/dist/prompts/PdrPrompts.d.ts.map +1 -0
  36. package/dist/prompts/PdrPrompts.js +21 -0
  37. package/dist/prompts/PromptLoader.d.ts +3 -0
  38. package/dist/prompts/PromptLoader.d.ts.map +1 -0
  39. package/dist/prompts/PromptLoader.js +2 -0
  40. package/dist/prompts/SdsPrompts.d.ts +5 -0
  41. package/dist/prompts/SdsPrompts.d.ts.map +1 -0
  42. package/dist/prompts/SdsPrompts.js +44 -0
  43. package/dist/services/agents/AgentManagementService.d.ts +3 -0
  44. package/dist/services/agents/AgentManagementService.d.ts.map +1 -0
  45. package/dist/services/agents/AgentManagementService.js +2 -0
  46. package/dist/services/agents/GatewayAgentService.d.ts +92 -0
  47. package/dist/services/agents/GatewayAgentService.d.ts.map +1 -0
  48. package/dist/services/agents/GatewayAgentService.js +870 -0
  49. package/dist/services/agents/RoutingApiClient.d.ts +23 -0
  50. package/dist/services/agents/RoutingApiClient.d.ts.map +1 -0
  51. package/dist/services/agents/RoutingApiClient.js +62 -0
  52. package/dist/services/agents/RoutingService.d.ts +50 -0
  53. package/dist/services/agents/RoutingService.d.ts.map +1 -0
  54. package/dist/services/agents/RoutingService.js +386 -0
  55. package/dist/services/agents/generated/RoutingApiClient.d.ts +21 -0
  56. package/dist/services/agents/generated/RoutingApiClient.d.ts.map +1 -0
  57. package/dist/services/agents/generated/RoutingApiClient.js +68 -0
  58. package/dist/services/backlog/BacklogService.d.ts +98 -0
  59. package/dist/services/backlog/BacklogService.d.ts.map +1 -0
  60. package/dist/services/backlog/BacklogService.js +453 -0
  61. package/dist/services/backlog/TaskOrderingService.d.ts +88 -0
  62. package/dist/services/backlog/TaskOrderingService.d.ts.map +1 -0
  63. package/dist/services/backlog/TaskOrderingService.js +675 -0
  64. package/dist/services/docs/DocsService.d.ts +82 -0
  65. package/dist/services/docs/DocsService.d.ts.map +1 -0
  66. package/dist/services/docs/DocsService.js +1631 -0
  67. package/dist/services/estimate/EstimateService.d.ts +12 -0
  68. package/dist/services/estimate/EstimateService.d.ts.map +1 -0
  69. package/dist/services/estimate/EstimateService.js +103 -0
  70. package/dist/services/estimate/VelocityService.d.ts +19 -0
  71. package/dist/services/estimate/VelocityService.d.ts.map +1 -0
  72. package/dist/services/estimate/VelocityService.js +237 -0
  73. package/dist/services/estimate/types.d.ts +30 -0
  74. package/dist/services/estimate/types.d.ts.map +1 -0
  75. package/dist/services/estimate/types.js +1 -0
  76. package/dist/services/execution/ExecutionService.d.ts +3 -0
  77. package/dist/services/execution/ExecutionService.d.ts.map +1 -0
  78. package/dist/services/execution/ExecutionService.js +2 -0
  79. package/dist/services/execution/QaFollowupService.d.ts +38 -0
  80. package/dist/services/execution/QaFollowupService.d.ts.map +1 -0
  81. package/dist/services/execution/QaFollowupService.js +236 -0
  82. package/dist/services/execution/QaProfileService.d.ts +22 -0
  83. package/dist/services/execution/QaProfileService.d.ts.map +1 -0
  84. package/dist/services/execution/QaProfileService.js +142 -0
  85. package/dist/services/execution/QaTasksService.d.ts +101 -0
  86. package/dist/services/execution/QaTasksService.d.ts.map +1 -0
  87. package/dist/services/execution/QaTasksService.js +1117 -0
  88. package/dist/services/execution/TaskSelectionService.d.ts +50 -0
  89. package/dist/services/execution/TaskSelectionService.d.ts.map +1 -0
  90. package/dist/services/execution/TaskSelectionService.js +281 -0
  91. package/dist/services/execution/TaskStateService.d.ts +19 -0
  92. package/dist/services/execution/TaskStateService.d.ts.map +1 -0
  93. package/dist/services/execution/TaskStateService.js +59 -0
  94. package/dist/services/execution/WorkOnTasksService.d.ts +80 -0
  95. package/dist/services/execution/WorkOnTasksService.d.ts.map +1 -0
  96. package/dist/services/execution/WorkOnTasksService.js +1833 -0
  97. package/dist/services/jobs/JobInsightsService.d.ts +97 -0
  98. package/dist/services/jobs/JobInsightsService.d.ts.map +1 -0
  99. package/dist/services/jobs/JobInsightsService.js +263 -0
  100. package/dist/services/jobs/JobResumeService.d.ts +16 -0
  101. package/dist/services/jobs/JobResumeService.d.ts.map +1 -0
  102. package/dist/services/jobs/JobResumeService.js +113 -0
  103. package/dist/services/jobs/JobService.d.ts +149 -0
  104. package/dist/services/jobs/JobService.d.ts.map +1 -0
  105. package/dist/services/jobs/JobService.js +490 -0
  106. package/dist/services/jobs/JobsApiClient.d.ts +73 -0
  107. package/dist/services/jobs/JobsApiClient.d.ts.map +1 -0
  108. package/dist/services/jobs/JobsApiClient.js +67 -0
  109. package/dist/services/openapi/OpenApiService.d.ts +54 -0
  110. package/dist/services/openapi/OpenApiService.d.ts.map +1 -0
  111. package/dist/services/openapi/OpenApiService.js +503 -0
  112. package/dist/services/planning/CreateTasksService.d.ts +68 -0
  113. package/dist/services/planning/CreateTasksService.d.ts.map +1 -0
  114. package/dist/services/planning/CreateTasksService.js +989 -0
  115. package/dist/services/planning/KeyHelpers.d.ts +5 -0
  116. package/dist/services/planning/KeyHelpers.d.ts.map +1 -0
  117. package/dist/services/planning/KeyHelpers.js +62 -0
  118. package/dist/services/planning/PlanningService.d.ts +3 -0
  119. package/dist/services/planning/PlanningService.d.ts.map +1 -0
  120. package/dist/services/planning/PlanningService.js +2 -0
  121. package/dist/services/planning/RefineTasksService.d.ts +56 -0
  122. package/dist/services/planning/RefineTasksService.d.ts.map +1 -0
  123. package/dist/services/planning/RefineTasksService.js +1328 -0
  124. package/dist/services/review/CodeReviewService.d.ts +103 -0
  125. package/dist/services/review/CodeReviewService.d.ts.map +1 -0
  126. package/dist/services/review/CodeReviewService.js +1187 -0
  127. package/dist/services/system/SystemUpdateService.d.ts +55 -0
  128. package/dist/services/system/SystemUpdateService.d.ts.map +1 -0
  129. package/dist/services/system/SystemUpdateService.js +136 -0
  130. package/dist/services/tasks/TaskApiResolver.d.ts +7 -0
  131. package/dist/services/tasks/TaskApiResolver.d.ts.map +1 -0
  132. package/dist/services/tasks/TaskApiResolver.js +41 -0
  133. package/dist/services/tasks/TaskDetailService.d.ts +106 -0
  134. package/dist/services/tasks/TaskDetailService.d.ts.map +1 -0
  135. package/dist/services/tasks/TaskDetailService.js +332 -0
  136. package/dist/services/telemetry/TelemetryService.d.ts +53 -0
  137. package/dist/services/telemetry/TelemetryService.d.ts.map +1 -0
  138. package/dist/services/telemetry/TelemetryService.js +434 -0
  139. package/dist/workspace/WorkspaceManager.d.ts +35 -0
  140. package/dist/workspace/WorkspaceManager.d.ts.map +1 -0
  141. package/dist/workspace/WorkspaceManager.js +201 -0
  142. package/package.json +45 -0
@@ -0,0 +1,1631 @@
1
+ import path from "node:path";
2
+ import { promises as fs } from "node:fs";
3
+ import { AgentService } from "@mcoda/agents";
4
+ import { GlobalRepository } from "@mcoda/db";
5
+ import { DocdexClient } from "@mcoda/integrations";
6
+ import { DEFAULT_PDR_CHARACTER_PROMPT, DEFAULT_PDR_JOB_PROMPT, DEFAULT_PDR_RUNBOOK_PROMPT, } from "../../prompts/PdrPrompts.js";
7
+ import { DEFAULT_SDS_CHARACTER_PROMPT, DEFAULT_SDS_JOB_PROMPT, DEFAULT_SDS_RUNBOOK_PROMPT, DEFAULT_SDS_TEMPLATE, } from "../../prompts/SdsPrompts.js";
8
+ import { JobService } from "../jobs/JobService.js";
9
+ import { RoutingService } from "../agents/RoutingService.js";
10
/**
 * Ensure the parent directory of `targetPath` exists, creating intermediate
 * directories as needed. Resolves once the directory hierarchy is in place.
 */
const ensureDir = async (targetPath) => {
    const parentDir = path.dirname(targetPath);
    await fs.mkdir(parentDir, { recursive: true });
};
13
/**
 * Best-effort read of a prompt override stored under the workspace's
 * `.mcoda` directory. Returns the file contents as UTF-8 text, or
 * `undefined` when the file is missing or unreadable.
 */
const readPromptIfExists = async (workspace, relative) => {
    const promptPath = path.join(workspace.mcodaDir, relative);
    try {
        return await fs.readFile(promptPath, "utf8");
    }
    catch {
        // A missing override is the common case; callers fall back to defaults.
        return undefined;
    }
};
22
// Required PDR sections. Each entry lists acceptable heading variants,
// with the canonical title first; a draft satisfies the section when any
// variant appears as a markdown heading.
const PDR_REQUIRED_HEADINGS = [
    ["Introduction"],
    ["Scope"],
    ["Requirements", "Requirements & Constraints"],
    ["Architecture", "Architecture Overview"],
    ["Interfaces", "Interfaces / APIs"],
    ["Non-Functional", "Non-Functional Requirements"],
    ["Risks", "Risks & Mitigations"],
    ["Open Questions"],
    ["Acceptance Criteria"],
];
/**
 * Return the canonical titles of required PDR sections that are absent from
 * `draft`. Headings are matched at any level (#..######), case-insensitively.
 */
const missingPdrHeadings = (draft) => {
    const text = draft.trim();
    if (!text) {
        return PDR_REQUIRED_HEADINGS.map(([canonical]) => canonical);
    }
    const hasHeading = (section) => new RegExp(`^#{1,6}\\s+${section}\\b`, "im").test(text);
    const missing = [];
    for (const variants of PDR_REQUIRED_HEADINGS) {
        if (!variants.some(hasHeading)) {
            missing.push(variants[0]);
        }
    }
    return missing;
};
/**
 * A PDR draft is valid when it is non-trivial (>= 50 characters after
 * trimming) and contains every required section heading.
 */
const validateDraft = (draft) => {
    if (!draft || draft.trim().length < 50) {
        return false;
    }
    return missingPdrHeadings(draft).length === 0;
};
44
/**
 * Guarantee that markdown `draft` contains a `title` section with a
 * non-empty body. If the heading is missing, the section is appended with
 * `fallback` as its body; if the heading exists but its body is blank, the
 * fallback is inserted; otherwise the draft is returned unchanged.
 */
const ensureSectionContent = (draft, title, fallback) => {
    // Escape the title so section names containing regex metacharacters
    // cannot corrupt the patterns below.
    const safeTitle = title.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const headingRegex = new RegExp(`^#{1,6}\\s+${safeTitle}\\b.*$`, "im");
    if (!headingRegex.test(draft)) {
        // Section missing entirely: append heading plus fallback body.
        return `${draft.trimEnd()}\n\n## ${title}\n${fallback}\n`;
    }
    // Capture the heading line and everything up to the next heading or the
    // true end of input. BUG FIX: the original lookahead used `$`, which with
    // the "m" flag matches at every line end, so the captured body was always
    // empty and the fallback was injected even into sections that already had
    // content. `(?![\s\S])` matches only at the real end of the string.
    const blockRegex = new RegExp(`(^#{1,6}\\s+${safeTitle}\\b.*$)([\\s\\S]*?)(?=^#{1,6}\\s+|(?![\\s\\S]))`, "im");
    return draft.replace(blockRegex, (_match, heading, body) => {
        const trimmed = body.trim();
        if (trimmed.length > 0)
            return `${heading}${body}`; // section already has content — keep as-is
        return `${heading}\n\n${fallback}\n`;
    });
};
57
/**
 * Validate an SDS draft: it must contain at least 100 characters after
 * trimming, and for each required section keyword there must be a markdown
 * heading (any level) whose text contains that keyword.
 */
const validateSdsDraft = (draft) => {
    if (!draft || draft.trim().length < 100) {
        return false;
    }
    const requiredSections = [
        "Introduction",
        "Scope",
        "Architecture",
        "Components",
        "Data Model",
        "Interfaces",
        "Non-Functional",
        "Security",
        "Failure",
        "Risks",
        "Open Questions",
    ];
    const hasSection = (section) => new RegExp(`^#{1,6}\\s+[^\\n]*${section}\\b`, "im").test(draft);
    return requiredSections.every(hasSection);
};
75
/**
 * Convert arbitrary text into a lowercase, hyphen-separated slug suitable
 * for filenames. Falls back to "draft" when nothing usable survives.
 */
const slugify = (value) => {
    const slug = value
        .toLowerCase()
        .replace(/[^a-z0-9]+/g, "-")
        .replace(/^-+|-+$/g, "")
        .replace(/-{2,}/g, "-");
    return slug || "draft";
};
/** Rough token estimate: ~4 characters per token, never less than 1. */
const estimateTokens = (text) => Math.max(1, Math.ceil(text.length / 4));
// Upper bound on the token size of the assembled SDS context blocks.
const SDS_CONTEXT_TOKEN_BUDGET = 8000;
82
/**
 * Extract up to `limit` markdown bullet items ("-", "*", or "+") from
 * `content`, returning their text with the bullet marker stripped.
 * Blank bullets and non-bullet lines are ignored.
 */
const extractBullets = (content, limit = 20) => {
    const bullets = [];
    for (const raw of content.split(/\r?\n/)) {
        if (bullets.length >= limit)
            break;
        const line = raw.trim();
        if (!/^[-*+]\s+/.test(line))
            continue;
        const text = line.replace(/^[-*+]\s+/, "").trim();
        if (text.length > 0)
            bullets.push(text);
    }
    return bullets;
};
91
/**
 * Assembles document context (RFP, PDR, SDS, OpenAPI, constraints) for
 * PDR/SDS generation runs. Primary source is the docdex service; when docdex
 * is unreachable it falls back to the latest local markdown files under the
 * workspace docs directories and records warnings about the degradation.
 *
 * NOTE(review): `docdex` is presumably a DocdexClient from @mcoda/integrations
 * and `workspace` exposes `mcodaDir`, `workspaceRoot`, and `workspaceId` —
 * confirm against WorkspaceManager.
 */
class DocContextAssembler {
    constructor(docdex, workspace) {
        this.docdex = docdex;
        this.workspace = workspace;
    }
    /**
     * One-line summary of a document: the first five non-empty lines joined,
     * falling back to the title, the path, or the literal "Document".
     */
    summarize(doc) {
        const lines = (doc.content ?? "").split(/\r?\n/).filter(Boolean);
        const head = lines.slice(0, 5).join(" ");
        return head || doc.title || doc.path || "Document";
    }
    /**
     * Find the most recently modified `.md` file of the given doc type under
     * `.mcoda/docs/<type>` or `docs/<type>` and wrap it in a document-shaped
     * object. Returns undefined when no candidate exists. Unreadable
     * directories are skipped silently (missing dirs are expected).
     */
    async findLatestLocalDoc(docType) {
        const candidates = [];
        const dirs = [
            path.join(this.workspace.mcodaDir, "docs", docType.toLowerCase()),
            path.join(this.workspace.workspaceRoot, "docs", docType.toLowerCase()),
        ];
        for (const dir of dirs) {
            try {
                const entries = await fs.readdir(dir);
                for (const entry of entries.filter((e) => e.endsWith(".md"))) {
                    const full = path.join(dir, entry);
                    const stat = await fs.stat(full);
                    candidates.push({ path: full, mtime: stat.mtimeMs });
                }
            }
            catch {
                // ignore — directory may simply not exist
            }
        }
        // Newest file wins.
        const latest = candidates.sort((a, b) => b.mtime - a.mtime)[0];
        if (!latest)
            return undefined;
        const content = await fs.readFile(latest.path, "utf8");
        const timestamp = new Date(latest.mtime).toISOString();
        return {
            id: `local-${docType.toLowerCase()}-${path.basename(latest.path)}`,
            docType,
            path: latest.path,
            content,
            createdAt: timestamp,
            updatedAt: timestamp,
        };
    }
    /**
     * Convert a document into a labeled context block. Uses up to
     * `maxSegments` pre-segmented chunks (each capped at 800 chars) when
     * available, otherwise the raw content or a summary. The returned block
     * carries a `summary` alternative used later for budget degradation.
     */
    formatBlock(doc, label, priority, maxSegments = 8) {
        const segments = (doc.segments ?? []).slice(0, maxSegments);
        const heading = `[${doc.docType}] ${label}`;
        const source = doc.path ?? doc.id ?? label;
        const body = segments.length
            ? segments
                .map((seg, idx) => {
                const head = seg.heading ?? `Segment ${idx + 1}`;
                const trimmed = seg.content.length > 800 ? `${seg.content.slice(0, 800)}...` : seg.content;
                return `### ${head}\n${trimmed}`;
            })
                .join("\n\n")
            : doc.content ?? this.summarize(doc);
        const content = [heading, `Source: ${source}`, body].filter(Boolean).join("\n");
        return {
            label,
            content,
            summary: `${heading}: ${this.summarize(doc)}`,
            priority,
            tokens: estimateTokens(content),
        };
    }
    /**
     * Shrink blocks in place until their combined token estimate fits
     * `budget`. First pass collapses blocks to their summaries (lowest
     * priority first); second pass hard-truncates content. Appends a warning
     * per degraded block. Returns the (mutated) blocks array.
     */
    enforceBudget(blocks, budget, warnings) {
        let total = blocks.reduce((sum, b) => sum + b.tokens, 0);
        if (total <= budget)
            return blocks;
        const ordered = [...blocks].sort((a, b) => a.priority - b.priority); // degrade lowest priority first
        for (const block of ordered) {
            if (total <= budget)
                break;
            if (block.content !== block.summary) {
                total -= block.tokens;
                block.content = block.summary;
                block.tokens = estimateTokens(block.content);
                total += block.tokens;
                warnings.push(`Context for ${block.label} truncated to summary to fit token budget.`);
            }
        }
        if (total > budget) {
            for (const block of ordered) {
                if (total <= budget)
                    break;
                const trimmed = block.content.slice(0, Math.max(400, Math.floor((budget / ordered.length) * 4)));
                const tokens = estimateTokens(trimmed);
                if (tokens < block.tokens) {
                    total -= block.tokens;
                    // NOTE(review): `tokens` is estimated from `trimmed` alone, but the
                    // stored content also gains the "[truncated]" suffix — the block's
                    // token count slightly undercounts. Confirm this is acceptable.
                    block.content = `${trimmed}\n\n[truncated]`;
                    block.tokens = tokens;
                    total += block.tokens;
                    warnings.push(`Context for ${block.label} truncated further to meet token budget.`);
                }
            }
        }
        return blocks;
    }
    /**
     * Gather context for SDS generation: PDR/SDS/OpenAPI/RFP/Architecture
     * docs from docdex (profile "sds_default"), with local-file fallbacks for
     * missing PDR/RFP or when docdex is entirely unavailable. Throws when
     * neither a PDR nor an RFP can be resolved. The resulting blocks are
     * trimmed to SDS_CONTEXT_TOKEN_BUDGET.
     */
    async buildSdsContext(input) {
        const warnings = [];
        let docdexAvailable = true;
        let rfp;
        let pdrs = [];
        let existingSds = [];
        let openapi = [];
        const misc = [];
        try {
            // All five searches run in parallel; a single failure drops us into
            // the local-files fallback below.
            const [pdrDocs, sdsDocs, openapiDocs, rfpDocs, constraintsDocs] = await Promise.all([
                this.docdex.search({ projectKey: input.projectKey, docType: "PDR", profile: "sds_default" }),
                this.docdex.search({ projectKey: input.projectKey, docType: "SDS", profile: "sds_default" }),
                this.docdex.search({ projectKey: input.projectKey, docType: "OPENAPI", profile: "sds_default" }),
                this.docdex.search({ projectKey: input.projectKey, docType: "RFP", profile: "sds_default" }),
                this.docdex.search({ projectKey: input.projectKey, docType: "Architecture", profile: "sds_default" }),
            ]);
            pdrs = pdrDocs;
            existingSds = sdsDocs;
            openapi = openapiDocs;
            rfp = rfpDocs[0];
            misc.push(...constraintsDocs);
            if (!rfp && pdrs.length === 0) {
                warnings.push("RFP not found in docdex; SDS will rely on PDR content only.");
            }
            // Fill gaps from the latest local files when docdex had no hits.
            if (!pdrs.length) {
                const localPdr = await this.findLatestLocalDoc("PDR");
                if (localPdr) {
                    pdrs = [localPdr];
                    warnings.push("No PDR found in docdex; using latest local PDR file.");
                }
            }
            if (!rfp) {
                const localRfp = await this.findLatestLocalDoc("RFP");
                if (localRfp) {
                    rfp = localRfp;
                    warnings.push("No RFP found in docdex; using latest local RFP file.");
                }
            }
        }
        catch (error) {
            // Docdex unreachable: degrade to local files only.
            docdexAvailable = false;
            warnings.push(`Docdex unavailable; attempting to use local docs (${error.message ?? "unknown"}).`);
            rfp = await this.findLatestLocalDoc("RFP");
            const localPdr = await this.findLatestLocalDoc("PDR");
            if (localPdr)
                pdrs = [localPdr];
            const localSds = await this.findLatestLocalDoc("SDS");
            if (localSds)
                existingSds = [localSds];
        }
        if (!pdrs.length && !rfp) {
            throw new Error("No PDR or RFP content could be resolved. Ensure docdex is reachable with an sds_default profile or add local docs under .mcoda/docs/pdr and docs/rfp.");
        }
        // Assemble prioritized context blocks (higher counts degrade later:
        // constraints=0, RFP/PDR/OpenAPI=1, existing SDS=2).
        const blocks = [];
        if (rfp)
            blocks.push(this.formatBlock(rfp, "RFP context", 1, 10));
        pdrs.slice(0, 2).forEach((doc, idx) => blocks.push(this.formatBlock(doc, `PDR ${idx + 1}`, 1, 8)));
        existingSds.slice(0, 1).forEach((doc) => blocks.push(this.formatBlock(doc, "Existing SDS", 2, 6)));
        if (openapi.length > 0) {
            // OpenAPI gets a compact bullet rendering instead of formatBlock.
            const doc = openapi[0];
            const segments = (doc.segments ?? []).slice(0, 6);
            const body = segments.length
                ? segments
                    .map((seg) => `- ${seg.heading ?? "operation"}: ${seg.content.slice(0, 400)}`)
                    .join("\n")
                : doc.content ?? this.summarize(doc);
            const content = [`[OPENAPI] ${doc.title ?? doc.path ?? doc.id}`, body].join("\n");
            blocks.push({
                label: "OpenAPI",
                content,
                summary: `OpenAPI: ${this.summarize(doc)}`,
                priority: 1,
                tokens: estimateTokens(content),
            });
        }
        if (misc.length > 0) {
            const doc = misc[0];
            blocks.push(this.formatBlock(doc, "Constraints & Principles", 0, 5));
        }
        const boundedBlocks = this.enforceBudget(blocks, SDS_CONTEXT_TOKEN_BUDGET, warnings);
        const summaryParts = [
            `PDRs: ${pdrs.length}`,
            existingSds.length ? `Existing SDS: ${existingSds.length}` : "Existing SDS: none",
            rfp ? `RFP: ${this.summarize(rfp)}` : "RFP: missing",
            openapi.length ? `OpenAPI: ${openapi.length}` : "OpenAPI: none",
            misc.length ? `Constraints: ${misc.length}` : "Constraints: none",
        ];
        return {
            rfp,
            pdrs,
            existingSds,
            openapi,
            misc,
            blocks: boundedBlocks,
            docdexAvailable,
            summary: summaryParts.join(" | "),
            warnings,
        };
    }
    /**
     * Gather context for PDR generation from an RFP identified by
     * `input.rfpId` (docdex lookup) or `input.rfpPath` (registered via
     * docdex, or read locally when docdex fails). Related PDR/SDS and
     * OpenAPI docs are attached only when docdex is reachable. Throws when
     * no RFP can be resolved, or rethrows the docdex error when only an
     * rfpId was supplied.
     */
    async buildContext(input) {
        const warnings = [];
        let rfp;
        let docdexAvailable = true;
        let openapi = [];
        try {
            if (input.rfpId) {
                rfp = await this.docdex.fetchDocumentById(input.rfpId);
            }
            else if (input.rfpPath) {
                const resolved = path.isAbsolute(input.rfpPath)
                    ? input.rfpPath
                    : path.join(this.workspace.workspaceRoot, input.rfpPath);
                rfp = await this.docdex.ensureRegisteredFromFile(resolved, "RFP", {
                    workspace: this.workspace.workspaceId,
                    projectKey: input.projectKey,
                });
            }
        }
        catch (error) {
            docdexAvailable = false;
            if (input.rfpPath) {
                // Docdex failed, but we can still read the RFP file directly.
                const resolved = path.isAbsolute(input.rfpPath)
                    ? input.rfpPath
                    : path.join(this.workspace.workspaceRoot, input.rfpPath);
                const content = await fs.readFile(resolved, "utf8");
                rfp = {
                    id: "rfp-local",
                    docType: "RFP",
                    path: resolved,
                    content,
                    createdAt: new Date().toISOString(),
                    updatedAt: new Date().toISOString(),
                };
                warnings.push(`Docdex unavailable; using local RFP content only (${error.message ?? "unknown error"}).`);
            }
            else {
                // No local path to fall back to — surface the original failure.
                throw error;
            }
        }
        if (!rfp) {
            throw new Error("RFP content could not be resolved. Provide --rfp-id or --rfp-path.");
        }
        let related = [];
        if (docdexAvailable) {
            related = await this.docdex.search({ projectKey: input.projectKey, docType: "PDR", profile: "rfp_default" });
            const sds = await this.docdex.search({ projectKey: input.projectKey, docType: "SDS", profile: "rfp_default" });
            openapi = await this.docdex.search({ projectKey: input.projectKey, docType: "OPENAPI", profile: "rfp_default" });
            related = [...related, ...sds];
        }
        const summaryParts = [
            `RFP: ${this.summarize(rfp)}`,
            related.length > 0 ? `Related: ${related.map((d) => this.summarize(d)).join("; ")}` : "Related: none found",
            openapi.length > 0 ? `OpenAPI: ${openapi.length} docs` : "OpenAPI: none",
        ];
        return {
            rfp,
            related,
            openapi,
            docdexAvailable,
            summary: summaryParts.join(" | "),
            bullets: extractBullets(rfp.content ?? ""),
            warnings,
        };
    }
}
354
/**
 * Compose the agent prompt for a PDR generation run.
 *
 * Joins (in order): job/character/command prompts (defaults applied when not
 * overridden), project key, context summary, RFP bullet cues, related-doc
 * listing, OpenAPI excerpts (or an explicit "do not invent endpoints" note),
 * a docdex-unavailable note when applicable, the required-outline
 * instructions, and the runbook prompt. Empty segments are dropped.
 */
const buildRunPrompt = (context, projectKey, prompts, runbook) => {
    const runbookPrompt = runbook || DEFAULT_PDR_RUNBOOK_PROMPT;
    const docdexNote = context.docdexAvailable
        ? ""
        : "Docdex context is unavailable. Use only the provided RFP content and clearly mark missing references.";
    const relatedSection = context.related.length === 0
        ? "No related PDR/SDS documents were found."
        : `Related docs:\n${context.related
            .map((doc) => `- ${doc.docType} ${doc.path ?? doc.id}: ${doc.title ?? ""}`.trim())
            .join("\n")}`;
    const promptsSection = [
        prompts?.jobPrompt ?? DEFAULT_PDR_JOB_PROMPT,
        prompts?.characterPrompt ?? DEFAULT_PDR_CHARACTER_PROMPT,
        prompts?.commandPrompts?.["docs:pdr"] ?? "",
    ]
        .filter(Boolean)
        .join("\n\n");
    return [
        promptsSection,
        `Workspace project: ${projectKey ?? "(not specified)"}`,
        `Context summary: ${context.summary}`,
        `RFP bullet cues:\n${context.bullets.map((b) => `- ${b}`).join("\n") || "- (none extracted)"}`,
        relatedSection,
        context.openapi.length
            // flatMap replaces the previous map(...).flat() — up to two
            // 200-char excerpts per OpenAPI document.
            ? `OpenAPI excerpts:\n${context.openapi
                .flatMap((doc) => (doc.segments ?? []).slice(0, 2).map((seg) => `- ${seg.heading ?? "segment"}: ${seg.content.slice(0, 200)}`))
                .join("\n")}`
            : "No OpenAPI excerpts available; do not invent endpoints.",
        docdexNote,
        [
            "Return markdown with exactly these sections as H2 headings, one time each:",
            "Introduction, Scope, Requirements & Constraints, Architecture Overview, Interfaces / APIs, Non-Functional Requirements, Risks & Mitigations, Open Questions, Acceptance Criteria",
            "Do not use bold headings; use `##` headings only. Do not repeat sections.",
        ].join("\n"),
        runbookPrompt,
    ]
        .filter(Boolean)
        .join("\n\n");
};
394
/**
 * Compose the agent prompt for an SDS generation run: job/character/command
 * prompts (with defaults), project key, the SDS template, the context
 * summary, every assembled context block, an optional docdex-unavailable
 * note, and the runbook. Empty segments are dropped before joining.
 */
const buildSdsRunPrompt = (context, projectKey, prompts, runbook, template) => {
    const promptPieces = [
        prompts?.jobPrompt ?? DEFAULT_SDS_JOB_PROMPT,
        prompts?.characterPrompt ?? DEFAULT_SDS_CHARACTER_PROMPT,
        prompts?.commandPrompts?.["docs:sds"] ?? "",
        prompts?.commandPrompts?.["docs:sds:generate"] ?? "",
    ].filter(Boolean);
    const promptsSection = promptPieces.join("\n\n");
    let docdexNote = "";
    if (!context.docdexAvailable) {
        docdexNote = "Docdex context is unavailable; rely on provided local PDR/RFP content and explicitly mark missing references.";
    }
    const contextBlocks = context.blocks.map((block) => `## ${block.label}\n${block.content}`);
    const sections = [
        promptsSection,
        `Workspace project: ${projectKey ?? "(not specified)"}`,
        `Template:\n${template}`,
        `Context summary: ${context.summary}`,
        contextBlocks.join("\n\n"),
        docdexNote,
        runbook,
    ];
    return sections.filter(Boolean).join("\n\n");
};
421
/**
 * Force a PDR draft into the canonical outline: a title, each required
 * section (best existing body, or a fallback when missing/empty), and a
 * trailing "Source RFP" section. Heading levels are normalized to H2 first
 * so section extraction sees a uniform structure.
 */
const ensureStructuredDraft = (draft, projectKey, context, rfpSource) => {
    const canonicalTitles = PDR_REQUIRED_HEADINGS.map((variants) => variants[0]);
    const normalized = normalizeHeadingsToH2(draft, canonicalTitles);
    const requiredSections = [
        { title: "Introduction", fallback: `This PDR summarizes project ${projectKey ?? "N/A"} based on ${rfpSource}.` },
        {
            title: "Scope",
            fallback: "In-scope: todo CRUD (title required; optional description, due date, priority), status toggle, filters/sort/search, bulk complete/delete, keyboard shortcuts, responsive UI, offline/localStorage. Out-of-scope: multi-user/auth/sync/backends, notifications/reminders, team features, heavy UI kits.",
        },
        {
            title: "Requirements & Constraints",
            fallback: context.bullets.map((b) => `- ${b}`).join("\n") ||
                "- Data model, UX flows, keyboard shortcuts, and offline localStorage persistence per RFP.",
        },
        { title: "Architecture Overview", fallback: "Describe the system architecture, components, and interactions." },
        { title: "Interfaces / APIs", fallback: "List key interfaces and constraints. Do not invent endpoints." },
        { title: "Non-Functional Requirements", fallback: "- Performance, reliability, compliance, and operational needs." },
        { title: "Risks & Mitigations", fallback: "- Enumerate risks from the RFP and proposed mitigations." },
        { title: "Open Questions", fallback: "- Outstanding questions to clarify with stakeholders." },
        { title: "Acceptance Criteria", fallback: "- Add/edit/delete todos persists offline; filters/sorts/search <100ms for 500 items; shortcuts (`n`, Ctrl/Cmd+Enter) work; bulk actions confirm/undo; responsive and accessible (WCAG AA basics)." },
    ];
    const titleLine = `# Product Design Review${projectKey ? `: ${projectKey}` : ""}`;
    // For each required section keep the best extracted body, or fall back.
    const sectionParts = requiredSections.flatMap((section) => {
        const best = getBestSectionBody(normalized, section.title);
        const cleaned = cleanBody(best ?? "");
        const body = cleaned && cleaned.length > 0 ? cleaned : cleanBody(section.fallback);
        return [`## ${section.title}`, body];
    });
    return [titleLine, ...sectionParts, "## Source RFP", rfpSource].join("\n\n");
};
455
/**
 * Ask the agent (via `invoke`) to tidy a PDR draft: deduplicate sections,
 * drop placeholder text, and return only the cleaned markdown (trimmed).
 * The `agent` parameter is accepted for interface parity but is not used
 * directly here.
 */
const tidyPdrDraft = async (draft, agent, invoke) => {
    const instructions = [
        "Requirements:",
        "- Keep exactly one instance of each H2 section: Introduction, Scope, Requirements & Constraints, Architecture Overview, Interfaces / APIs, Non-Functional Requirements, Risks & Mitigations, Open Questions, Acceptance Criteria, Source RFP.",
        "- Remove duplicate sections, bold headings posing as sections, placeholder sentences, and repeated bullet blocks. If the same idea appears twice, keep the richer/longer version and drop the restatement.",
        "- Do not add new sections or reorder the required outline.",
        "- Keep content concise and aligned to the headings. Do not alter semantics.",
        "- Return only the cleaned markdown.",
    ];
    const prompt = [
        "Tidy the following Product Design Review markdown:",
        draft,
        "",
        ...instructions,
    ].join("\n");
    const { output } = await invoke(prompt);
    return output.trim();
};
470
// Sections of a PDR that benefit from a targeted enrichment pass, each with
// guidance bullets describing what the enriched section should cover.
// NOTE(review): the guidance text is specific to a todo-app domain (localStorage,
// keyboard shortcuts) — confirm whether this should be parameterized per project.
const PDR_ENRICHMENT_SECTIONS = [
    {
        title: "Architecture Overview",
        guidance: [
            "List concrete modules/components: UI shells (list/detail), state/store, persistence adapter (localStorage), keyboard/shortcut handler, bulk selection manager, search/filter/sort utilities.",
            "Describe data flow (load -> store -> UI render; user actions -> store mutate -> persist).",
            "Call out offline-first behavior and how persistence errors are handled.",
        ],
    },
    {
        title: "Requirements & Constraints",
        guidance: [
            "Spell out data model fields (id, title, description?, status enum, dueDate format/timezone, priority enum order, createdAt/updatedAt, selection flag).",
            "Define localStorage key naming, schema versioning, and migration approach.",
            "Include accessibility expectations (keyboard focus, ARIA basics) and bundle size/perf targets.",
        ],
    },
    {
        title: "Interfaces / APIs",
        guidance: [
            "Define internal contracts: store API (add/update/delete/toggle/filter/search), persistence adapter API (load/save/validate), shortcut map (keys -> actions), bulk action contract, optional export/import shape.",
            "Clarify validation rules (required title, length limits, due date handling).",
        ],
    },
    {
        title: "Non-Functional Requirements",
        guidance: [
            "Quantify perf (<100ms for 500 items), bundle size goal, offline expectations, and accessibility targets (focus order, contrast).",
            "Reliability: handling storage quota/corruption, error surfacing.",
        ],
    },
    {
        title: "Risks & Mitigations",
        guidance: [
            "Cover localStorage limits/corruption, keyboard conflicts, bulk delete accidents, schema drift, and mobile usability.",
            "Provide specific mitigations (validation, confirmations/undo, migrations, debounced search, accessible shortcuts).",
        ],
    },
    {
        title: "Open Questions",
        guidance: [
            "Resolve defaults: sort/tie-breakers, priority order, initial filters, due date format/timezone.",
            "Ask about export/import needs, accessibility targets, theming/branding, undo/confirm patterns.",
        ],
    },
];
516
/**
 * Force an SDS draft into a complete outline. Section titles come from the
 * template's own markdown headings when present, otherwise from a built-in
 * default list. Sections missing from the draft are appended with
 * keyword-derived fallback bodies; sections that exist but are empty are
 * filled via ensureSectionContent.
 */
const ensureSdsStructuredDraft = (draft, projectKey, context, template) => {
    const normalized = draft.trim();
    // Extract heading titles from the template (any heading level).
    const templateHeadings = template
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => /^#{1,6}\s+/.test(line))
        .map((line) => line.replace(/^#{1,6}\s+/, "").trim());
    const defaultSections = [
        "Introduction",
        "Goals & Scope",
        "Architecture Overview",
        "Components & Responsibilities",
        "Data Model & Persistence",
        "Interfaces & Contracts",
        "Non-Functional Requirements",
        "Security & Compliance",
        "Failure Modes & Resilience",
        "Risks & Mitigations",
        "Assumptions",
        "Open Questions",
        "Acceptance Criteria",
    ];
    const sections = templateHeadings.length ? templateHeadings : defaultSections;
    // Bullet cues from the first PDR (or the RFP) seed the goals/scope fallback.
    const cues = extractBullets(context.pdrs[0]?.content ?? context.rfp?.content ?? "", 10);
    // Context-assembly warnings are surfaced as documented assumptions/gaps.
    const assumptionFallback = context.warnings.length > 0
        ? context.warnings.map((w) => `- Assumption/Gap: ${w}`).join("\n")
        : "- Document assumptions and dependencies.";
    // Keyword-based fallback body for a section title (case-insensitive).
    const fallbackFor = (section) => {
        const key = section.toLowerCase();
        if (key.includes("goal") || key.includes("scope")) {
            return cues.length ? cues.map((c) => `- ${c}`).join("\n") : "- Goals and scope derived from PDR/RFP.";
        }
        if (key.includes("architecture"))
            return "- High-level architecture, deployment, and key data flows.";
        if (key.includes("component"))
            return "- Component responsibilities and interactions.";
        if (key.includes("data"))
            return "- Data entities, schemas, storage, and migrations.";
        if (key.includes("interface") || key.includes("contract"))
            return "- Interfaces/APIs aligned to OpenAPI; list operation ids or mark TODOs.";
        if (key.includes("non-functional"))
            return "- Performance, reliability, observability, capacity assumptions.";
        if (key.includes("security"))
            return "- Authentication, authorization, secrets, compliance, data protection.";
        if (key.includes("failure") || key.includes("resilience"))
            return "- Failure modes, detection, rollback, and recovery paths.";
        if (key.includes("risk"))
            return "- Enumerate major risks and proposed mitigations.";
        if (key.includes("assumption"))
            return assumptionFallback;
        if (key.includes("question"))
            return "- Outstanding questions and clarifications required.";
        if (key.includes("acceptance"))
            return "- Criteria for sign-off and verification.";
        if (key.includes("introduction"))
            return `SDS for ${projectKey ?? "project"} derived from available PDR/RFP context.`;
        return "- TBD";
    };
    // NOTE(review): `title` is interpolated into a RegExp unescaped — a template
    // heading containing regex metacharacters (e.g. "(", "?") would break the
    // match. Confirm template headings are plain text.
    const hasHeading = (title) => new RegExp(`^#{1,6}\\s+${title}\\b`, "im").test(normalized);
    const parts = [];
    // Add a document title only if the draft doesn't already start one.
    if (!/^#\s+/m.test(normalized)) {
        parts.push(`# Software Design Specification${projectKey ? `: ${projectKey}` : ""}`);
    }
    if (normalized)
        parts.push(normalized);
    // Append any sections the draft is missing entirely.
    for (const section of sections) {
        if (hasHeading(section))
            continue;
        parts.push(`## ${section}`);
        parts.push(fallbackFor(section));
    }
    let structured = parts.join("\n\n");
    // Second pass: fill sections that exist but have empty bodies.
    for (const section of sections) {
        structured = ensureSectionContent(structured, section, fallbackFor(section));
    }
    return structured;
};
593
// Resolve the ordered, de-duplicated list of SDS section titles: the
// template's own headings when it declares any, otherwise the canonical
// default outline. Duplicates are removed case-insensitively, keeping the
// first spelling encountered.
const getSdsSections = (template) => {
    const headingMarker = /^#{1,6}\s+/;
    const templateHeadings = [];
    for (const rawLine of template.split(/\r?\n/)) {
        const line = rawLine.trim();
        if (headingMarker.test(line)) {
            templateHeadings.push(line.replace(headingMarker, "").trim());
        }
    }
    const defaultSections = [
        "Introduction",
        "Goals & Scope",
        "Architecture Overview",
        "Components & Responsibilities",
        "Data Model & Persistence",
        "Interfaces & Contracts",
        "Non-Functional Requirements",
        "Security & Compliance",
        "Failure Modes & Resilience",
        "Risks & Mitigations",
        "Assumptions",
        "Open Questions",
        "Acceptance Criteria",
    ];
    const candidates = templateHeadings.length > 0 ? templateHeadings : defaultSections;
    // Keep insertion order while collapsing case-insensitive duplicates.
    const byKey = new Map();
    for (const title of candidates) {
        const key = title.toLowerCase();
        if (!byKey.has(key)) {
            byKey.set(key, title);
        }
    }
    return [...byKey.values()];
};
625
// Ask the agent to de-duplicate and normalize an SDS draft while keeping the
// required H2 outline intact. `agent` is unused here but retained for
// signature parity with the other draft-refinement helpers.
const tidySdsDraft = async (draft, sections, agent, invoke) => {
    const requirementLines = [
        `- Keep exactly one instance of each H2 section in this order: ${sections.join(", ")}.`,
        "- Remove duplicate sections, bold headings pretending to be sections, placeholder sentences, and repeated bullet blocks. If content is duplicated, keep the richer/longer version.",
        "- Do not add new sections or reorder the required outline.",
        "- Keep content concise and aligned to the headings. Do not alter semantics.",
        "- Return only the cleaned markdown.",
    ];
    const prompt = [
        "Tidy the following Software Design Specification markdown:",
        draft,
        "",
        "Requirements:",
        ...requirementLines,
    ].join("\n");
    const response = await invoke(prompt);
    return response.output.trim();
};
640
// Section-by-section enrichment pass for an SDS draft: each listed section is
// sent to the agent with the assembled context and replaced by the agent's
// richer version when one is returned. Sections run sequentially so later
// prompts see earlier replacements via `enriched`.
// NOTE(review): `agent` is unused here; kept for signature parity — confirm.
const enrichSdsDraft = async (draft, sections, agent, context, projectKey, invoke) => {
    let enriched = draft;
    // One line per context block; placeholder when nothing was assembled.
    const contextLines = context.blocks.map((b) => `- ${b.label}: ${b.summary}`).join("\n") || "- (no additional context)";
    for (const sectionTitle of sections) {
        const current = extractSection(enriched, sectionTitle);
        const currentBody = current?.body ?? "";
        const prompt = [
            `You are enriching an SDS section "${sectionTitle}" for project ${projectKey ?? "(unspecified)"} using only provided context.`,
            `Context summary: ${context.summary}`,
            `Context blocks:\n${contextLines}`,
            `Current section "${sectionTitle}":\n${currentBody || "(empty)"}`,
            "Enrich this section with concrete, actionable content. Keep it concise (bullets acceptable).",
            "Do NOT remove the heading. Return only the updated section, starting with the heading. Do not include any other sections.",
        ]
            .filter(Boolean)
            .join("\n\n");
        const { output } = await invoke(prompt);
        const replacement = parseSectionFromAgentOutput(output, sectionTitle);
        // Keep the existing body when the agent returned nothing usable.
        if (replacement && replacement.trim().length > 0) {
            enriched = replaceSection(enriched, sectionTitle, replacement);
        }
    }
    return enriched;
};
664
/**
 * Parse agent-produced table-of-contents text into heading titles.
 * Accepts bullets ("- Title"), numbered lines ("1. Title"), markdown
 * headings ("## Title"), or combinations thereof; blank lines are ignored
 * and duplicates are removed case-insensitively.
 *
 * Fix: the bullet marker is now stripped BEFORE the number prefix. The
 * previous order stripped the number first, so a numbered bullet like
 * "- 1. Title" kept its "1." and produced the heading "1. Title".
 *
 * @param toc      raw TOC text from the agent
 * @param fallback headings to use when nothing parseable is found
 * @returns non-empty array of heading titles
 */
const parseTocHeadings = (toc, fallback) => {
    const lines = toc
        .split(/\r?\n/)
        .map((l) => l.trim())
        .filter(Boolean);
    const headings = [];
    const seen = new Set();
    for (const line of lines) {
        const stripped = line
            .replace(/^[-*+]\s*/, "")
            .replace(/^[\d]+\.\s*/, "")
            .replace(/^#+\s*/, "")
            .trim();
        if (!stripped)
            continue;
        const key = stripped.toLowerCase();
        if (seen.has(key))
            continue;
        seen.add(key);
        headings.push(stripped);
    }
    if (headings.length === 0)
        return fallback;
    return headings;
};
689
// Turn TOC text into { title, label } entries, preserving any explicit
// numbering (e.g. "2.1") so section headings can mirror it. Unlabeled
// entries are numbered by position afterwards; an unparseable TOC falls
// back to sequentially numbered `fallback` titles.
const parseTocEntries = (toc, fallback) => {
    const seen = new Set();
    const entries = [];
    for (const rawLine of toc.split(/\r?\n/)) {
        const line = rawLine.trim();
        if (!line)
            continue;
        const withoutBullet = line.replace(/^[-*+]\s*/, "");
        const numbered = withoutBullet.match(/^(\d+(?:\.\d+)*)[.)]?\s+(.*)$/);
        const label = numbered?.[1] ? `${numbered[1]}.` : undefined;
        const title = (numbered?.[2] ?? withoutBullet).replace(/^#+\s*/, "").trim();
        if (!title)
            continue;
        const dedupeKey = `${(label ?? "").toLowerCase()}|${title.toLowerCase()}`;
        if (seen.has(dedupeKey))
            continue;
        seen.add(dedupeKey);
        entries.push({ title, label });
    }
    if (entries.length === 0) {
        return fallback.map((title, idx) => ({ title, label: `${idx + 1}.` }));
    }
    // Number any unlabeled entries by their position so every heading
    // carries a label.
    entries.forEach((entry, idx) => {
        if (!entry.label) {
            entry.label = `${idx + 1}.`;
        }
    });
    return entries;
};
722
// Iteratively rebuild the PDR: ask the agent for a table of contents first,
// then generate each section in TOC order, persisting the growing document
// to `outputPath` after every step so partial progress survives a crash.
const buildIterativePdr = async (projectKey, context, firstDraft, outputPath, invoke) => {
    const header = `# Product Design Review${projectKey ? `: ${projectKey}` : ""}`;
    await ensureDir(outputPath);
    const tocPrompt = [
        "Generate ONLY a concise table of contents for the Product Design Review using the provided RFP and first draft. Do not include any section content.",
        "Return bullets or numbered lines that represent the H2 sections in order.",
        `RFP path: ${context.rfp.path ?? context.rfp.id ?? "RFP"}`,
        "RFP excerpt:",
        (context.rfp.content ?? "").slice(0, 4000),
        "Current PDR draft:",
        firstDraft,
    ]
        .filter(Boolean)
        .join("\n\n");
    const { output: tocOutput } = await invoke(tocPrompt);
    // Fall back to the first variant of each required PDR heading when the
    // agent's TOC cannot be parsed.
    const tocHeadings = parseTocHeadings(tocOutput, PDR_REQUIRED_HEADINGS.map((variants) => variants[0]));
    let currentDoc = [header, "## Table of Contents", cleanBody(tocOutput)].join("\n\n");
    await fs.writeFile(outputPath, currentDoc, "utf8");
    // One agent call per TOC heading; the prompt includes the document built
    // so far (truncated) so sections stay consistent with each other.
    for (const heading of tocHeadings) {
        const sectionPrompt = [
            `Generate the section "${heading}" for a Product Design Review.`,
            `Project: ${projectKey ?? "(unspecified)"}`,
            `RFP path: ${context.rfp.path ?? context.rfp.id ?? "RFP"}`,
            "RFP excerpt:",
            (context.rfp.content ?? "").slice(0, 4000),
            "First PDR draft (saved as first-draft):",
            firstDraft.slice(0, 8000),
            "Current improved document so far:",
            currentDoc.slice(0, 8000),
            "Other available docs: none beyond RFP and first draft (no SDS exists yet).",
            "Table of contents:",
            tocHeadings.map((h) => `- ${h}`).join("\n"),
            "Requirements:",
            "- Return only this section starting with the proper H2 heading.",
            "- Be concrete, avoid placeholders, and align with the TOC heading text.",
        ]
            .filter(Boolean)
            .join("\n\n");
        const { output: sectionOutput } = await invoke(sectionPrompt);
        // Prefer the body under the matching heading; otherwise clean the
        // raw output and use it as-is.
        const parsed = parseSectionFromAgentOutput(sectionOutput, heading) ?? cleanBody(sectionOutput);
        currentDoc = `${currentDoc}\n\n## ${heading}\n${parsed}`;
        await fs.writeFile(outputPath, currentDoc, "utf8");
    }
    // Always close with a Source RFP section pointing back at the input doc.
    if (!/^\s*##\s+Source RFP\b/im.test(currentDoc)) {
        currentDoc = `${currentDoc}\n\n## Source RFP\n${context.rfp.path ?? context.rfp.id ?? "RFP"}`;
        await fs.writeFile(outputPath, currentDoc, "utf8");
    }
    return currentDoc;
};
771
// Iteratively rebuild the SDS, mirroring buildIterativePdr: agent-generated
// TOC first (with explicit numbering), then one section per TOC entry, with
// the document persisted to `outputPath` after each step.
const buildIterativeSds = async (projectKey, context, firstDraft, sections, outputPath, invoke) => {
    const header = `# Software Design Specification${projectKey ? `: ${projectKey}` : ""}`;
    await ensureDir(outputPath);
    const tocPrompt = [
        "Generate ONLY a concise table of contents for the Software Design Specification using the provided context and first draft. Do not include section content.",
        "Return numbered lines that represent the H2 sections in order (e.g., '1. Introduction', '2. Goals & Scope'). Include numbers so they can be mirrored in section headings.",
        `Context summary: ${context.summary}`,
        "Existing SDS draft:",
        firstDraft,
    ]
        .filter(Boolean)
        .join("\n\n");
    const { output: tocOutput } = await invoke(tocPrompt);
    // `sections` is the fallback outline when the agent TOC is unparseable.
    const tocEntries = parseTocEntries(tocOutput, sections);
    const tocHeadings = tocEntries.map((e) => (e.label ? `${e.label} ${e.title}` : e.title));
    let currentDoc = [header, "## Table of Contents", cleanBody(tocOutput)].join("\n\n");
    await fs.writeFile(outputPath, currentDoc, "utf8");
    // Concatenate RFP, PDRs, and any prior SDS docs (each truncated to 4000
    // chars) as shared reference material for every section prompt.
    const referenceDocs = [
        context.rfp?.content ?? "",
        ...context.pdrs.map((p) => p.content ?? ""),
        ...context.existingSds.map((s) => s.content ?? ""),
    ]
        .filter(Boolean)
        .map((c) => c.slice(0, 4000))
        .join("\n\n---\n\n");
    for (const entry of tocEntries) {
        const heading = entry.label ? `${entry.label} ${entry.title}` : entry.title;
        const sectionPrompt = [
            `Generate the section "${heading}" for a Software Design Specification.`,
            `Project: ${projectKey ?? "(unspecified)"}`,
            `Context summary: ${context.summary}`,
            "Reference materials:",
            referenceDocs || "(no additional docs)",
            "First SDS draft (saved as first-draft):",
            firstDraft.slice(0, 8000),
            "Current improved document so far:",
            currentDoc.slice(0, 8000),
            "PDR and RFP content have been provided above as reference.",
            "Table of contents:",
            tocHeadings.map((h) => `- ${h}`).join("\n"),
            "Requirements:",
            "- Return only this section starting with the proper H2 heading.",
            "- Be concrete, avoid placeholders, and align with the TOC heading text.",
        ]
            .filter(Boolean)
            .join("\n\n");
        const { output: sectionOutput } = await invoke(sectionPrompt);
        // Prefer the body under the matching heading; otherwise clean the
        // raw output and use it as-is.
        const parsed = parseSectionFromAgentOutput(sectionOutput, heading) ?? cleanBody(sectionOutput);
        currentDoc = `${currentDoc}\n\n## ${heading}\n${parsed}`;
        await fs.writeFile(outputPath, currentDoc, "utf8");
    }
    return currentDoc;
};
824
/**
 * Report whether a section titled `title` is present in the draft, either as
 * a markdown heading (H1-H6) or a bold pseudo-heading ("**Title**"). An
 * existing heading is treated as having content because fallback bodies are
 * injected elsewhere.
 *
 * Fix: the title is now regex-escaped before interpolation. Previously a
 * title containing metacharacters (e.g. "Interfaces (APIs)") threw a
 * SyntaxError from the RegExp constructor, and characters like "." matched
 * any character.
 */
const headingHasContent = (draft, title) => {
    const safeTitle = title.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const headingRegex = new RegExp(`^#{1,6}\\s+${safeTitle}\\b`, "im");
    if (headingRegex.test(draft)) {
        return true;
    }
    // Agents sometimes emit bold pseudo-headings like "**Introduction**";
    // treat those as present as well.
    const boldRegex = new RegExp(`\\*\\*${safeTitle}\\*\\*`, "i");
    return boldRegex.test(draft);
};
835
/**
 * Promote bold pseudo-headings ("**Title**" alone on a line) to real H2
 * headings for each known title.
 *
 * Fix: titles are regex-escaped before interpolation; previously a title
 * containing metacharacters (parentheses, "+", ".") threw or failed to
 * match, leaving the bold pseudo-heading in place.
 */
const normalizeHeadingsToH2 = (draft, titles) => {
    const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    let updated = draft;
    for (const title of titles) {
        // Non-global: only the first bold occurrence per title is promoted,
        // matching the original behavior.
        const boldLine = new RegExp(`^\\s*\\*\\*${escapeRegExp(title)}\\*\\*\\s*$`, "im");
        updated = updated.replace(boldLine, `## ${title}`);
    }
    return updated;
};
843
// Boilerplate sentences that agents tend to echo back from prompts. cleanBody
// drops any line matching one of these so placeholder text never reaches the
// final document. Each pattern tolerates an optional leading bullet marker.
const PLACEHOLDER_PATTERNS = [
    /^[-*+.]?\s*Describe the system architecture/i,
    /^[-*+.]?\s*List key interfaces/i,
    /^[-*+.]?\s*Outline the functional boundaries/i,
    /^[-*+.]?\s*Outstanding questions/i,
    /^[-*+.]?\s*Performance, reliability, compliance/i,
    /^[-*+.]?\s*Enumerate risks from the RFP/i,
];
851
// Normalize an agent-produced section body: strip stray markdown headings,
// bold pseudo-headings and bare titles that duplicate required PDR headings,
// drop known placeholder lines, then de-duplicate near-identical lines.
const cleanBody = (body) => {
    const requiredTitles = PDR_REQUIRED_HEADINGS.flat().map((t) => t.toLowerCase());
    // Canonical key for duplicate detection: lowercase with markdown markers
    // removed and punctuation runs collapsed to single spaces.
    const normalizeLine = (line) => line
        .toLowerCase()
        .replace(/[`*_]/g, "")
        .replace(/[^a-z0-9]+/g, " ")
        .trim();
    const lines = body
        .split(/\r?\n/)
        .map((l) => l.trim())
        .filter((l) => {
            if (!l)
                return false;
            if (/^#{1,6}\s+/.test(l))
                return false; // strip stray headings
            // Bold line whose text is a required heading title: a pseudo-heading.
            if (/^\*{2}.+?\*{2}$/.test(l) && requiredTitles.includes(l.replace(/\*/g, "").toLowerCase()))
                return false;
            if (PLACEHOLDER_PATTERNS.some((p) => p.test(l)))
                return false;
            if (requiredTitles.includes(l.toLowerCase()))
                return false; // drop stray title text
            return true;
        });
    const deduped = [];
    const seen = [];
    // A line is a duplicate when its key equals, contains, or is contained by
    // any previously kept key. NOTE(review): this scan is O(n^2) in the line
    // count — fine for section-sized bodies, revisit if fed whole documents.
    for (const line of lines) {
        if (!line)
            continue;
        const key = normalizeLine(line);
        if (!key)
            continue;
        const isDuplicate = seen.some((prev) => prev === key || prev.includes(key) || key.includes(prev));
        if (isDuplicate)
            continue;
        seen.push(key);
        deduped.push(line);
    }
    return deduped.join("\n").trim();
};
890
/**
 * Locate the section under a heading matching `title` (any level H1-H6).
 * The body runs until the next heading or the true end of input —
 * `(?![\s\S])` rather than `$`, because `$` with the `m` flag stops at
 * every line end.
 *
 * Fix: the title is regex-escaped before interpolation; previously titles
 * containing metacharacters threw a SyntaxError, and "." matched any char.
 *
 * @returns { heading, body } with the body trimmed, or undefined when the
 *   heading is not present.
 */
const extractSection = (draft, title) => {
    const safeTitle = title.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const regex = new RegExp(`(^#{1,6}\\s+${safeTitle}\\b)([\\s\\S]*?)(?=^#{1,6}\\s+|(?![\\s\\S]))`, "im");
    const match = draft.match(regex);
    if (!match)
        return undefined;
    return { heading: match[1], body: (match[2] ?? "").trim() };
};
897
// Collect every occurrence of the section (real markdown heading or bold
// pseudo-heading), clean each candidate body, and return the longest
// non-empty one; undefined when no occurrence has usable content.
const getBestSectionBody = (draft, title) => {
    const headingPattern = `(^#{1,6}\\s+${title}\\b[^\\n]*$|^\\*\\*${title}\\*\\*\\s*$)`;
    const sectionRegex = new RegExp(`${headingPattern}([\\s\\S]*?)(?=^#{1,6}\\s+|^\\*\\*[^\\n]+\\*\\*\\s*$|(?![\\s\\S]))`, "gim");
    const candidates = [];
    for (const occurrence of draft.matchAll(sectionRegex)) {
        const body = cleanBody(occurrence[2] ?? "");
        if (body.trim().length > 0) {
            candidates.push(body);
        }
    }
    // Longest cleaned body wins; undefined when every candidate was empty.
    candidates.sort((a, b) => b.length - a.length);
    return candidates[0];
};
911
/**
 * Replace the body of the section titled `title` with a cleaned version of
 * `newBody`, appending a fresh H2 section when the heading is absent.
 *
 * Fixes:
 * - The terminator lookahead now uses `(?![\s\S])` (true end of input)
 *   instead of `$`. With the `m` flag, `$` matched the end of the heading
 *   line itself, so the lazy body group stopped immediately and the OLD body
 *   was left duplicated below the inserted one.
 * - The replacement is built by a function so `$`-sequences in agent output
 *   (`$&`, `$'`, …) are not interpreted as replacement patterns.
 * - The title is regex-escaped so metacharacters cannot throw or mis-match.
 */
const replaceSection = (draft, title, newBody) => {
    const normalizedBody = cleanBody(newBody);
    const safeTitle = title.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const regex = new RegExp(`(^#{1,6}\\s+${safeTitle}\\b)([\\s\\S]*?)(?=^#{1,6}\\s+|(?![\\s\\S]))`, "im");
    if (regex.test(draft)) {
        return draft.replace(regex, (_match, heading) => `${heading}\n\n${normalizedBody}\n\n`);
    }
    return `${draft.trimEnd()}\n\n## ${title}\n${normalizedBody}\n`;
};
919
// Extract the usable body for `title` from raw agent output. Tries, in
// order: the body under a matching heading, any text before the first
// heading, then the whole trimmed output. Returns undefined for empty output.
const parseSectionFromAgentOutput = (output, title) => {
    // Prefer content under a heading matching the title.
    const extracted = extractSection(output, title);
    if (extracted && extracted.body.length > 0)
        return cleanBody(extracted.body);
    // Fallback: if the agent returned without heading, use text before any other heading.
    const preHeading = output.split(/^#{1,6}\s+/m)[0]?.trim();
    if (preHeading && preHeading.length > 0)
        return cleanBody(preHeading);
    // Last resort: clean the whole output; undefined when nothing remains.
    const trimmed = output.trim();
    return trimmed.length > 0 ? cleanBody(trimmed) : undefined;
};
931
// Enrichment pass for a PDR draft: each entry of PDR_ENRICHMENT_SECTIONS is
// sent to the agent with its guidance bullets and replaced by the agent's
// richer version when one is returned. Sections run sequentially so later
// prompts see earlier replacements via `enriched`.
// NOTE(review): `agent` is unused here; kept for signature parity — confirm.
const enrichPdrDraft = async (draft, agent, context, projectKey, invoke) => {
    let enriched = draft;
    for (const section of PDR_ENRICHMENT_SECTIONS) {
        const current = extractSection(enriched, section.title);
        const currentBody = current?.body ?? "";
        // Guidance bullets are pre-joined for the "Guidance:" prompt block.
        const guidance = section.guidance.join("\n- ");
        const prompt = [
            `You are enriching a Product Design Review section for project ${projectKey ?? "(unspecified)"} using only provided context.`,
            `Context summary: ${context.summary}`,
            `RFP cues:\n${context.bullets.map((b) => `- ${b}`).join("\n") || "- (none)"}`,
            `Current section "${section.title}":\n${currentBody || "(empty)"}`,
            "Enrich this section with concrete, actionable content. Keep it concise (bullets ok).",
            "Do NOT remove the heading. Return only the updated section, starting with the heading. Do not include any other sections.",
            `Guidance:\n- ${guidance}`,
        ]
            .filter(Boolean)
            .join("\n\n");
        const { output } = await invoke(prompt);
        const replacement = parseSectionFromAgentOutput(output, section.title);
        // Keep the existing body when the agent returned nothing usable.
        if (replacement && replacement.trim().length > 0) {
            enriched = replaceSection(enriched, section.title, replacement);
        }
    }
    return enriched;
};
956
/**
 * Guarantee that the section titled `title` exists and has a non-empty body,
 * appending the section (when missing) or injecting `fallback` (when empty).
 *
 * Fixes:
 * - The previous pattern terminated the body group with `(^#{1,6}\s+|$)`;
 *   with the `m` flag that `$` matches the end of the heading line itself,
 *   so the body always appeared empty and the fallback was injected above
 *   real content, duplicating sections. The terminator is now a lookahead
 *   for the next heading or true end of input, `(?=^#{1,6}\s+|(?![\s\S]))`.
 * - The title is regex-escaped so metacharacters cannot throw.
 * - The replacement is built by a function so `$`-sequences in `fallback`
 *   are not interpreted as replacement patterns.
 */
const ensureHeadingContent = (draft, title, fallback) => {
    const safeTitle = title.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const regex = new RegExp(`(^#{1,6}\\s+${safeTitle}\\b[^\\n]*)([\\s\\S]*?)(?=^#{1,6}\\s+|(?![\\s\\S]))`, "im");
    const match = draft.match(regex);
    if (!match) {
        // Heading missing entirely: append a fresh H2 section with fallback.
        return `${draft.trimEnd()}\n\n## ${title}\n${fallback}\n`;
    }
    if (match[2].trim().length > 0) {
        return draft; // Section already has content; leave the draft alone.
    }
    // Heading present but empty: inject the fallback body beneath it.
    return draft.replace(regex, () => `${match[1]}\n\n${fallback}\n\n`);
};
967
// Best-effort read of the current git branch from .git/HEAD. Returns the
// branch name for a symbolic ref, the raw HEAD contents for a detached
// head, or undefined when the file cannot be read.
const readGitBranch = async (workspaceRoot) => {
    try {
        const headFile = path.join(workspaceRoot, ".git", "HEAD");
        const head = await fs.readFile(headFile, "utf8");
        const symbolicRef = /ref: refs\/heads\/(.+)/.exec(head);
        if (symbolicRef) {
            return symbolicRef[1].trim();
        }
        return head.trim();
    }
    catch {
        // Missing .git directory or unreadable HEAD — branch is unknown.
        return undefined;
    }
};
978
+ export class DocsService {
979
+ constructor(workspace, deps) {
980
+ this.workspace = workspace;
981
+ this.docdex = deps?.docdex ?? new DocdexClient({ workspaceRoot: workspace.workspaceRoot });
982
+ this.jobService = deps?.jobService ?? new JobService(workspace, undefined, { noTelemetry: deps?.noTelemetry });
983
+ this.repo = deps.repo;
984
+ this.agentService = deps.agentService;
985
+ this.routingService = deps.routingService;
986
+ }
987
    // Async factory: builds the full dependency set (global repo, agent and
    // routing services, docdex client, job service) and returns a wired
    // DocsService. Prefer this over `new` when defaults are wanted.
    static async create(workspace, options = {}) {
        const repo = await GlobalRepository.create();
        const agentService = new AgentService(repo);
        const routingService = await RoutingService.create();
        const docdex = new DocdexClient({
            workspaceRoot: workspace.workspaceRoot,
            // Explicit workspace config wins over the environment override.
            baseUrl: workspace.config?.docdexUrl ?? process.env.MCODA_DOCDEX_URL,
        });
        const jobService = new JobService(workspace, undefined, { noTelemetry: options.noTelemetry });
        return new DocsService(workspace, { repo, agentService, routingService, docdex, jobService, noTelemetry: options.noTelemetry });
    }
998
+ async close() {
999
+ if (this.agentService.close) {
1000
+ await this.agentService.close();
1001
+ }
1002
+ if (this.repo.close) {
1003
+ await this.repo.close();
1004
+ }
1005
+ if (this.jobService.close) {
1006
+ await this.jobService.close();
1007
+ }
1008
+ }
1009
+ defaultPdrOutputPath(projectKey, rfpPath) {
1010
+ const slug = slugify(projectKey ?? (rfpPath ? path.basename(rfpPath, path.extname(rfpPath)) : "pdr"));
1011
+ return path.join(this.workspace.mcodaDir, "docs", "pdr", `${slug}.md`);
1012
+ }
1013
+ defaultSdsOutputPath(projectKey) {
1014
+ const slug = slugify(projectKey ?? "sds");
1015
+ return path.join(this.workspace.mcodaDir, "docs", "sds", `${slug}.md`);
1016
+ }
1017
+ async loadSdsTemplate(templateName) {
1018
+ const names = templateName
1019
+ ? [templateName.replace(/\.md$/i, "")]
1020
+ : ["SDS_default", "sds", "SDS"];
1021
+ const dirs = [
1022
+ path.join(this.workspace.mcodaDir, "docs", "templates"),
1023
+ path.join(this.workspace.workspaceRoot, "docs", "templates"),
1024
+ ];
1025
+ for (const dir of dirs) {
1026
+ for (const name of names) {
1027
+ const candidate = path.join(dir, `${name}.md`);
1028
+ try {
1029
+ const content = await fs.readFile(candidate, "utf8");
1030
+ return { name, content };
1031
+ }
1032
+ catch {
1033
+ // try next
1034
+ }
1035
+ }
1036
+ }
1037
+ return { name: templateName ?? "default", content: DEFAULT_SDS_TEMPLATE };
1038
+ }
1039
+ async resolveAgent(agentName, commandAliases = ["docs-pdr-generate", "docs:pdr:generate", "pdr"]) {
1040
+ const commandName = commandAliases[commandAliases.length - 1] ?? "pdr";
1041
+ const resolved = await this.routingService.resolveAgentForCommand({
1042
+ workspace: this.workspace,
1043
+ commandName,
1044
+ overrideAgentSlug: agentName,
1045
+ });
1046
+ return resolved.agent;
1047
+ }
1048
    // Persist the PDR markdown to disk.
    // NOTE(review): ensureDir appears to create the containing directory for
    // a file path (see its other call sites) — confirm helper semantics.
    async writePdrFile(outPath, content) {
        await ensureDir(outPath);
        await fs.writeFile(outPath, content, "utf8");
    }
1052
+ async registerPdr(outPath, content, projectKey) {
1053
+ const branch = this.workspace.config?.branch ?? (await readGitBranch(this.workspace.workspaceRoot));
1054
+ return this.docdex.registerDocument({
1055
+ docType: "PDR",
1056
+ path: outPath,
1057
+ content,
1058
+ metadata: {
1059
+ workspace: this.workspace.workspaceId,
1060
+ projectKey,
1061
+ branch,
1062
+ status: "draft",
1063
+ },
1064
+ });
1065
+ }
1066
    // Persist the SDS markdown to disk.
    // NOTE(review): ensureDir appears to create the containing directory for
    // a file path (see its other call sites) — confirm helper semantics.
    async writeSdsFile(outPath, content) {
        await ensureDir(outPath);
        await fs.writeFile(outPath, content, "utf8");
    }
1070
    // Fail fast when docdex is reachable neither via a configured URL nor a
    // local document store; SDS generation depends on docdex retrieval.
    async assertSdsDocdexProfile() {
        const base = this.workspace.config?.docdexUrl ?? process.env.MCODA_DOCDEX_URL;
        if (base)
            return; // Remote docdex configured; no local store required.
        const localStore = path.join(this.workspace.workspaceRoot, ".mcoda", "docdex", "documents.json");
        try {
            // Existence check only; contents are validated elsewhere.
            await fs.access(localStore);
        }
        catch {
            throw new Error("Docdex is not configured for SDS retrieval (missing docdexUrl and no local store). Configure docdexUrl or index docs with an sds_default profile.");
        }
    }
1082
+ async registerSds(outPath, content, projectKey) {
1083
+ const branch = this.workspace.config?.branch ?? (await readGitBranch(this.workspace.workspaceRoot));
1084
+ return this.docdex.registerDocument({
1085
+ docType: "SDS",
1086
+ path: outPath,
1087
+ content,
1088
+ metadata: {
1089
+ workspace: this.workspace.workspaceId,
1090
+ projectKey,
1091
+ branch,
1092
+ status: "draft",
1093
+ },
1094
+ });
1095
+ }
1096
    // Run one agent invocation, preferring streaming when requested. Every
    // chunk (or the full output) is appended to the job log and forwarded to
    // the optional onToken callback. Returns { output, adapter, metadata }.
    async invokeAgent(agent, prompt, stream, jobId, onToken) {
        if (stream) {
            try {
                const generator = await this.agentService.invokeStream(agent.id, { input: prompt, metadata: { jobId } });
                const collected = [];
                for await (const chunk of generator) {
                    collected.push(chunk.output);
                    await this.jobService.appendLog(jobId, chunk.output);
                    if (onToken)
                        onToken(chunk.output);
                }
                // NOTE(review): the streaming path returns no metadata —
                // confirm whether per-chunk metadata should be surfaced.
                return { output: collected.join(""), adapter: agent.adapter };
            }
            catch {
                // Fall back to non-streaming invocation if streaming is not supported.
                // NOTE(review): this also swallows mid-stream failures after
                // partial output has been logged — confirm that is intended.
                const fallback = await this.agentService.invoke(agent.id, { input: prompt, metadata: { jobId } });
                await this.jobService.appendLog(jobId, fallback.output);
                if (onToken)
                    onToken(fallback.output);
                return { output: fallback.output, adapter: fallback.adapter, metadata: fallback.metadata };
            }
        }
        const result = await this.agentService.invoke(agent.id, { input: prompt, metadata: { jobId } });
        await this.jobService.appendLog(jobId, result.output);
        if (onToken)
            onToken(result.output);
        return { output: result.output, adapter: result.adapter, metadata: result.metadata };
    }
1124
+ async generatePdr(options) {
1125
+ if (!options.rfpId && !options.rfpPath) {
1126
+ throw new Error("Either --rfp-id or --rfp-path must be provided.");
1127
+ }
1128
+ if (options.rfpPath) {
1129
+ const resolved = path.isAbsolute(options.rfpPath)
1130
+ ? options.rfpPath
1131
+ : path.join(this.workspace.workspaceRoot, options.rfpPath);
1132
+ try {
1133
+ await fs.access(resolved);
1134
+ }
1135
+ catch {
1136
+ throw new Error(`RFP path does not exist: ${resolved}`);
1137
+ }
1138
+ }
1139
+ const commandRun = await this.jobService.startCommandRun("docs-pdr-generate", options.projectKey);
1140
+ const job = await this.jobService.startJob("pdr_generate", commandRun.id, options.projectKey, {
1141
+ commandName: commandRun.commandName,
1142
+ payload: {
1143
+ projectKey: options.projectKey,
1144
+ rfpId: options.rfpId,
1145
+ rfpPath: options.rfpPath,
1146
+ },
1147
+ });
1148
+ const assembler = new DocContextAssembler(this.docdex, this.workspace);
1149
+ try {
1150
+ const context = await assembler.buildContext({
1151
+ rfpId: options.rfpId,
1152
+ rfpPath: options.rfpPath,
1153
+ projectKey: options.projectKey,
1154
+ });
1155
+ const checkpoint = {
1156
+ stage: "context_built",
1157
+ timestamp: new Date().toISOString(),
1158
+ details: { rfp: context.rfp.path ?? context.rfp.id, docdexAvailable: context.docdexAvailable },
1159
+ };
1160
+ await this.jobService.writeCheckpoint(job.id, checkpoint);
1161
+ await this.jobService.recordTokenUsage({
1162
+ timestamp: new Date().toISOString(),
1163
+ workspaceId: this.workspace.workspaceId,
1164
+ commandName: "docs-pdr-generate",
1165
+ jobId: job.id,
1166
+ action: "docdex_context",
1167
+ promptTokens: 0,
1168
+ completionTokens: 0,
1169
+ metadata: { docdexAvailable: context.docdexAvailable },
1170
+ });
1171
+ const agent = await this.resolveAgent(options.agentName);
1172
+ const prompts = await this.agentService.getPrompts(agent.id);
1173
+ const runbook = (await readPromptIfExists(this.workspace, path.join("prompts", "commands", "pdr-generate.md"))) ||
1174
+ DEFAULT_PDR_RUNBOOK_PROMPT;
1175
+ let draft = "";
1176
+ let agentMetadata;
1177
+ let adapter = agent.adapter;
1178
+ const stream = options.agentStream ?? true;
1179
+ const skipValidation = process.env.MCODA_SKIP_PDR_VALIDATION === "1";
1180
+ let lastInvoke;
1181
+ for (let attempt = 0; attempt < 2; attempt += 1) {
1182
+ const prompt = buildRunPrompt(context, options.projectKey, prompts, attempt === 0 ? runbook : `${runbook}\n\nRETRY: The previous attempt failed validation. Ensure all required sections are present and non-empty. Do not leave placeholders.`);
1183
+ const invoke = async (input) => {
1184
+ const { output: out, adapter: usedAdapter, metadata } = await this.invokeAgent(agent, input, stream, job.id, options.onToken);
1185
+ adapter = usedAdapter;
1186
+ agentMetadata = metadata;
1187
+ return { output: out, adapter: usedAdapter, metadata };
1188
+ };
1189
+ lastInvoke = invoke;
1190
+ const { output: agentOutput } = await invoke(prompt);
1191
+ const structured = ensureStructuredDraft(agentOutput, options.projectKey, context, context.rfp.path ?? context.rfp.id ?? "RFP");
1192
+ const valid = skipValidation || (validateDraft(structured) && headingHasContent(structured, "Introduction"));
1193
+ await this.jobService.recordTokenUsage({
1194
+ timestamp: new Date().toISOString(),
1195
+ workspaceId: this.workspace.workspaceId,
1196
+ commandName: "docs-pdr-generate",
1197
+ jobId: job.id,
1198
+ agentId: agent.id,
1199
+ modelName: agent.defaultModel,
1200
+ action: attempt === 0 ? "draft_pdr" : "draft_pdr_retry",
1201
+ promptTokens: estimateTokens(prompt),
1202
+ completionTokens: estimateTokens(agentOutput),
1203
+ metadata: { adapter, docdexAvailable: context.docdexAvailable, attempt },
1204
+ });
1205
+ if (valid) {
1206
+ draft = structured;
1207
+ break;
1208
+ }
1209
+ if (valid)
1210
+ break;
1211
+ const missing = missingPdrHeadings(draft);
1212
+ // eslint-disable-next-line no-console
1213
+ console.error(`[pdr validation] missing sections: ${missing.join(", ") || "none"}; introHasContent=${headingHasContent(draft, "Introduction")}; length=${draft.length}; attempt=${attempt + 1}`);
1214
+ if (attempt === 1) {
1215
+ throw new Error("PDR draft validation failed after retry (missing required sections or empty output).");
1216
+ }
1217
+ }
1218
+ if (!draft) {
1219
+ throw new Error("PDR draft generation failed; no valid draft produced.");
1220
+ }
1221
+ if (lastInvoke) {
1222
+ draft = await enrichPdrDraft(draft, agent, context, options.projectKey, lastInvoke);
1223
+ draft = ensureStructuredDraft(draft, options.projectKey, context, context.rfp.path ?? context.rfp.id ?? "RFP");
1224
+ }
1225
+ if (lastInvoke) {
1226
+ try {
1227
+ const tidiedRaw = await tidyPdrDraft(draft, agent, lastInvoke);
1228
+ const tidied = ensureStructuredDraft(tidiedRaw, options.projectKey, context, context.rfp.path ?? context.rfp.id ?? "RFP");
1229
+ const tidiedValid = validateDraft(tidied) && headingHasContent(tidied, "Introduction");
1230
+ const keepTidied = tidiedValid && tidied.length >= draft.length * 0.6;
1231
+ if (keepTidied) {
1232
+ draft = tidied;
1233
+ }
1234
+ }
1235
+ catch (error) {
1236
+ context.warnings.push(`Tidy pass skipped: ${error.message ?? "unknown error"}`);
1237
+ }
1238
+ }
1239
+ const outputPath = options.outPath ?? this.defaultPdrOutputPath(options.projectKey, context.rfp.path);
1240
+ if (!options.dryRun) {
1241
+ const firstDraftPath = path.join(this.workspace.mcodaDir, "docs", "pdr", `${path.basename(outputPath, path.extname(outputPath))}-first-draft.md`);
1242
+ await ensureDir(firstDraftPath);
1243
+ await fs.writeFile(firstDraftPath, draft, "utf8");
1244
+ try {
1245
+ const iterativeDraft = await buildIterativePdr(options.projectKey, context, draft, outputPath, lastInvoke ?? (async (input) => this.invokeAgent(agent, input, stream, job.id, options.onToken)));
1246
+ draft = iterativeDraft;
1247
+ }
1248
+ catch (error) {
1249
+ context.warnings.push(`Iterative PDR refinement failed; keeping first draft. ${String(error)}`);
1250
+ }
1251
+ }
1252
+ await this.jobService.writeCheckpoint(job.id, {
1253
+ stage: "draft_completed",
1254
+ timestamp: new Date().toISOString(),
1255
+ details: { length: draft.length },
1256
+ });
1257
+ let docdexId;
1258
+ let segments;
1259
+ let mirrorStatus = "skipped";
1260
+ if (options.dryRun) {
1261
+ context.warnings.push("Dry run enabled; PDR was not written to disk or registered in docdex.");
1262
+ }
1263
+ if (!options.dryRun) {
1264
+ await this.writePdrFile(outputPath, draft);
1265
+ if (context.docdexAvailable) {
1266
+ const registered = await this.registerPdr(outputPath, draft, options.projectKey);
1267
+ docdexId = registered.id;
1268
+ segments = (registered.segments ?? []).map((s) => s.id);
1269
+ await fs.writeFile(`${outputPath}.meta.json`, JSON.stringify({ docdexId, segments, projectKey: options.projectKey }, null, 2), "utf8");
1270
+ }
1271
+ const publicDocsDir = path.join(this.workspace.workspaceRoot, "docs", "pdr");
1272
+ const shouldMirror = this.workspace.config?.mirrorDocs !== false;
1273
+ if (shouldMirror) {
1274
+ try {
1275
+ await ensureDir(path.join(publicDocsDir, "placeholder"));
1276
+ const mirrorPath = path.join(publicDocsDir, path.basename(outputPath));
1277
+ await ensureDir(mirrorPath);
1278
+ await fs.writeFile(mirrorPath, draft, "utf8");
1279
+ mirrorStatus = "mirrored";
1280
+ }
1281
+ catch {
1282
+ // optional mirror skipped
1283
+ mirrorStatus = "failed";
1284
+ }
1285
+ }
1286
+ }
1287
+ await this.jobService.updateJobStatus(job.id, "completed", {
1288
+ payload: { outputPath, docdexId, segments, mirrorStatus },
1289
+ });
1290
+ await this.jobService.finishCommandRun(commandRun.id, "succeeded");
1291
+ return {
1292
+ jobId: job.id,
1293
+ commandRunId: commandRun.id,
1294
+ outputPath,
1295
+ draft,
1296
+ docdexId,
1297
+ warnings: context.warnings,
1298
+ };
1299
+ }
1300
+ catch (error) {
1301
+ await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: error.message });
1302
+ await this.jobService.finishCommandRun(commandRun.id, "failed", error.message);
1303
+ throw error;
1304
+ }
1305
+ }
1306
+ async tryResumeSds(resumeJobId, warnings) {
1307
+ const manifestPath = path.join(this.workspace.mcodaDir, "jobs", resumeJobId, "manifest.json");
1308
+ try {
1309
+ const raw = await fs.readFile(manifestPath, "utf8");
1310
+ const manifest = JSON.parse(raw);
1311
+ if (manifest.type && manifest.type !== "sds_generate") {
1312
+ throw new Error(`Job ${resumeJobId} is type ${manifest.type}, not sds_generate. Use a matching job id or rerun without --resume.`);
1313
+ }
1314
+ if (manifest.status === "running") {
1315
+ throw new Error(`Job ${resumeJobId} is still running; use "mcoda job watch --id ${resumeJobId}" to monitor.`);
1316
+ }
1317
+ const checkpoints = await this.jobService.readCheckpoints(resumeJobId);
1318
+ const lastCkpt = checkpoints[checkpoints.length - 1];
1319
+ const draftPath = lastCkpt?.details?.draftPath ??
1320
+ path.join(this.workspace.mcodaDir, "jobs", resumeJobId, "draft.md");
1321
+ let draft;
1322
+ try {
1323
+ draft = await fs.readFile(draftPath, "utf8");
1324
+ }
1325
+ catch {
1326
+ // missing draft is allowed; will re-run agent
1327
+ }
1328
+ if (manifest.status === "succeeded") {
1329
+ const outputPath = manifest.metadata?.outputPath ?? manifest.metadata?.output_path;
1330
+ warnings.push(`Resume requested; returning completed SDS from job ${resumeJobId}.`);
1331
+ return {
1332
+ job: manifest,
1333
+ completed: true,
1334
+ outputPath,
1335
+ draft,
1336
+ docdexId: manifest.metadata?.docdexId ?? manifest.metadata?.docdex_id,
1337
+ commandRunId: manifest.commandRunId ?? manifest.command_run_id,
1338
+ };
1339
+ }
1340
+ const resumeFromDraft = draft && lastCkpt?.stage === "draft_completed";
1341
+ if (resumeFromDraft) {
1342
+ warnings.push(`Resuming SDS generation from saved draft for job ${resumeJobId}.`);
1343
+ }
1344
+ else {
1345
+ warnings.push(`Resume requested for ${resumeJobId}; restarting agent draft generation.`);
1346
+ }
1347
+ return {
1348
+ job: manifest,
1349
+ completed: false,
1350
+ outputPath: manifest.metadata?.outputPath ?? manifest.metadata?.output_path,
1351
+ draft,
1352
+ docdexId: manifest.metadata?.docdexId ?? manifest.metadata?.docdex_id,
1353
+ };
1354
+ }
1355
+ catch (error) {
1356
+ if (error.code === "ENOENT") {
1357
+ warnings.push(`No resume data found for job ${resumeJobId}; starting a new SDS job.`);
1358
+ return undefined;
1359
+ }
1360
+ if (error instanceof Error)
1361
+ throw error;
1362
+ throw new Error(String(error));
1363
+ }
1364
+ }
1365
+ async generateSds(options) {
1366
+ await this.assertSdsDocdexProfile();
1367
+ const assembler = new DocContextAssembler(this.docdex, this.workspace);
1368
+ const warnings = [];
1369
+ const commandRun = await this.jobService.startCommandRun("docs-sds-generate", options.projectKey);
1370
+ let job;
1371
+ let resumeDraft;
1372
+ let resumeDocdexId;
1373
+ if (options.resumeJobId) {
1374
+ const resumed = await this.tryResumeSds(options.resumeJobId, warnings);
1375
+ if (resumed) {
1376
+ job = resumed.job;
1377
+ await this.jobService.updateJobStatus(job.id, "running", { resumedBy: commandRun.id });
1378
+ await this.jobService.writeCheckpoint(job.id, {
1379
+ stage: "resume_started",
1380
+ timestamp: new Date().toISOString(),
1381
+ details: { resumedBy: commandRun.id },
1382
+ });
1383
+ if (resumed.completed) {
1384
+ await this.jobService.finishCommandRun(commandRun.id, "succeeded");
1385
+ return {
1386
+ jobId: job.id,
1387
+ commandRunId: commandRun.id,
1388
+ outputPath: resumed.outputPath,
1389
+ draft: resumed.draft ?? "",
1390
+ docdexId: resumed.docdexId,
1391
+ warnings,
1392
+ };
1393
+ }
1394
+ resumeDraft = resumed.draft;
1395
+ resumeDocdexId = resumed.docdexId;
1396
+ }
1397
+ }
1398
+ if (!job) {
1399
+ job = await this.jobService.startJob("sds_generate", commandRun.id, options.projectKey, {
1400
+ commandName: commandRun.commandName,
1401
+ payload: {
1402
+ projectKey: options.projectKey,
1403
+ templateName: options.templateName,
1404
+ resumeJobId: options.resumeJobId,
1405
+ },
1406
+ });
1407
+ }
1408
+ try {
1409
+ const context = await assembler.buildSdsContext({ projectKey: options.projectKey });
1410
+ warnings.push(...context.warnings);
1411
+ await this.jobService.writeCheckpoint(job.id, {
1412
+ stage: "context_built",
1413
+ timestamp: new Date().toISOString(),
1414
+ details: {
1415
+ docdexAvailable: context.docdexAvailable,
1416
+ rfp: context.rfp?.path ?? context.rfp?.id,
1417
+ pdrCount: context.pdrs.length,
1418
+ existingSds: context.existingSds.length,
1419
+ },
1420
+ });
1421
+ await this.jobService.recordTokenUsage({
1422
+ timestamp: new Date().toISOString(),
1423
+ workspaceId: this.workspace.workspaceId,
1424
+ commandName: "docs-sds-generate",
1425
+ jobId: job.id,
1426
+ action: "docdex_context",
1427
+ promptTokens: 0,
1428
+ completionTokens: 0,
1429
+ metadata: { docdexAvailable: context.docdexAvailable },
1430
+ });
1431
+ const outputPath = (options.outPath ??
1432
+ job.metadata?.outputPath ??
1433
+ this.defaultSdsOutputPath(options.projectKey));
1434
+ const draftPath = path.join(this.workspace.mcodaDir, "jobs", job.id, "draft.md");
1435
+ const allowExisting = Boolean(options.resumeJobId);
1436
+ if (!options.force && !allowExisting) {
1437
+ try {
1438
+ await fs.access(outputPath);
1439
+ throw new Error(`Output already exists: ${outputPath}. Re-run with --force to overwrite or specify --out for a different path.`);
1440
+ }
1441
+ catch (error) {
1442
+ if (error.code !== "ENOENT") {
1443
+ throw error;
1444
+ }
1445
+ }
1446
+ }
1447
+ const agent = await this.resolveAgent(options.agentName, ["docs-sds-generate", "docs:sds:generate", "sds"]);
1448
+ const prompts = await this.agentService.getPrompts(agent.id);
1449
+ const template = await this.loadSdsTemplate(options.templateName);
1450
+ const sdsSections = getSdsSections(template.content);
1451
+ const runbook = (await readPromptIfExists(this.workspace, path.join("prompts", "commands", "sds-generate.md"))) ||
1452
+ (await readPromptIfExists(this.workspace, path.join("prompts", "sds", "generate.md"))) ||
1453
+ DEFAULT_SDS_RUNBOOK_PROMPT;
1454
+ let draft = resumeDraft ?? "";
1455
+ let agentMetadata;
1456
+ let adapter = agent.adapter;
1457
+ const stream = options.agentStream ?? true;
1458
+ const skipValidation = process.env.MCODA_SKIP_SDS_VALIDATION === "1";
1459
+ const invoke = async (input) => {
1460
+ const { output: out, adapter: usedAdapter, metadata } = await this.invokeAgent(agent, input, stream, job.id, options.onToken);
1461
+ adapter = usedAdapter;
1462
+ if (metadata)
1463
+ agentMetadata = metadata;
1464
+ return { output: out, adapter: usedAdapter, metadata };
1465
+ };
1466
+ if (!resumeDraft) {
1467
+ for (let attempt = 0; attempt < 2; attempt += 1) {
1468
+ const prompt = buildSdsRunPrompt(context, options.projectKey, prompts, attempt === 0
1469
+ ? runbook
1470
+ : `${runbook}\n\nRETRY: The previous attempt failed validation. Ensure all required sections are present and non-empty.`, template.content);
1471
+ const { output: agentOutput, adapter: usedAdapter } = await invoke(prompt);
1472
+ draft = ensureSdsStructuredDraft(agentOutput, options.projectKey, context, template.content);
1473
+ const valid = skipValidation || (validateSdsDraft(draft) && headingHasContent(draft, "Architecture"));
1474
+ await this.jobService.recordTokenUsage({
1475
+ timestamp: new Date().toISOString(),
1476
+ workspaceId: this.workspace.workspaceId,
1477
+ commandName: "docs-sds-generate",
1478
+ jobId: job.id,
1479
+ agentId: agent.id,
1480
+ modelName: agent.defaultModel,
1481
+ action: attempt === 0 ? "draft_sds" : "draft_sds_retry",
1482
+ promptTokens: estimateTokens(prompt),
1483
+ completionTokens: estimateTokens(agentOutput),
1484
+ metadata: {
1485
+ adapter,
1486
+ provider: adapter,
1487
+ docdexAvailable: context.docdexAvailable,
1488
+ template: template.name,
1489
+ attempt,
1490
+ },
1491
+ });
1492
+ if (valid)
1493
+ break;
1494
+ if (attempt === 1) {
1495
+ throw new Error("SDS draft validation failed after retry (missing required sections or empty output).");
1496
+ }
1497
+ }
1498
+ }
1499
+ else {
1500
+ const valid = skipValidation || (validateSdsDraft(draft) && headingHasContent(draft, "Architecture"));
1501
+ if (!valid) {
1502
+ warnings.push("Saved draft failed validation on resume; regenerating.");
1503
+ draft = "";
1504
+ }
1505
+ else {
1506
+ await this.jobService.recordTokenUsage({
1507
+ timestamp: new Date().toISOString(),
1508
+ workspaceId: this.workspace.workspaceId,
1509
+ commandName: "docs-sds-generate",
1510
+ jobId: job.id,
1511
+ action: "draft_sds_resume",
1512
+ promptTokens: 0,
1513
+ completionTokens: estimateTokens(draft),
1514
+ metadata: {
1515
+ adapter,
1516
+ provider: adapter,
1517
+ docdexAvailable: context.docdexAvailable,
1518
+ template: template.name,
1519
+ resumeFromJob: options.resumeJobId,
1520
+ },
1521
+ });
1522
+ }
1523
+ }
1524
+ if (!draft) {
1525
+ // regenerated draft in case resume draft was invalid
1526
+ const prompt = buildSdsRunPrompt(context, options.projectKey, prompts, runbook, template.content);
1527
+ const { output: agentOutput, adapter: usedAdapter } = await invoke(prompt);
1528
+ draft = ensureSdsStructuredDraft(agentOutput, options.projectKey, context, template.content);
1529
+ await this.jobService.recordTokenUsage({
1530
+ timestamp: new Date().toISOString(),
1531
+ workspaceId: this.workspace.workspaceId,
1532
+ commandName: "docs-sds-generate",
1533
+ jobId: job.id,
1534
+ agentId: agent.id,
1535
+ modelName: agent.defaultModel,
1536
+ action: "draft_sds_resume_regenerate",
1537
+ promptTokens: estimateTokens(prompt),
1538
+ completionTokens: estimateTokens(agentOutput),
1539
+ metadata: { adapter, provider: adapter, docdexAvailable: context.docdexAvailable, template: template.name },
1540
+ });
1541
+ }
1542
+ // Enrich each section sequentially after a valid base draft exists.
1543
+ draft = await enrichSdsDraft(draft, sdsSections, agent, context, options.projectKey, invoke);
1544
+ draft = ensureSdsStructuredDraft(draft, options.projectKey, context, template.content);
1545
+ if (!skipValidation && !(validateSdsDraft(draft) && headingHasContent(draft, "Architecture"))) {
1546
+ warnings.push("Enriched SDS draft failed validation; using structured fallback.");
1547
+ draft = ensureSdsStructuredDraft(draft, options.projectKey, context, template.content);
1548
+ }
1549
+ try {
1550
+ const tidiedRaw = await tidySdsDraft(draft, sdsSections, agent, invoke);
1551
+ const tidied = ensureSdsStructuredDraft(tidiedRaw, options.projectKey, context, template.content);
1552
+ if (skipValidation || (validateSdsDraft(tidied) && headingHasContent(tidied, "Architecture"))) {
1553
+ draft = tidied;
1554
+ }
1555
+ }
1556
+ catch (error) {
1557
+ warnings.push(`SDS tidy pass skipped: ${error.message ?? "unknown error"}`);
1558
+ }
1559
+ await fs.mkdir(path.dirname(draftPath), { recursive: true });
1560
+ await fs.writeFile(draftPath, draft, "utf8");
1561
+ const firstDraftPath = path.join(this.workspace.mcodaDir, "docs", "sds", `${path.basename(outputPath, path.extname(outputPath))}-first-draft.md`);
1562
+ await ensureDir(firstDraftPath);
1563
+ await fs.writeFile(firstDraftPath, draft, "utf8");
1564
+ try {
1565
+ const iterativeDraft = await buildIterativeSds(options.projectKey, context, draft, sdsSections, outputPath, invoke);
1566
+ draft = iterativeDraft;
1567
+ }
1568
+ catch (error) {
1569
+ warnings.push(`Iterative SDS refinement failed; keeping first draft. ${String(error)}`);
1570
+ }
1571
+ await this.jobService.writeCheckpoint(job.id, {
1572
+ stage: "draft_completed",
1573
+ timestamp: new Date().toISOString(),
1574
+ details: { length: draft.length, template: template.name, draftPath },
1575
+ });
1576
+ let docdexId;
1577
+ let segments;
1578
+ let mirrorStatus = "skipped";
1579
+ if (options.dryRun) {
1580
+ warnings.push("Dry run enabled; SDS was not written to disk or registered in docdex.");
1581
+ }
1582
+ if (!options.dryRun) {
1583
+ await this.writeSdsFile(outputPath, draft);
1584
+ if (context.docdexAvailable) {
1585
+ const registered = await this.registerSds(outputPath, draft, options.projectKey);
1586
+ docdexId = registered.id;
1587
+ segments = (registered.segments ?? []).map((s) => s.id);
1588
+ await fs.writeFile(`${outputPath}.meta.json`, JSON.stringify({ docdexId, segments, projectKey: options.projectKey }, null, 2), "utf8");
1589
+ }
1590
+ const publicDocsDir = path.join(this.workspace.workspaceRoot, "docs", "sds");
1591
+ const shouldMirror = this.workspace.config?.mirrorDocs !== false;
1592
+ if (shouldMirror) {
1593
+ try {
1594
+ await ensureDir(path.join(publicDocsDir, "placeholder"));
1595
+ const mirrorPath = path.join(publicDocsDir, path.basename(outputPath));
1596
+ await ensureDir(mirrorPath);
1597
+ await fs.writeFile(mirrorPath, draft, "utf8");
1598
+ mirrorStatus = "mirrored";
1599
+ }
1600
+ catch {
1601
+ mirrorStatus = "failed";
1602
+ }
1603
+ }
1604
+ }
1605
+ await this.jobService.updateJobStatus(job.id, "completed", {
1606
+ payload: {
1607
+ outputPath,
1608
+ docdexId,
1609
+ segments,
1610
+ template: template.name,
1611
+ mirrorStatus,
1612
+ agentMetadata,
1613
+ },
1614
+ });
1615
+ await this.jobService.finishCommandRun(commandRun.id, "succeeded");
1616
+ return {
1617
+ jobId: job.id,
1618
+ commandRunId: commandRun.id,
1619
+ outputPath,
1620
+ draft,
1621
+ docdexId,
1622
+ warnings,
1623
+ };
1624
+ }
1625
+ catch (error) {
1626
+ await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: error.message });
1627
+ await this.jobService.finishCommandRun(commandRun.id, "failed", error.message);
1628
+ throw error;
1629
+ }
1630
+ }
1631
+ }