@mcoda/core 0.1.24 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,908 @@
1
+ import path from "node:path";
2
+ import { promises as fs } from "node:fs";
3
+ import { WorkspaceRepository } from "@mcoda/db";
4
+ import { PathHelper } from "@mcoda/shared";
5
+ import { JobService } from "../jobs/JobService.js";
6
+ import { createEpicKeyGenerator, createStoryKeyGenerator, createTaskKeyGenerator } from "./KeyHelpers.js";
7
// Default number of audit/remediation passes before giving up.
const DEFAULT_MAX_ITERATIONS = 5;
// Default cap on remediation tasks created per iteration.
const DEFAULT_MAX_TASKS_PER_ITERATION = 24;
// Default minimum fraction of SDS signals that must be covered by the backlog.
const DEFAULT_MIN_COVERAGE_RATIO = 0.96;
// Upper bounds for SDS discovery and per-document signal extraction.
const SDS_SCAN_MAX_FILES = 120;
const SDS_HEADING_LIMIT = 200;
const SDS_FOLDER_LIMIT = 240;
// Number of gap items grouped into one generated remediation task.
const GAP_BUNDLE_SIZE = 4;
const REPORT_FILE_NAME = "task-sufficiency-report.json";
// Directories never descended into while scanning for SDS documents.
const ignoredDirs = new Set([".git", "node_modules", "dist", "build", ".mcoda", ".docdex"]);
// A file is treated as an SDS either by a suggestive filename ...
const sdsFilenamePattern = /(sds|software[-_ ]design|system[-_ ]design|design[-_ ]spec)/i;
// ... or by characteristic markers inside its content.
const sdsContentPattern = /(software design specification|system design specification|^#\s*sds\b)/im;
// Headings that are document boilerplate rather than implementable scope.
const nonImplementationHeadingPattern = /\b(revision history|table of contents|purpose|scope|definitions?|abbreviations?|glossary|references?|appendix|document control|authors?)\b/i;
// Headings whose vocabulary strongly suggests implementable functionality.
const likelyImplementationHeadingPattern = /\b(architecture|entity|entities|service|services|module|modules|component|components|pipeline|workflow|api|endpoint|schema|model|feature|store|database|ingestion|training|inference|ui|frontend|backend|ops|observability|security|deployment|solver|integration|testing|validation|contract|index|mapping|registry|cache|queue|event|job|task|migration|controller|router|policy)\b/i;
20
// Conventional top-level repository directories; a folder entry must be
// rooted at one of these to count as repository-relevant.
const repoRootSegments = new Set([
    "apps",
    "api",
    "backend",
    "config",
    "configs",
    "db",
    "deployment",
    "deployments",
    "docs",
    "frontend",
    "implementation",
    "infra",
    "internal",
    "packages",
    "scripts",
    "service",
    "services",
    "shared",
    "src",
    "test",
    "tests",
    "ui",
    "web",
]);
// Short filler words ignored when tokenizing headings.
const headingNoiseTokens = new Set(["and", "for", "from", "into", "the", "with"]);
46
/**
 * Lower-case `value` and reduce it to a single-spaced string of letters,
 * digits, and the `/ . -` separators; back-ticks, asterisks, and
 * underscores are treated as whitespace.
 */
const normalizeText = (value) => {
    const lowered = value.toLowerCase().replace(/[`*_]/g, " ");
    const restricted = lowered.replace(/[^a-z0-9/\s.-]+/g, " ");
    return restricted.replace(/\s+/g, " ").trim();
};
/** Build a stable `kind:normalized-value` anchor key for a signal. */
const normalizeAnchor = (kind, value) => {
    const body = normalizeText(value).replace(/\s+/g, " ").trim();
    return `${kind}:${body}`;
};
/** Drop falsy entries and duplicates, preserving first-seen order. */
const unique = (items) => [...new Set(items.filter(Boolean))];
54
/**
 * Remove markdown emphasis characters and leading list/blockquote
 * punctuation from a line, collapsing internal whitespace.
 */
const stripDecorators = (value) => {
    const noEmphasis = value.replace(/[`*_]/g, " ");
    const noLeadingPunct = noEmphasis.replace(/^[\s>:\-[\]().]+/, "");
    return noLeadingPunct.replace(/\s+/g, " ").trim();
};
/**
 * Strip decorators plus any leading "1.2.3 " section number from a
 * heading; falls back to the merely-stripped text when removing the
 * number would leave nothing.
 */
const normalizeHeadingCandidate = (value) => {
    const stripped = stripDecorators(value);
    const withoutSectionNumber = stripped.replace(/^\d+(?:\.\d+)*\s+/, "").trim();
    return withoutSectionNumber.length > 0 ? withoutSectionNumber : stripped;
};
63
/**
 * Heuristic: does an SDS heading describe implementable scope?
 * Rejects boilerplate headings, accepts known implementation vocabulary,
 * accepts numbered sections from chapter 3 onward, and otherwise requires
 * at least two substantive tokens.
 */
const headingLooksImplementationRelevant = (heading) => {
    const lowered = normalizeHeadingCandidate(heading).toLowerCase();
    if (!lowered || lowered.length < 3) {
        return false;
    }
    if (nonImplementationHeadingPattern.test(lowered)) {
        return false;
    }
    if (likelyImplementationHeadingPattern.test(lowered)) {
        return true;
    }
    // Numbered headings: chapters >= 3 are assumed to carry design content.
    const numbering = heading.trim().match(/^(\d+)(?:\.\d+)*(?:\s+|$)/);
    if (numbering) {
        const chapter = Number.parseInt(numbering[1] ?? "", 10);
        if (Number.isFinite(chapter) && chapter >= 3) {
            return true;
        }
    }
    const substantiveTokens = lowered
        .split(/\s+/)
        .map((word) => word.replace(/[^a-z0-9.-]+/g, ""))
        .filter((word) => word.length >= 4 && !headingNoiseTokens.has(word));
    return substantiveTokens.length >= 2;
};
83
/**
 * Canonicalize a folder-tree entry: strip decorators, a leading "./",
 * trailing slashes, and all whitespace. Returns undefined for entries
 * without a path separator or containing ellipsis/glob placeholders.
 */
const normalizeFolderEntry = (entry) => {
    const compact = stripDecorators(entry)
        .replace(/^\.?\//, "")
        .replace(/\/+$/, "")
        .replace(/\s+/g, "");
    const hasSeparator = compact.includes("/");
    const hasPlaceholder = compact.includes("...") || compact.includes("*");
    return hasSeparator && !hasPlaceholder ? compact : undefined;
};
94
/**
 * True when a folder entry plausibly refers to this repository's layout:
 * relative (not absolute, not drive-prefixed), at least two segments
 * deep, and rooted at a conventional top-level directory.
 */
const folderEntryLooksRepoRelevant = (entry) => {
    const candidate = normalizeFolderEntry(entry);
    if (!candidate) {
        return false;
    }
    const isAbsolute = candidate.startsWith("/") || /^[A-Za-z]:\//.test(candidate);
    if (isAbsolute) {
        return false;
    }
    const parts = candidate.split("/").filter(Boolean);
    if (parts.length < 2) {
        return false;
    }
    return repoRootSegments.has(parts[0].toLowerCase());
};
106
/**
 * Coarse domain label for a heading: its first substantive token, or
 * "coverage" when nothing substantive remains after filtering.
 */
const deriveSectionDomain = (heading) => {
    const words = normalizeHeadingCandidate(heading)
        .toLowerCase()
        .split(/\s+/)
        .map((word) => word.replace(/[^a-z0-9.-]+/g, ""))
        .filter((word) => word.length >= 3 && !headingNoiseTokens.has(word));
    return words.length > 0 ? words[0] : "coverage";
};
114
/**
 * Domain label built from the first one or two path segments of a folder
 * entry; "structure" when the entry cannot be normalized.
 */
const deriveFolderDomain = (entry) => {
    const lowered = normalizeFolderEntry(entry)?.toLowerCase();
    const segments = lowered ? lowered.split("/").filter(Boolean) : [];
    if (segments.length === 0) {
        return "structure";
    }
    if (segments.length === 1) {
        return segments[0];
    }
    return `${segments[0]}-${segments[1]}`;
};
123
/**
 * Collect up to `limit` distinct ATX headings (`#` .. `######`) from
 * markdown text, with trailing hash runs stripped.
 */
const extractMarkdownHeadings = (content, limit) => {
    if (!content) {
        return [];
    }
    const collected = [];
    for (const match of content.matchAll(/^\s{0,3}#{1,6}\s+(.+?)\s*$/gm)) {
        const text = match[1]?.replace(/#+$/, "").trim();
        if (!text) {
            continue;
        }
        collected.push(text);
        if (collected.length >= limit) {
            break;
        }
    }
    return unique(collected).slice(0, limit);
};
138
/**
 * Pull up to `limit` distinct path-like tokens (e.g. `src/a/b.ts`) out of
 * free-form text, normalizing away a leading "./" and trailing slashes.
 */
const extractFolderEntries = (content, limit) => {
    if (!content) {
        return [];
    }
    const pathToken = /[`'"]?([a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+(?:\/[a-zA-Z0-9._-]+)*)[`'"]?/g;
    const collected = [];
    outer: for (const rawLine of content.split(/\r?\n/)) {
        const line = rawLine.trim();
        if (!line) {
            continue;
        }
        for (const match of line.matchAll(pathToken)) {
            const cleaned = (match[1] ?? "").replace(/^\.?\//, "").replace(/\/+$/, "").trim();
            if (!cleaned || !cleaned.includes("/")) {
                continue;
            }
            collected.push(cleaned);
            if (collected.length >= limit) {
                break outer;
            }
        }
    }
    return unique(collected).slice(0, limit);
};
161
/**
 * Fuzzy containment check: a heading counts as covered when the corpus
 * contains its normalized form verbatim, or enough (up to 2) of its first
 * eight long (>= 4 char) tokens. Vacuously true for headings that
 * normalize to nothing or yield no long tokens.
 */
const headingCovered = (corpus, heading) => {
    const needle = normalizeText(heading);
    if (!needle || corpus.includes(needle)) {
        return true;
    }
    const longTokens = needle
        .split(/\s+/)
        .filter((token) => token.length >= 4)
        .slice(0, 8);
    if (longTokens.length === 0) {
        return true;
    }
    const required = Math.min(2, longTokens.length);
    let hits = 0;
    for (const token of longTokens) {
        if (corpus.includes(token)) {
            hits += 1;
        }
    }
    return hits >= required;
};
177
/**
 * Fuzzy containment for a folder entry: covered when the corpus holds the
 * whole normalized path, or both its leaf segment and (when present) the
 * parent segment. Vacuously true when the entry normalizes to nothing.
 */
const folderEntryCovered = (corpus, entry) => {
    const flattened = normalizeText(entry).replace(/\s+/g, "");
    if (!flattened || corpus.includes(flattened)) {
        return true;
    }
    const segments = flattened.split("/").filter(Boolean);
    if (segments.length === 0) {
        return true;
    }
    const leaf = segments[segments.length - 1];
    const parent = segments.length > 1 ? segments[segments.length - 2] : undefined;
    if (!leaf || !corpus.includes(leaf)) {
        return false;
    }
    return parent === undefined ? true : corpus.includes(parent);
};
195
/**
 * Parse a JSON string, returning `fallback` for non-strings, blank
 * strings, and malformed JSON instead of throwing.
 */
const readJsonSafe = (raw, fallback) => {
    const isNonEmptyString = typeof raw === "string" && raw.trim().length > 0;
    if (!isNonEmptyString) {
        return fallback;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return fallback;
    }
};
205
+ export class TaskSufficiencyService {
206
    /**
     * @param {object} workspace - Workspace descriptor (roots, .mcoda dir).
     * @param {{workspaceRepo: object, jobService: object}} deps - Injected collaborators.
     * @param {{ownsWorkspaceRepo?: boolean, ownsJobService?: boolean}} [ownership]
     *   Whether this instance is responsible for closing each dependency.
     */
    constructor(workspace, deps, ownership = {}) {
        this.workspace = workspace;
        this.workspaceRepo = deps.workspaceRepo;
        this.jobService = deps.jobService;
        // Ownership flags gate close(): only owned resources are released.
        this.ownsWorkspaceRepo = ownership.ownsWorkspaceRepo === true;
        this.ownsJobService = ownership.ownsJobService === true;
    }
213
    /**
     * Factory that builds the service with self-owned dependencies; both
     * the repository and the job service are closed by close().
     */
    static async create(workspace) {
        const workspaceRepo = await WorkspaceRepository.create(workspace.workspaceRoot);
        const jobService = new JobService(workspace);
        return new TaskSufficiencyService(workspace, { workspaceRepo, jobService }, { ownsWorkspaceRepo: true, ownsJobService: true });
    }
218
+ async close() {
219
+ if (this.ownsWorkspaceRepo) {
220
+ await this.workspaceRepo.close();
221
+ }
222
+ if (this.ownsJobService) {
223
+ await this.jobService.close();
224
+ }
225
+ }
226
    /**
     * Locate SDS documents under the workspace: a few well-known direct
     * paths, then a bounded recursive scan of docs/ and the workspace
     * root. Returns absolute paths, capped at SDS_SCAN_MAX_FILES.
     */
    async discoverSdsPaths(workspaceRoot) {
        const directCandidates = [
            path.join(workspaceRoot, "docs", "sds.md"),
            path.join(workspaceRoot, "docs", "sds", "sds.md"),
            path.join(workspaceRoot, "docs", "software-design-specification.md"),
            path.join(workspaceRoot, "sds.md"),
        ];
        const found = new Set();
        for (const candidate of directCandidates) {
            try {
                const stat = await fs.stat(candidate);
                if (stat.isFile())
                    found.add(path.resolve(candidate));
            }
            catch {
                // ignore missing direct candidate
            }
        }
        // docs/ is scanned deeper (5 levels) than the workspace root (3).
        const roots = [path.join(workspaceRoot, "docs"), workspaceRoot];
        for (const root of roots) {
            const discovered = await this.walkSdsCandidates(root, root === workspaceRoot ? 3 : 5, SDS_SCAN_MAX_FILES);
            discovered.forEach((entry) => found.add(entry));
            if (found.size >= SDS_SCAN_MAX_FILES)
                break;
        }
        return Array.from(found).slice(0, SDS_SCAN_MAX_FILES);
    }
253
    /**
     * Depth-first scan for SDS-like text files under `root`, honoring a
     * depth limit and a result cap. A file qualifies by an SDS-ish
     * filename, or — failing that — by an SDS content marker within its
     * first 30 kB. Unreadable directories and files are skipped silently.
     */
    async walkSdsCandidates(root, maxDepth, cap) {
        const results = [];
        const walk = async (dir, depth) => {
            if (results.length >= cap || depth > maxDepth)
                return;
            let entries = [];
            try {
                entries = await fs.readdir(dir, { withFileTypes: true });
            }
            catch {
                return; // unreadable directory
            }
            for (const entry of entries) {
                if (results.length >= cap)
                    break;
                if (entry.isDirectory()) {
                    if (ignoredDirs.has(entry.name))
                        continue;
                    await walk(path.join(dir, entry.name), depth + 1);
                    continue;
                }
                if (!entry.isFile())
                    continue;
                const filePath = path.join(dir, entry.name);
                // Only markdown/plain-text files are considered.
                if (!/\.(md|markdown|txt)$/i.test(entry.name))
                    continue;
                if (!sdsFilenamePattern.test(entry.name)) {
                    try {
                        const sample = await fs.readFile(filePath, "utf8");
                        if (!sdsContentPattern.test(sample.slice(0, 30000)))
                            continue;
                    }
                    catch {
                        continue; // unreadable file
                    }
                }
                results.push(path.resolve(filePath));
            }
        };
        await walk(root, 0);
        return results;
    }
295
+ async loadSdsSources(paths) {
296
+ const docs = [];
297
+ for (const filePath of paths) {
298
+ try {
299
+ const content = await fs.readFile(filePath, "utf8");
300
+ if (!sdsContentPattern.test(content) && !sdsFilenamePattern.test(path.basename(filePath)))
301
+ continue;
302
+ docs.push({ path: filePath, content });
303
+ }
304
+ catch {
305
+ // ignore unreadable source
306
+ }
307
+ }
308
+ return docs;
309
+ }
310
    /**
     * Load the backlog for `projectKey` and derive the audit inputs:
     * a normalized text corpus of all epic/story/task text, the set of
     * sufficiency anchors already claimed by generated tasks, and the
     * current maximum task priority.
     * @throws {Error} when the project does not exist.
     */
    async loadProjectSnapshot(projectKey) {
        const project = await this.workspaceRepo.getProjectByKey(projectKey);
        if (!project) {
            throw new Error(`task-sufficiency-audit could not find project "${projectKey}". Run create-tasks first or pass a valid --project.`);
        }
        const db = this.workspaceRepo.getDb();
        // NULL priorities sort last via COALESCE(..., 2147483647).
        const [epics, stories, tasks, maxPriorityRow] = await Promise.all([
            db.all(`SELECT id, key, title, description
FROM epics
WHERE project_id = ?
ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, project.id),
            db.all(`SELECT id, key, title, description, acceptance_criteria
FROM user_stories
WHERE project_id = ?
ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, project.id),
            db.all(`SELECT id, key, title, description, metadata_json
FROM tasks
WHERE project_id = ?
ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, project.id),
            db.get(`SELECT COALESCE(MAX(priority), 0) AS max_priority FROM tasks WHERE project_id = ?`, project.id),
        ]);
        // Anchors recorded on previously generated tasks; used to avoid
        // re-creating remediation tasks for the same SDS signals.
        const existingAnchors = new Set();
        const corpusChunks = [];
        for (const epic of epics) {
            corpusChunks.push(`${epic.title ?? ""} ${epic.description ?? ""}`);
        }
        for (const story of stories) {
            corpusChunks.push(`${story.title ?? ""} ${story.description ?? ""} ${story.acceptance_criteria ?? ""}`);
        }
        for (const task of tasks) {
            corpusChunks.push(`${task.title ?? ""} ${task.description ?? ""}`);
            const metadata = readJsonSafe(task.metadata_json, null);
            const sufficiencyAudit = metadata?.sufficiencyAudit;
            // Both the singular `anchor` and plural `anchors` fields are honored.
            const rawAnchor = sufficiencyAudit?.anchor;
            if (typeof rawAnchor === "string" && rawAnchor.trim().length > 0) {
                existingAnchors.add(rawAnchor.trim());
            }
            const rawAnchors = sufficiencyAudit?.anchors;
            if (Array.isArray(rawAnchors)) {
                for (const anchor of rawAnchors) {
                    if (typeof anchor !== "string" || anchor.trim().length === 0)
                        continue;
                    existingAnchors.add(anchor.trim());
                }
            }
        }
        return {
            project,
            epicCount: epics.length,
            storyCount: stories.length,
            taskCount: tasks.length,
            corpus: normalizeText(corpusChunks.join("\n")).replace(/\s+/g, " ").trim(),
            existingAnchors,
            maxPriority: Number(maxPriorityRow?.max_priority ?? 0),
        };
    }
366
+ evaluateCoverage(corpus, sectionHeadings, folderEntries, existingAnchors) {
367
+ const missingSectionHeadings = sectionHeadings.filter((heading) => {
368
+ const anchor = normalizeAnchor("section", heading);
369
+ if (existingAnchors.has(anchor))
370
+ return false;
371
+ return !headingCovered(corpus, heading);
372
+ });
373
+ const missingFolderEntries = folderEntries.filter((entry) => {
374
+ const anchor = normalizeAnchor("folder", entry);
375
+ if (existingAnchors.has(anchor))
376
+ return false;
377
+ return !folderEntryCovered(corpus, entry);
378
+ });
379
+ const totalSignals = sectionHeadings.length + folderEntries.length;
380
+ const coveredSignals = totalSignals - missingSectionHeadings.length - missingFolderEntries.length;
381
+ const coverageRatio = totalSignals === 0 ? 1 : coveredSignals / totalSignals;
382
+ return {
383
+ coverageRatio: Number(coverageRatio.toFixed(4)),
384
+ totalSignals,
385
+ missingSectionHeadings,
386
+ missingFolderEntries,
387
+ };
388
+ }
389
+ buildGapItems(coverage, existingAnchors, limit) {
390
+ const items = [];
391
+ for (const heading of coverage.missingSectionHeadings) {
392
+ const normalizedAnchor = normalizeAnchor("section", heading);
393
+ if (existingAnchors.has(normalizedAnchor))
394
+ continue;
395
+ items.push({
396
+ kind: "section",
397
+ value: heading,
398
+ normalizedAnchor,
399
+ domain: deriveSectionDomain(heading),
400
+ });
401
+ if (items.length >= limit)
402
+ return items;
403
+ }
404
+ for (const entry of coverage.missingFolderEntries) {
405
+ const normalizedAnchor = normalizeAnchor("folder", entry);
406
+ if (existingAnchors.has(normalizedAnchor))
407
+ continue;
408
+ items.push({
409
+ kind: "folder",
410
+ value: entry,
411
+ normalizedAnchor,
412
+ domain: deriveFolderDomain(entry),
413
+ });
414
+ if (items.length >= limit)
415
+ return items;
416
+ }
417
+ return items;
418
+ }
419
+ bundleGapItems(gapItems, limit) {
420
+ const groups = new Map();
421
+ const orderedKeys = [];
422
+ for (const item of gapItems) {
423
+ const key = `${item.domain}:${item.kind}`;
424
+ if (!groups.has(key)) {
425
+ groups.set(key, []);
426
+ orderedKeys.push(key);
427
+ }
428
+ groups.get(key)?.push(item);
429
+ }
430
+ const bundles = [];
431
+ for (const key of orderedKeys) {
432
+ const group = groups.get(key) ?? [];
433
+ for (let index = 0; index < group.length; index += GAP_BUNDLE_SIZE) {
434
+ if (bundles.length >= limit)
435
+ return bundles;
436
+ const chunk = group.slice(index, index + GAP_BUNDLE_SIZE);
437
+ const kinds = new Set(chunk.map((item) => item.kind));
438
+ bundles.push({
439
+ kind: kinds.size > 1 ? "mixed" : chunk[0]?.kind ?? "section",
440
+ domain: chunk[0]?.domain ?? "coverage",
441
+ values: chunk.map((item) => item.value),
442
+ normalizedAnchors: chunk.map((item) => item.normalizedAnchor),
443
+ });
444
+ }
445
+ }
446
+ return bundles;
447
+ }
448
    /**
     * Find or create the epic/story pair that owns generated remediation
     * tasks. Reuses any story (or story under an epic) previously tagged
     * with metadata.source === "task-sufficiency-audit"; otherwise inserts
     * a dedicated "Backlog Sufficiency Alignment" epic and/or a
     * "Close SDS Coverage Gaps" story beneath it.
     * @returns {{epicId, epicKey, storyId, storyKey}}
     */
    async ensureTargetStory(project) {
        const db = this.workspaceRepo.getDb();
        const existingStories = await db.all(`SELECT
us.id AS story_id,
us.key AS story_key,
us.metadata_json AS story_metadata_json,
us.epic_id AS epic_id,
e.key AS epic_key,
e.metadata_json AS epic_metadata_json
FROM user_stories us
JOIN epics e ON e.id = us.epic_id
WHERE us.project_id = ?
ORDER BY COALESCE(us.priority, 2147483647), datetime(us.created_at), us.key`, project.id);
        // Prefer an existing audit-owned story (or one under an audit-owned epic).
        for (const row of existingStories) {
            const storyMetadata = readJsonSafe(row.story_metadata_json, null) ?? {};
            const epicMetadata = readJsonSafe(row.epic_metadata_json, null) ?? {};
            const storySource = typeof storyMetadata.source === "string" ? storyMetadata.source : undefined;
            const epicSource = typeof epicMetadata.source === "string" ? epicMetadata.source : undefined;
            if (storySource === "task-sufficiency-audit" || epicSource === "task-sufficiency-audit") {
                return {
                    epicId: row.epic_id,
                    epicKey: row.epic_key,
                    storyId: row.story_id,
                    storyKey: row.story_key,
                };
            }
        }
        let epicId = "";
        let epicKey = "";
        // NOTE(review): this fetches only the single top-priority epic and
        // reuses it only when audit-owned — otherwise a new epic is created.
        const existingEpic = await db.get(`SELECT id, key, metadata_json
FROM epics
WHERE project_id = ?
ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, project.id);
        const existingEpicMetadata = readJsonSafe(existingEpic?.metadata_json, null) ?? {};
        const existingEpicSource = typeof existingEpicMetadata.source === "string" ? existingEpicMetadata.source : undefined;
        if (existingEpic && existingEpicSource === "task-sufficiency-audit") {
            epicId = existingEpic.id;
            epicKey = existingEpic.key;
        }
        else {
            const epicKeyGen = createEpicKeyGenerator(project.key, await this.workspaceRepo.listEpicKeys(project.id));
            const insertedEpic = (await this.workspaceRepo.insertEpics([
                {
                    projectId: project.id,
                    key: epicKeyGen("ops"),
                    title: "Backlog Sufficiency Alignment",
                    description: "Tracks generated backlog patches required to align SDS coverage and implementation readiness.",
                    storyPointsTotal: null,
                    priority: null,
                    metadata: {
                        source: "task-sufficiency-audit",
                    },
                },
            ]))[0];
            epicId = insertedEpic.id;
            epicKey = insertedEpic.key;
        }
        const storyKeyGen = createStoryKeyGenerator(epicKey, await this.workspaceRepo.listStoryKeys(epicId));
        const insertedStory = (await this.workspaceRepo.insertStories([
            {
                projectId: project.id,
                epicId,
                key: storyKeyGen(),
                title: "Close SDS Coverage Gaps",
                description: "Adds missing implementation tasks discovered by SDS-vs-backlog sufficiency auditing.",
                acceptanceCriteria: "- SDS gaps are represented as executable backlog tasks.\n- Coverage report reaches configured minimum threshold.",
                storyPointsTotal: null,
                priority: null,
                metadata: {
                    source: "task-sufficiency-audit",
                },
            },
        ]))[0];
        return {
            epicId,
            epicKey,
            storyId: insertedStory.id,
            storyKey: insertedStory.key,
        };
    }
528
    /**
     * Materialize gap bundles as backlog tasks under the target story,
     * record a synthetic succeeded task-run for each inserted task (for
     * traceability), and refresh the story's and epic's story-point
     * totals.
     * @returns inserted task rows.
     */
    async insertGapTasks(params) {
        const existingTaskKeys = await this.workspaceRepo.listTaskKeys(params.storyId);
        const taskKeyGen = createTaskKeyGenerator(params.storyKey, existingTaskKeys);
        const now = new Date().toISOString();
        const taskInserts = params.gapBundles.map((bundle, index) => {
            const domainLabel = bundle.domain.replace(/[-_]+/g, " ").trim();
            const titlePrefix = bundle.kind === "section"
                ? "Close SDS section coverage"
                : bundle.kind === "folder"
                    ? "Materialize SDS structure coverage"
                    : "Close SDS coverage bundle";
            const title = `${titlePrefix}: ${domainLabel || "implementation scope"}`.slice(0, 180);
            const objective = bundle.kind === "folder"
                ? `Create or update implementation artifacts for ${bundle.values.length} SDS folder-tree requirement(s).`
                : `Implement missing functionality for ${bundle.values.length} SDS section requirement(s).`;
            const scopeLines = bundle.values.map((value) => `- ${value}`);
            const anchorLines = bundle.normalizedAnchors.map((anchor) => `- ${anchor}`);
            // Markdown task body: objective, context, anchors, plan, DoD.
            const description = [
                `## Objective`,
                objective,
                ``,
                `## Context`,
                `- Generated by task-sufficiency-audit iteration ${params.iteration}.`,
                `- Coverage domain: ${bundle.domain}`,
                ``,
                `## Anchor Scope`,
                ...scopeLines,
                ``,
                `## Anchor Keys`,
                ...anchorLines,
                ``,
                `## Implementation Plan`,
                `- Implement production code for this bundle before adding follow-up docs-only changes.`,
                `- Update module wiring/contracts touched by these anchors.`,
                `- Ensure each anchor has deterministic evidence (tests or checks).`,
                ``,
                `## Testing`,
                `- Add or update tests that validate each listed anchor scope.`,
                `- Keep regression suites green after applying this bundle.`,
                ``,
                `## Definition of Done`,
                `- All anchor scope items in this bundle are represented in implementation code.`,
                `- Validation evidence exists for every anchor key listed above.`,
            ].join("\n");
            return {
                projectId: params.project.id,
                epicId: params.epicId,
                userStoryId: params.storyId,
                key: taskKeyGen(),
                title,
                description,
                type: "feature",
                status: "not_started",
                // Story points scale with bundle size, clamped to [2, 5].
                storyPoints: Math.min(5, Math.max(2, bundle.normalizedAnchors.length)),
                // Appended after the current highest priority so existing
                // ordering is preserved.
                priority: params.maxPriority + index + 1,
                metadata: {
                    sufficiencyAudit: {
                        source: "task-sufficiency-audit",
                        kind: bundle.kind,
                        domain: bundle.domain,
                        values: bundle.values,
                        anchor: bundle.normalizedAnchors[0],
                        anchors: bundle.normalizedAnchors,
                        iteration: params.iteration,
                        generatedAt: now,
                    },
                },
            };
        });
        const rows = await this.workspaceRepo.insertTasks(taskInserts);
        for (const row of rows) {
            await this.workspaceRepo.createTaskRun({
                taskId: row.id,
                command: "task-sufficiency-audit",
                status: "succeeded",
                jobId: params.jobId,
                commandRunId: params.commandRunId,
                startedAt: now,
                finishedAt: now,
                runContext: {
                    key: row.key,
                    source: "task-sufficiency-audit",
                },
            });
        }
        // Recompute aggregate story points from the DB after insertion.
        const db = this.workspaceRepo.getDb();
        const storyTotal = await db.get(`SELECT COALESCE(SUM(COALESCE(story_points, 0)), 0) AS total FROM tasks WHERE user_story_id = ?`, params.storyId);
        const epicTotal = await db.get(`SELECT COALESCE(SUM(COALESCE(story_points, 0)), 0) AS total FROM tasks WHERE epic_id = ?`, params.epicId);
        await this.workspaceRepo.updateStoryPointsTotal(params.storyId, Number(storyTotal?.total ?? 0));
        await this.workspaceRepo.updateEpicStoryPointsTotal(params.epicId, Number(epicTotal?.total ?? 0));
        return rows;
    }
620
+ async writeReportArtifacts(projectKey, report) {
621
+ const baseDir = path.join(this.workspace.mcodaDir, "tasks", projectKey);
622
+ const historyDir = path.join(baseDir, "sufficiency-audit");
623
+ await fs.mkdir(baseDir, { recursive: true });
624
+ await fs.mkdir(historyDir, { recursive: true });
625
+ const reportPath = path.join(baseDir, REPORT_FILE_NAME);
626
+ const stamp = new Date().toISOString().replace(/[:.]/g, "-");
627
+ const historyPath = path.join(historyDir, `${stamp}.json`);
628
+ const payload = JSON.stringify(report, null, 2);
629
+ await fs.writeFile(reportPath, payload, "utf8");
630
+ await fs.writeFile(historyPath, payload, "utf8");
631
+ return { reportPath, historyPath };
632
+ }
633
+ async runAudit(request) {
634
+ const maxIterations = Math.max(1, request.maxIterations ?? DEFAULT_MAX_ITERATIONS);
635
+ const maxTasksPerIteration = Math.max(1, request.maxTasksPerIteration ?? DEFAULT_MAX_TASKS_PER_ITERATION);
636
+ const minCoverageRatio = Math.min(1, Math.max(0, request.minCoverageRatio ?? DEFAULT_MIN_COVERAGE_RATIO));
637
+ const dryRun = request.dryRun === true;
638
+ const sourceCommand = request.sourceCommand?.trim() || undefined;
639
+ await PathHelper.ensureDir(this.workspace.mcodaDir);
640
+ const commandRun = await this.jobService.startCommandRun("task-sufficiency-audit", request.projectKey);
641
+ const job = await this.jobService.startJob("task_sufficiency_audit", commandRun.id, request.projectKey, {
642
+ commandName: "task-sufficiency-audit",
643
+ payload: {
644
+ projectKey: request.projectKey,
645
+ sourceCommand,
646
+ dryRun,
647
+ maxIterations,
648
+ maxTasksPerIteration,
649
+ minCoverageRatio,
650
+ },
651
+ });
652
+ try {
653
+ const sdsPaths = await this.discoverSdsPaths(request.workspace.workspaceRoot);
654
+ const sdsDocs = await this.loadSdsSources(sdsPaths);
655
+ if (sdsDocs.length === 0) {
656
+ throw new Error("task-sufficiency-audit requires an SDS document but none was found. Add docs/sds.md (or a fuzzy-match SDS doc) and retry.");
657
+ }
658
+ const warnings = [];
659
+ const rawSectionHeadings = unique(sdsDocs.flatMap((doc) => extractMarkdownHeadings(doc.content, SDS_HEADING_LIMIT))).slice(0, SDS_HEADING_LIMIT);
660
+ const rawFolderEntries = unique(sdsDocs.flatMap((doc) => extractFolderEntries(doc.content, SDS_FOLDER_LIMIT))).slice(0, SDS_FOLDER_LIMIT);
661
+ const sectionHeadings = unique(rawSectionHeadings
662
+ .map((heading) => normalizeHeadingCandidate(heading))
663
+ .filter((heading) => headingLooksImplementationRelevant(heading))).slice(0, SDS_HEADING_LIMIT);
664
+ const folderEntries = unique(rawFolderEntries
665
+ .map((entry) => normalizeFolderEntry(entry))
666
+ .filter((entry) => Boolean(entry))
667
+ .filter((entry) => folderEntryLooksRepoRelevant(entry))).slice(0, SDS_FOLDER_LIMIT);
668
+ const skippedHeadingSignals = Math.max(0, rawSectionHeadings.length - sectionHeadings.length);
669
+ const skippedFolderSignals = Math.max(0, rawFolderEntries.length - folderEntries.length);
670
+ if (skippedHeadingSignals > 0 || skippedFolderSignals > 0) {
671
+ warnings.push(`Filtered non-actionable SDS signals (headings=${skippedHeadingSignals}, folders=${skippedFolderSignals}) before remediation.`);
672
+ }
673
+ if (sectionHeadings.length === 0 && folderEntries.length === 0) {
674
+ warnings.push("No actionable implementation signals detected from SDS headings/folder tree after filtering; audit will report coverage only.");
675
+ }
676
+ await this.jobService.writeCheckpoint(job.id, {
677
+ stage: "sds_loaded",
678
+ timestamp: new Date().toISOString(),
679
+ details: {
680
+ docCount: sdsDocs.length,
681
+ headingSignals: sectionHeadings.length,
682
+ folderSignals: folderEntries.length,
683
+ rawHeadingSignals: rawSectionHeadings.length,
684
+ rawFolderSignals: rawFolderEntries.length,
685
+ filteredHeadingSignals: skippedHeadingSignals,
686
+ filteredFolderSignals: skippedFolderSignals,
687
+ docs: sdsDocs.map((doc) => path.relative(request.workspace.workspaceRoot, doc.path)),
688
+ },
689
+ });
690
+ const iterations = [];
691
+ let totalTasksAdded = 0;
692
+ const totalTasksUpdated = 0;
693
+ let satisfied = false;
694
+ for (let iteration = 1; iteration <= maxIterations; iteration += 1) {
695
+ const snapshot = await this.loadProjectSnapshot(request.projectKey);
696
+ const coverage = this.evaluateCoverage(snapshot.corpus, sectionHeadings, folderEntries, snapshot.existingAnchors);
697
+ const shouldStop = coverage.coverageRatio >= minCoverageRatio ||
698
+ (coverage.missingSectionHeadings.length === 0 && coverage.missingFolderEntries.length === 0);
699
+ if (shouldStop) {
700
+ satisfied = true;
701
+ iterations.push({
702
+ iteration,
703
+ coverageRatio: coverage.coverageRatio,
704
+ totalSignals: coverage.totalSignals,
705
+ missingSectionCount: coverage.missingSectionHeadings.length,
706
+ missingFolderCount: coverage.missingFolderEntries.length,
707
+ createdTaskKeys: [],
708
+ });
709
+ await this.jobService.writeCheckpoint(job.id, {
710
+ stage: "iteration",
711
+ timestamp: new Date().toISOString(),
712
+ details: {
713
+ iteration,
714
+ coverageRatio: coverage.coverageRatio,
715
+ totalSignals: coverage.totalSignals,
716
+ missingSectionCount: coverage.missingSectionHeadings.length,
717
+ missingFolderCount: coverage.missingFolderEntries.length,
718
+ action: "complete",
719
+ },
720
+ });
721
+ break;
722
+ }
723
+ const gapItems = this.buildGapItems(coverage, snapshot.existingAnchors, maxTasksPerIteration * GAP_BUNDLE_SIZE);
724
+ const gapBundles = this.bundleGapItems(gapItems, maxTasksPerIteration);
725
+ if (gapBundles.length === 0) {
726
+ warnings.push(`Iteration ${iteration}: unresolved SDS gaps remain but no insertable gap items were identified.`);
727
+ iterations.push({
728
+ iteration,
729
+ coverageRatio: coverage.coverageRatio,
730
+ totalSignals: coverage.totalSignals,
731
+ missingSectionCount: coverage.missingSectionHeadings.length,
732
+ missingFolderCount: coverage.missingFolderEntries.length,
733
+ createdTaskKeys: [],
734
+ });
735
+ break;
736
+ }
737
+ if (dryRun) {
738
+ iterations.push({
739
+ iteration,
740
+ coverageRatio: coverage.coverageRatio,
741
+ totalSignals: coverage.totalSignals,
742
+ missingSectionCount: coverage.missingSectionHeadings.length,
743
+ missingFolderCount: coverage.missingFolderEntries.length,
744
+ createdTaskKeys: [],
745
+ });
746
+ await this.jobService.writeCheckpoint(job.id, {
747
+ stage: "iteration",
748
+ timestamp: new Date().toISOString(),
749
+ details: {
750
+ iteration,
751
+ coverageRatio: coverage.coverageRatio,
752
+ totalSignals: coverage.totalSignals,
753
+ missingSectionCount: coverage.missingSectionHeadings.length,
754
+ missingFolderCount: coverage.missingFolderEntries.length,
755
+ action: "dry_run",
756
+ proposedGapItems: gapBundles.map((bundle) => ({
757
+ kind: bundle.kind,
758
+ domain: bundle.domain,
759
+ values: bundle.values,
760
+ })),
761
+ },
762
+ });
763
+ break;
764
+ }
765
+ const target = await this.ensureTargetStory(snapshot.project);
766
+ const inserted = await this.insertGapTasks({
767
+ project: snapshot.project,
768
+ storyId: target.storyId,
769
+ storyKey: target.storyKey,
770
+ epicId: target.epicId,
771
+ maxPriority: snapshot.maxPriority,
772
+ gapBundles,
773
+ iteration,
774
+ jobId: job.id,
775
+ commandRunId: commandRun.id,
776
+ });
777
+ const createdTaskKeys = inserted.map((task) => task.key);
778
+ totalTasksAdded += createdTaskKeys.length;
779
+ iterations.push({
780
+ iteration,
781
+ coverageRatio: coverage.coverageRatio,
782
+ totalSignals: coverage.totalSignals,
783
+ missingSectionCount: coverage.missingSectionHeadings.length,
784
+ missingFolderCount: coverage.missingFolderEntries.length,
785
+ createdTaskKeys,
786
+ });
787
+ await this.jobService.writeCheckpoint(job.id, {
788
+ stage: "iteration",
789
+ timestamp: new Date().toISOString(),
790
+ details: {
791
+ iteration,
792
+ coverageRatio: coverage.coverageRatio,
793
+ totalSignals: coverage.totalSignals,
794
+ missingSectionCount: coverage.missingSectionHeadings.length,
795
+ missingFolderCount: coverage.missingFolderEntries.length,
796
+ createdTaskKeys,
797
+ addedCount: createdTaskKeys.length,
798
+ },
799
+ });
800
+ await this.jobService.appendLog(job.id, `Iteration ${iteration}: added ${createdTaskKeys.length} remediation task(s) from ${gapBundles.length} gap bundle(s): ${createdTaskKeys.join(", ")}\n`);
801
+ }
802
+ const finalSnapshot = await this.loadProjectSnapshot(request.projectKey);
803
+ const finalCoverage = this.evaluateCoverage(finalSnapshot.corpus, sectionHeadings, folderEntries, finalSnapshot.existingAnchors);
804
+ if (finalCoverage.coverageRatio >= minCoverageRatio ||
805
+ (finalCoverage.missingSectionHeadings.length === 0 && finalCoverage.missingFolderEntries.length === 0)) {
806
+ satisfied = true;
807
+ }
808
+ if (!satisfied) {
809
+ warnings.push(`Sufficiency target not reached (coverage=${finalCoverage.coverageRatio}, threshold=${minCoverageRatio}) after ${iterations.length} iteration(s).`);
810
+ }
811
+ const report = {
812
+ projectKey: request.projectKey,
813
+ sourceCommand,
814
+ generatedAt: new Date().toISOString(),
815
+ dryRun,
816
+ maxIterations,
817
+ maxTasksPerIteration,
818
+ minCoverageRatio,
819
+ satisfied,
820
+ totalTasksAdded,
821
+ totalTasksUpdated,
822
+ docs: sdsDocs.map((doc) => ({
823
+ path: path.relative(request.workspace.workspaceRoot, doc.path),
824
+ headingSignals: extractMarkdownHeadings(doc.content, SDS_HEADING_LIMIT)
825
+ .map((heading) => normalizeHeadingCandidate(heading))
826
+ .filter((heading) => headingLooksImplementationRelevant(heading)).length,
827
+ folderSignals: extractFolderEntries(doc.content, SDS_FOLDER_LIMIT)
828
+ .map((entry) => normalizeFolderEntry(entry))
829
+ .filter((entry) => Boolean(entry))
830
+ .filter((entry) => folderEntryLooksRepoRelevant(entry)).length,
831
+ rawHeadingSignals: extractMarkdownHeadings(doc.content, SDS_HEADING_LIMIT).length,
832
+ rawFolderSignals: extractFolderEntries(doc.content, SDS_FOLDER_LIMIT).length,
833
+ })),
834
+ finalCoverage: {
835
+ coverageRatio: finalCoverage.coverageRatio,
836
+ totalSignals: finalCoverage.totalSignals,
837
+ missingSectionHeadings: finalCoverage.missingSectionHeadings,
838
+ missingFolderEntries: finalCoverage.missingFolderEntries,
839
+ },
840
+ iterations,
841
+ warnings,
842
+ };
843
+ const { reportPath, historyPath } = await this.writeReportArtifacts(request.projectKey, report);
844
+ await this.jobService.writeCheckpoint(job.id, {
845
+ stage: "report_written",
846
+ timestamp: new Date().toISOString(),
847
+ details: {
848
+ reportPath,
849
+ historyPath,
850
+ satisfied,
851
+ totalTasksAdded,
852
+ totalTasksUpdated,
853
+ finalCoverageRatio: finalCoverage.coverageRatio,
854
+ },
855
+ });
856
+ const result = {
857
+ jobId: job.id,
858
+ commandRunId: commandRun.id,
859
+ projectKey: request.projectKey,
860
+ sourceCommand,
861
+ satisfied,
862
+ dryRun,
863
+ totalTasksAdded,
864
+ totalTasksUpdated,
865
+ maxIterations,
866
+ minCoverageRatio,
867
+ finalCoverageRatio: finalCoverage.coverageRatio,
868
+ remainingSectionHeadings: finalCoverage.missingSectionHeadings,
869
+ remainingFolderEntries: finalCoverage.missingFolderEntries,
870
+ remainingGaps: {
871
+ sections: finalCoverage.missingSectionHeadings.length,
872
+ folders: finalCoverage.missingFolderEntries.length,
873
+ total: finalCoverage.missingSectionHeadings.length + finalCoverage.missingFolderEntries.length,
874
+ },
875
+ iterations,
876
+ reportPath,
877
+ reportHistoryPath: historyPath,
878
+ warnings,
879
+ };
880
+ await this.jobService.updateJobStatus(job.id, "completed", {
881
+ payload: {
882
+ projectKey: request.projectKey,
883
+ satisfied,
884
+ dryRun,
885
+ totalTasksAdded,
886
+ totalTasksUpdated,
887
+ maxIterations,
888
+ minCoverageRatio,
889
+ finalCoverageRatio: finalCoverage.coverageRatio,
890
+ remainingSectionCount: finalCoverage.missingSectionHeadings.length,
891
+ remainingFolderCount: finalCoverage.missingFolderEntries.length,
892
+ reportPath,
893
+ reportHistoryPath: historyPath,
894
+ warnings,
895
+ },
896
+ });
897
+ await this.jobService.finishCommandRun(commandRun.id, "succeeded");
898
+ return result;
899
+ }
900
+ catch (error) {
901
+ const message = error instanceof Error ? error.message : String(error);
902
+ await this.jobService.appendLog(job.id, `task-sufficiency-audit failed: ${message}\n`);
903
+ await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: message });
904
+ await this.jobService.finishCommandRun(commandRun.id, "failed", message);
905
+ throw error;
906
+ }
907
+ }
908
+ }