@mcoda/core 0.1.34 → 0.1.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26) hide show
  1. package/dist/api/AgentsApi.d.ts +4 -1
  2. package/dist/api/AgentsApi.d.ts.map +1 -1
  3. package/dist/api/AgentsApi.js +4 -1
  4. package/dist/services/docs/DocsService.d.ts +37 -0
  5. package/dist/services/docs/DocsService.d.ts.map +1 -1
  6. package/dist/services/docs/DocsService.js +537 -2
  7. package/dist/services/docs/review/gates/OpenQuestionsGate.d.ts.map +1 -1
  8. package/dist/services/docs/review/gates/OpenQuestionsGate.js +13 -2
  9. package/dist/services/docs/review/gates/SdsNoUnresolvedItemsGate.d.ts.map +1 -1
  10. package/dist/services/docs/review/gates/SdsNoUnresolvedItemsGate.js +12 -1
  11. package/dist/services/planning/CreateTasksService.d.ts +20 -0
  12. package/dist/services/planning/CreateTasksService.d.ts.map +1 -1
  13. package/dist/services/planning/CreateTasksService.js +772 -163
  14. package/dist/services/planning/SdsCoverageModel.d.ts +27 -0
  15. package/dist/services/planning/SdsCoverageModel.d.ts.map +1 -0
  16. package/dist/services/planning/SdsCoverageModel.js +138 -0
  17. package/dist/services/planning/SdsPreflightService.d.ts +2 -0
  18. package/dist/services/planning/SdsPreflightService.d.ts.map +1 -1
  19. package/dist/services/planning/SdsPreflightService.js +125 -31
  20. package/dist/services/planning/SdsStructureSignals.d.ts +24 -0
  21. package/dist/services/planning/SdsStructureSignals.d.ts.map +1 -0
  22. package/dist/services/planning/SdsStructureSignals.js +402 -0
  23. package/dist/services/planning/TaskSufficiencyService.d.ts +1 -0
  24. package/dist/services/planning/TaskSufficiencyService.d.ts.map +1 -1
  25. package/dist/services/planning/TaskSufficiencyService.js +218 -285
  26. package/package.json +6 -6
@@ -12,6 +12,8 @@ import { classifyTask } from "../backlog/TaskOrderingHeuristics.js";
12
12
  import { TaskOrderingService } from "../backlog/TaskOrderingService.js";
13
13
  import { QaTestCommandBuilder } from "../execution/QaTestCommandBuilder.js";
14
14
  import { createEpicKeyGenerator, createStoryKeyGenerator, createTaskKeyGenerator, } from "./KeyHelpers.js";
15
+ import { collectSdsCoverageSignalsFromDocs, evaluateSdsCoverage, normalizeCoverageText, } from "./SdsCoverageModel.js";
16
+ import { collectSdsImplementationSignals, extractStructuredPaths, filterImplementationStructuredPaths, headingLooksImplementationRelevant, isStructuredFilePath, normalizeHeadingCandidate, normalizeStructuredPathToken, stripManagedSdsPreflightBlock, } from "./SdsStructureSignals.js";
15
17
  import { TaskSufficiencyService } from "./TaskSufficiencyService.js";
16
18
  import { SdsPreflightService } from "./SdsPreflightService.js";
17
19
  const formatBullets = (items, fallback) => {
@@ -162,6 +164,7 @@ const DOC_CONTEXT_SEGMENTS_PER_DOC = 8;
162
164
  const DOC_CONTEXT_FALLBACK_CHUNK_LENGTH = 480;
163
165
  const SDS_COVERAGE_HINT_HEADING_LIMIT = 24;
164
166
  const SDS_COVERAGE_REPORT_SECTION_LIMIT = 80;
167
+ const SDS_COVERAGE_REPORT_FOLDER_LIMIT = 240;
165
168
  const OPENAPI_HINT_OPERATIONS_LIMIT = 30;
166
169
  const DOCDEX_HANDLE = /^docdex:/i;
167
170
  const DOCDEX_LOCAL_HANDLE = /^docdex:local[-:/]/i;
@@ -170,7 +173,6 @@ const RELATIVE_DOC_PATH_PATTERN = /^(?:\.{1,2}\/)+[A-Za-z0-9._/-]+(?:\.[A-Za-z0-
170
173
  const FUZZY_DOC_CANDIDATE_LIMIT = 64;
171
174
  const DEPENDENCY_SCAN_LINE_LIMIT = 1400;
172
175
  const STARTUP_WAVE_SCAN_LINE_LIMIT = 4000;
173
- const VALID_AREAS = new Set(["web", "adm", "bck", "ops", "infra", "mobile"]);
174
176
  const VALID_TASK_TYPES = new Set(["feature", "bug", "chore", "spike"]);
175
177
  const VALID_EPIC_SERVICE_POLICIES = new Set(["auto-remediate", "fail"]);
176
178
  const CROSS_SERVICE_TAG = "cross_service";
@@ -189,16 +191,12 @@ const inferDocType = (filePath) => {
189
191
  const normalizeArea = (value) => {
190
192
  if (typeof value !== "string")
191
193
  return undefined;
192
- const tokens = value
194
+ const normalized = value
193
195
  .toLowerCase()
194
- .split(/[^a-z]+/)
195
- .map((token) => token.trim())
196
- .filter(Boolean);
197
- for (const token of tokens) {
198
- if (VALID_AREAS.has(token))
199
- return token;
200
- }
201
- return undefined;
196
+ .replace(/[^a-z0-9]+/g, "-")
197
+ .replace(/^-+|-+$/g, "")
198
+ .replace(/-{2,}/g, "-");
199
+ return normalized.length > 0 ? normalized.slice(0, 24) : undefined;
202
200
  };
203
201
  const normalizeTaskType = (value) => {
204
202
  if (typeof value !== "string")
@@ -252,40 +250,6 @@ const normalizeRelatedDocs = (value) => {
252
250
  }
253
251
  return normalized;
254
252
  };
255
- const extractMarkdownHeadings = (value, limit) => {
256
- if (!value)
257
- return [];
258
- const lines = value.split(/\r?\n/);
259
- const headings = [];
260
- for (let index = 0; index < lines.length; index += 1) {
261
- const line = lines[index]?.trim() ?? "";
262
- if (!line)
263
- continue;
264
- const hashHeading = line.match(/^#{1,6}\s+(.+)$/);
265
- if (hashHeading) {
266
- headings.push(hashHeading[1].trim());
267
- }
268
- else if (index + 1 < lines.length &&
269
- /^[=-]{3,}\s*$/.test((lines[index + 1] ?? "").trim()) &&
270
- !line.startsWith("-") &&
271
- !line.startsWith("*")) {
272
- headings.push(line);
273
- }
274
- else {
275
- const numberedHeading = line.match(/^(\d+(?:\.\d+)+)\s+(.+)$/);
276
- if (numberedHeading) {
277
- const headingText = `${numberedHeading[1]} ${numberedHeading[2]}`.trim();
278
- if (/[a-z]/i.test(headingText))
279
- headings.push(headingText);
280
- }
281
- }
282
- if (headings.length >= limit)
283
- break;
284
- }
285
- return uniqueStrings(headings
286
- .map((entry) => entry.replace(/[`*_]/g, "").trim())
287
- .filter(Boolean));
288
- };
289
253
  const pickDistributedIndices = (length, limit) => {
290
254
  if (length <= 0 || limit <= 0)
291
255
  return [];
@@ -574,8 +538,8 @@ const buildTaskDescription = (taskKey, title, description, storyKey, epicKey, re
574
538
  ? "- Task-specific tests are added/updated and green in the task validation loop."
575
539
  : "- Verification evidence is captured in task logs/checklists for this scope.",
576
540
  relatedDocs?.length
577
- ? "- Related contracts/docs are consistent with delivered behavior."
578
- : "- Documentation impact is reviewed and no additional contract docs are required.",
541
+ ? "- Related interfaces/docs are consistent with delivered behavior."
542
+ : "- Documentation impact is reviewed and no additional interface docs are required.",
579
543
  qa?.blockers?.length ? "- Remaining QA blockers are explicit and actionable." : "- QA blockers are resolved or not present.",
580
544
  ];
581
545
  const defaultImplementationPlan = [
@@ -586,7 +550,7 @@ const buildTaskDescription = (taskKey, title, description, storyKey, epicKey, re
586
550
  ];
587
551
  const defaultRisks = dependencies.length
588
552
  ? [`Delivery depends on upstream tasks: ${dependencies.join(", ")}.`]
589
- : ["Keep implementation aligned to SDS/OpenAPI contracts to avoid drift."];
553
+ : ["Keep implementation aligned to documented interfaces and dependency expectations to avoid drift."];
590
554
  return [
591
555
  `* **Task Key**: ${taskKey}`,
592
556
  "* **Objective**",
@@ -680,6 +644,51 @@ const SERVICE_PATH_CONTAINER_SEGMENTS = new Set([
680
644
  "lib",
681
645
  "src",
682
646
  ]);
647
+ const SOURCE_LIKE_PATH_SEGMENTS = new Set([
648
+ "api",
649
+ "app",
650
+ "apps",
651
+ "bin",
652
+ "cmd",
653
+ "components",
654
+ "controllers",
655
+ "handlers",
656
+ "internal",
657
+ "lib",
658
+ "libs",
659
+ "pages",
660
+ "routes",
661
+ "screens",
662
+ "server",
663
+ "servers",
664
+ "spec",
665
+ "specs",
666
+ "src",
667
+ "test",
668
+ "tests",
669
+ "ui",
670
+ "web",
671
+ ]);
672
+ const GENERIC_CONTAINER_PATH_SEGMENTS = new Set([
673
+ "adapters",
674
+ "apps",
675
+ "clients",
676
+ "consoles",
677
+ "domains",
678
+ "engines",
679
+ "features",
680
+ "modules",
681
+ "packages",
682
+ "platforms",
683
+ "plugins",
684
+ "products",
685
+ "servers",
686
+ "services",
687
+ "systems",
688
+ "tools",
689
+ "workers",
690
+ ]);
691
+ const NON_RUNTIME_STRUCTURE_ROOT_SEGMENTS = new Set(["docs", "fixtures", "runbooks", "policies", "policy"]);
683
692
  const SERVICE_NAME_STOPWORDS = new Set([
684
693
  "the",
685
694
  "a",
@@ -737,10 +746,92 @@ const SERVICE_NAME_INVALID = new Set([
737
746
  "repository",
738
747
  "codebase",
739
748
  ]);
749
+ const SERVICE_TEXT_INVALID_STARTERS = new Set([
750
+ "active",
751
+ "are",
752
+ "artifact",
753
+ "artifacts",
754
+ "be",
755
+ "been",
756
+ "being",
757
+ "block",
758
+ "blocks",
759
+ "build",
760
+ "builder",
761
+ "built",
762
+ "canonical",
763
+ "chain",
764
+ "configured",
765
+ "dedicated",
766
+ "deployment",
767
+ "discovered",
768
+ "failure",
769
+ "first",
770
+ "is",
771
+ "last",
772
+ "listing",
773
+ "mode",
774
+ "modes",
775
+ "never",
776
+ "no",
777
+ "not",
778
+ "ordered",
779
+ "owned",
780
+ "private",
781
+ "public",
782
+ "resolved",
783
+ "runtime",
784
+ "second",
785
+ "startup",
786
+ "third",
787
+ "validation",
788
+ "wave",
789
+ "waves",
790
+ "was",
791
+ "were",
792
+ ]);
793
+ const NON_RUNTIME_SERVICE_SINGLETONS = new Set([
794
+ "artifact",
795
+ "artifacts",
796
+ "compose",
797
+ "config",
798
+ "configs",
799
+ "doc",
800
+ "docs",
801
+ "interface",
802
+ "interfaces",
803
+ "key",
804
+ "keys",
805
+ "libraries",
806
+ "library",
807
+ "pdr",
808
+ "read",
809
+ "rfp",
810
+ "sds",
811
+ "script",
812
+ "scripts",
813
+ "src",
814
+ "systemd",
815
+ "test",
816
+ "tests",
817
+ "types",
818
+ "write",
819
+ ]);
820
+ const NON_RUNTIME_PATH_SERVICE_TOKENS = new Set([
821
+ "artifact",
822
+ "artifacts",
823
+ "manifest",
824
+ "manifests",
825
+ "schema",
826
+ "schemas",
827
+ "taxonomy",
828
+ "taxonomies",
829
+ ]);
740
830
  const SERVICE_LABEL_PATTERN = /\b([A-Za-z][A-Za-z0-9]*(?:[ _/-]+[A-Za-z][A-Za-z0-9]*){0,3})\s+(service|api|backend|frontend|worker|gateway|database|db|ui|client|server|adapter)\b/gi;
741
831
  const SERVICE_ARROW_PATTERN = /([A-Za-z][A-Za-z0-9 _/-]{1,80})\s*(?:->|=>|→)\s*([A-Za-z][A-Za-z0-9 _/-]{1,80})/g;
742
832
  const SERVICE_HANDLE_PATTERN = /\b((?:svc|ui|worker)-[a-z0-9-*]+)\b/gi;
743
833
  const WAVE_LABEL_PATTERN = /\bwave\s*([0-9]{1,2})\b/i;
834
+ const TOPOLOGY_HEADING_PATTERN = /\b(service|services|component|components|module|modules|interface|interfaces|runtime|runtimes|worker|workers|client|clients|gateway|gateways|server|servers|engine|engines|pipeline|pipelines|registry|registries|adapter|adapters|processor|processors|daemon|daemons|ops|operations|deployment|deployments|topology)\b/i;
744
835
  const nextUniqueLocalId = (prefix, existing) => {
745
836
  let index = 1;
746
837
  let candidate = `${prefix}-${index}`;
@@ -763,11 +854,21 @@ const looksLikeSdsDoc = (doc) => {
763
854
  .slice(0, 5000);
764
855
  return STRICT_SDS_CONTENT_PATTERN.test(sample);
765
856
  };
857
+ const looksLikePathishDocId = (value) => {
858
+ if (!value)
859
+ return false;
860
+ if (DOCDEX_LOCAL_HANDLE.test(value))
861
+ return false;
862
+ return (value.includes("/") ||
863
+ value.includes("\\") ||
864
+ FILE_EXTENSION_PATTERN.test(value) ||
865
+ STRICT_SDS_PATH_PATTERN.test(value.replace(/\\/g, "/").toLowerCase()));
866
+ };
766
867
  const EPIC_SCHEMA_SNIPPET = `{
767
868
  "epics": [
768
869
  {
769
870
  "localId": "e1",
770
- "area": "web|adm|bck|ops|infra|mobile",
871
+ "area": "documented-area-label",
771
872
  "title": "Epic title",
772
873
  "description": "Epic description using the epic template",
773
874
  "acceptanceCriteria": ["criterion"],
@@ -931,7 +1032,7 @@ export class CreateTasksService {
931
1032
  if (!documents.some((doc) => looksLikeSdsDoc(doc))) {
932
1033
  throw new Error("create-tasks requires at least one SDS document. Add an SDS file (for example docs/sds.md) or pass SDS paths as input.");
933
1034
  }
934
- return this.sortDocsForPlanning(documents);
1035
+ return this.sortDocsForPlanning(this.dedupePlanningDocs(documents.map((doc) => this.sanitizeDocForPlanning(doc))));
935
1036
  }
936
1037
  normalizeDocInputForSet(input) {
937
1038
  if (input.startsWith("docdex:"))
@@ -953,8 +1054,19 @@ export class CreateTasksService {
953
1054
  }
954
1055
  return merged;
955
1056
  }
1057
+ canonicalizeDocPathKey(value) {
1058
+ const trimmed = `${value ?? ""}`.trim();
1059
+ if (!trimmed || DOCDEX_LOCAL_HANDLE.test(trimmed))
1060
+ return undefined;
1061
+ if (path.isAbsolute(trimmed))
1062
+ return path.resolve(trimmed).toLowerCase();
1063
+ if (looksLikePathishDocId(trimmed)) {
1064
+ return path.resolve(this.workspace.workspaceRoot, trimmed).toLowerCase();
1065
+ }
1066
+ return undefined;
1067
+ }
956
1068
  docIdentity(doc) {
957
- const pathKey = `${doc.path ?? ""}`.trim().toLowerCase();
1069
+ const pathKey = this.canonicalizeDocPathKey(doc.path) ?? this.canonicalizeDocPathKey(doc.id);
958
1070
  const idKey = `${doc.id ?? ""}`.trim().toLowerCase();
959
1071
  if (pathKey)
960
1072
  return `path:${pathKey}`;
@@ -978,6 +1090,61 @@ export class CreateTasksService {
978
1090
  }
979
1091
  return merged;
980
1092
  }
1093
+ sanitizeDocForPlanning(doc) {
1094
+ const content = stripManagedSdsPreflightBlock(doc.content);
1095
+ const segments = content !== doc.content
1096
+ ? []
1097
+ : (doc.segments ?? [])
1098
+ .map((segment) => {
1099
+ const sanitizedContent = stripManagedSdsPreflightBlock(segment.content ?? undefined);
1100
+ return {
1101
+ ...segment,
1102
+ content: sanitizedContent ?? segment.content,
1103
+ };
1104
+ })
1105
+ .filter((segment) => `${segment.content ?? ""}`.trim().length > 0 || `${segment.heading ?? ""}`.trim().length > 0);
1106
+ const sanitized = {
1107
+ ...doc,
1108
+ content: content ?? doc.content,
1109
+ segments,
1110
+ };
1111
+ if (looksLikeSdsDoc(sanitized) && `${sanitized.docType ?? ""}`.toUpperCase() !== "SDS") {
1112
+ sanitized.docType = "SDS";
1113
+ }
1114
+ return sanitized;
1115
+ }
1116
+ scorePlanningDoc(doc) {
1117
+ const segmentCount = doc.segments?.length ?? 0;
1118
+ const contentLength = `${doc.content ?? ""}`.length;
1119
+ return ((looksLikeSdsDoc(doc) ? 5000 : 0) +
1120
+ (doc.path ? 400 : 0) +
1121
+ segmentCount * 20 +
1122
+ Math.min(300, contentLength));
1123
+ }
1124
+ mergePlanningDocPair(current, incoming) {
1125
+ const [primary, secondary] = this.scorePlanningDoc(incoming) > this.scorePlanningDoc(current) ? [incoming, current] : [current, incoming];
1126
+ const merged = {
1127
+ ...secondary,
1128
+ ...primary,
1129
+ path: primary.path ?? secondary.path,
1130
+ title: primary.title ?? secondary.title,
1131
+ content: primary.content ?? secondary.content,
1132
+ segments: (primary.segments?.length ?? 0) > 0 ? primary.segments : secondary.segments,
1133
+ };
1134
+ if (looksLikeSdsDoc(merged) && `${merged.docType ?? ""}`.toUpperCase() !== "SDS") {
1135
+ merged.docType = "SDS";
1136
+ }
1137
+ return merged;
1138
+ }
1139
+ dedupePlanningDocs(docs) {
1140
+ const merged = new Map();
1141
+ for (const doc of docs) {
1142
+ const identity = this.docIdentity(doc);
1143
+ const existing = merged.get(identity);
1144
+ merged.set(identity, existing ? this.mergePlanningDocPair(existing, doc) : doc);
1145
+ }
1146
+ return Array.from(merged.values());
1147
+ }
981
1148
  sortDocsForPlanning(docs) {
982
1149
  return [...docs].sort((a, b) => {
983
1150
  const aIsSds = looksLikeSdsDoc(a) ? 0 : 1;
@@ -1158,47 +1325,29 @@ export class CreateTasksService {
1158
1325
  .map((entry) => entry.path);
1159
1326
  }
1160
1327
  normalizeStructurePathToken(value) {
1161
- const normalized = value
1162
- .replace(/\\/g, "/")
1163
- .replace(/^[./]+/, "")
1164
- .replace(/^\/+/, "")
1165
- .trim();
1328
+ const normalized = normalizeStructuredPathToken(value);
1166
1329
  if (!normalized)
1167
1330
  return undefined;
1168
- if (normalized.length > 140)
1169
- return undefined;
1170
- if (!normalized.includes("/"))
1171
- return undefined;
1172
- if (normalized.includes("://"))
1331
+ const root = normalized.split("/")[0]?.toLowerCase();
1332
+ if (root && DOC_SCAN_IGNORE_DIRS.has(root))
1173
1333
  return undefined;
1174
- if (/[\u0000-\u001f]/.test(normalized))
1175
- return undefined;
1176
- const hadTrailingSlash = /\/$/.test(normalized);
1177
- const parts = normalized.split("/").filter(Boolean);
1178
- if (parts.length < 2 && !(hadTrailingSlash && parts.length === 1))
1179
- return undefined;
1180
- if (parts.some((part) => part === "." || part === ".."))
1181
- return undefined;
1182
- if (parts.length === 1 && !TOP_LEVEL_STRUCTURE_PATTERN.test(parts[0]))
1183
- return undefined;
1184
- if (DOC_SCAN_IGNORE_DIRS.has(parts[0].toLowerCase()))
1185
- return undefined;
1186
- return parts.join("/");
1334
+ return normalized;
1187
1335
  }
1188
1336
  extractStructureTargets(docs) {
1189
1337
  const directories = new Set();
1190
1338
  const files = new Set();
1191
1339
  for (const doc of docs) {
1340
+ const relativeDocPath = doc.path ? path.relative(this.workspace.workspaceRoot, doc.path).replace(/\\/g, "/") : undefined;
1341
+ const localDocPath = relativeDocPath && !relativeDocPath.startsWith("..") && !path.isAbsolute(relativeDocPath)
1342
+ ? relativeDocPath
1343
+ : undefined;
1192
1344
  const segments = (doc.segments ?? []).map((segment) => segment.content).filter(Boolean).join("\n");
1193
- const corpus = [doc.title, doc.path, doc.content, segments].filter(Boolean).join("\n");
1194
- for (const match of corpus.matchAll(DOC_PATH_TOKEN_PATTERN)) {
1195
- const token = match[2];
1196
- if (!token)
1197
- continue;
1345
+ const corpus = [localDocPath, doc.content, segments].filter(Boolean).join("\n");
1346
+ for (const token of filterImplementationStructuredPaths(extractStructuredPaths(corpus, 256))) {
1198
1347
  const normalized = this.normalizeStructurePathToken(token);
1199
1348
  if (!normalized)
1200
1349
  continue;
1201
- if (FILE_EXTENSION_PATTERN.test(path.basename(normalized))) {
1350
+ if (isStructuredFilePath(path.basename(normalized))) {
1202
1351
  files.add(normalized);
1203
1352
  const parent = path.dirname(normalized).replace(/\\/g, "/");
1204
1353
  if (parent && parent !== ".")
@@ -1238,6 +1387,61 @@ export class CreateTasksService {
1238
1387
  return undefined;
1239
1388
  return candidate.length >= 2 ? candidate : undefined;
1240
1389
  }
1390
+ normalizeTextServiceName(value) {
1391
+ const candidate = this.normalizeServiceName(value);
1392
+ if (!candidate)
1393
+ return undefined;
1394
+ const tokens = candidate.split(" ").filter(Boolean);
1395
+ if (tokens.length === 0 || tokens.length > 3)
1396
+ return undefined;
1397
+ const first = tokens[0] ?? "";
1398
+ if (SERVICE_TEXT_INVALID_STARTERS.has(first))
1399
+ return undefined;
1400
+ if (tokens.length === 1) {
1401
+ if (first.length < 3)
1402
+ return undefined;
1403
+ if (SERVICE_NAME_INVALID.has(first) || NON_RUNTIME_SERVICE_SINGLETONS.has(first))
1404
+ return undefined;
1405
+ if (SERVICE_NAME_STOPWORDS.has(first))
1406
+ return undefined;
1407
+ }
1408
+ return candidate;
1409
+ }
1410
+ isLikelyServiceContainerSegment(parts, index) {
1411
+ const segment = parts[index];
1412
+ if (!segment)
1413
+ return false;
1414
+ if (SERVICE_PATH_CONTAINER_SEGMENTS.has(segment))
1415
+ return true;
1416
+ if (index !== 0)
1417
+ return false;
1418
+ const next = parts[index + 1];
1419
+ if (!next)
1420
+ return false;
1421
+ const following = parts[index + 2];
1422
+ const nextLooksSpecific = !SERVICE_PATH_CONTAINER_SEGMENTS.has(next) &&
1423
+ !NON_RUNTIME_STRUCTURE_ROOT_SEGMENTS.has(next) &&
1424
+ !SOURCE_LIKE_PATH_SEGMENTS.has(next) &&
1425
+ !isStructuredFilePath(next);
1426
+ if (!nextLooksSpecific)
1427
+ return false;
1428
+ if (GENERIC_CONTAINER_PATH_SEGMENTS.has(segment)) {
1429
+ if (!following)
1430
+ return true;
1431
+ return SOURCE_LIKE_PATH_SEGMENTS.has(following) || isStructuredFilePath(following);
1432
+ }
1433
+ return false;
1434
+ }
1435
+ normalizePathDerivedServiceName(value) {
1436
+ const candidate = this.normalizeServiceName(value);
1437
+ if (!candidate)
1438
+ return undefined;
1439
+ if (NON_RUNTIME_SERVICE_SINGLETONS.has(candidate))
1440
+ return undefined;
1441
+ if (candidate.split(" ").some((token) => NON_RUNTIME_PATH_SERVICE_TOKENS.has(token)))
1442
+ return undefined;
1443
+ return candidate;
1444
+ }
1241
1445
  deriveServiceFromPathToken(pathToken) {
1242
1446
  const parts = pathToken
1243
1447
  .replace(/\\/g, "/")
@@ -1246,11 +1450,18 @@ export class CreateTasksService {
1246
1450
  .filter(Boolean);
1247
1451
  if (!parts.length)
1248
1452
  return undefined;
1453
+ if (NON_RUNTIME_STRUCTURE_ROOT_SEGMENTS.has(parts[0] ?? ""))
1454
+ return undefined;
1455
+ if (parts.length === 1 && isStructuredFilePath(parts[0] ?? ""))
1456
+ return undefined;
1249
1457
  let idx = 0;
1250
- while (idx < parts.length - 1 && SERVICE_PATH_CONTAINER_SEGMENTS.has(parts[idx])) {
1458
+ while (idx < parts.length - 1 && this.isLikelyServiceContainerSegment(parts, idx)) {
1251
1459
  idx += 1;
1252
1460
  }
1253
- return this.normalizeServiceName(parts[idx] ?? parts[0]);
1461
+ const candidate = parts[idx] ?? parts[0];
1462
+ if (isStructuredFilePath(candidate))
1463
+ return undefined;
1464
+ return this.normalizePathDerivedServiceName(candidate);
1254
1465
  }
1255
1466
  addServiceAlias(aliases, rawValue) {
1256
1467
  const canonical = this.normalizeServiceName(rawValue);
@@ -1266,6 +1477,10 @@ export class CreateTasksService {
1266
1477
  .trim();
1267
1478
  if (alias)
1268
1479
  existing.add(alias);
1480
+ if (alias.endsWith("s") && alias.length > 3)
1481
+ existing.add(alias.slice(0, -1));
1482
+ if (!alias.endsWith("s") && alias.length > 2)
1483
+ existing.add(`${alias}s`);
1269
1484
  aliases.set(canonical, existing);
1270
1485
  return canonical;
1271
1486
  }
@@ -1275,7 +1490,7 @@ export class CreateTasksService {
1275
1490
  const mentions = new Set();
1276
1491
  for (const match of text.matchAll(SERVICE_LABEL_PATTERN)) {
1277
1492
  const phrase = `${match[1] ?? ""} ${match[2] ?? ""}`.trim();
1278
- const normalized = this.normalizeServiceName(phrase);
1493
+ const normalized = this.normalizeTextServiceName(phrase);
1279
1494
  if (normalized)
1280
1495
  mentions.add(normalized);
1281
1496
  }
@@ -1289,7 +1504,18 @@ export class CreateTasksService {
1289
1504
  }
1290
1505
  return Array.from(mentions);
1291
1506
  }
1292
- resolveServiceMentionFromPhrase(phrase, aliases) {
1507
+ deriveServiceMentionFromPathPhrase(phrase) {
1508
+ for (const match of phrase.matchAll(DOC_PATH_TOKEN_PATTERN)) {
1509
+ const token = match[2];
1510
+ if (!token)
1511
+ continue;
1512
+ const derived = this.deriveServiceFromPathToken(token);
1513
+ if (derived)
1514
+ return derived;
1515
+ }
1516
+ return undefined;
1517
+ }
1518
+ resolveServiceMentionFromPhrase(phrase, aliases, options = {}) {
1293
1519
  const normalizedPhrase = phrase
1294
1520
  .toLowerCase()
1295
1521
  .replace(/[._/-]+/g, " ")
@@ -1312,6 +1538,11 @@ export class CreateTasksService {
1312
1538
  }
1313
1539
  if (best)
1314
1540
  return best.key;
1541
+ const pathDerived = this.deriveServiceMentionFromPathPhrase(phrase);
1542
+ if (pathDerived)
1543
+ return pathDerived;
1544
+ if (!options.allowAliasRegistration)
1545
+ return undefined;
1315
1546
  const mention = this.extractServiceMentionsFromText(phrase)[0];
1316
1547
  if (!mention)
1317
1548
  return undefined;
@@ -1395,10 +1626,17 @@ export class CreateTasksService {
1395
1626
  resolved.add(canonical);
1396
1627
  }
1397
1628
  if (resolved.size === 0) {
1398
- for (const mention of this.extractServiceMentionsFromText(cell)) {
1399
- const canonical = this.addServiceAlias(aliases, mention);
1400
- if (canonical)
1401
- resolved.add(canonical);
1629
+ const normalizedCell = this.normalizeServiceLookupKey(cell);
1630
+ const haystack = normalizedCell ? ` ${normalizedCell} ` : "";
1631
+ for (const [service, names] of aliases.entries()) {
1632
+ for (const alias of names) {
1633
+ if (!alias || alias.length < 2)
1634
+ continue;
1635
+ if (!haystack.includes(` ${alias} `))
1636
+ continue;
1637
+ resolved.add(service);
1638
+ break;
1639
+ }
1402
1640
  }
1403
1641
  }
1404
1642
  return Array.from(resolved);
@@ -1441,6 +1679,31 @@ export class CreateTasksService {
1441
1679
  for (const service of resolveServicesFromCell(cells[0]))
1442
1680
  registerWave(service, waveIndex);
1443
1681
  }
1682
+ for (let index = 0; index < lines.length; index += 1) {
1683
+ const line = lines[index];
1684
+ const waveMatch = line.match(WAVE_LABEL_PATTERN);
1685
+ if (!waveMatch)
1686
+ continue;
1687
+ const waveIndex = Number.parseInt(waveMatch[1] ?? "", 10);
1688
+ if (!Number.isFinite(waveIndex))
1689
+ continue;
1690
+ const contextLines = [line];
1691
+ for (let cursor = index + 1; cursor < lines.length; cursor += 1) {
1692
+ const next = lines[cursor];
1693
+ if (WAVE_LABEL_PATTERN.test(next))
1694
+ break;
1695
+ if (/^#{1,6}\s+/.test(next))
1696
+ break;
1697
+ if (/^(?:[-*]|\d+[.)])\s+/.test(next))
1698
+ break;
1699
+ contextLines.push(next);
1700
+ if (contextLines.length >= 4)
1701
+ break;
1702
+ }
1703
+ for (const service of resolveServicesFromCell(contextLines.join(" "))) {
1704
+ registerWave(service, waveIndex);
1705
+ }
1706
+ }
1444
1707
  const startupWaves = Array.from(startupWavesMap.entries())
1445
1708
  .sort((a, b) => a[0] - b[0])
1446
1709
  .map(([wave, services]) => ({ wave, services: Array.from(services).sort((a, b) => a.localeCompare(b)) }));
@@ -1535,15 +1798,19 @@ export class CreateTasksService {
1535
1798
  }
1536
1799
  }
1537
1800
  const structureTargets = this.extractStructureTargets(docs);
1538
- for (const token of [...structureTargets.directories, ...structureTargets.files]) {
1801
+ const structureTokens = [...structureTargets.directories, ...structureTargets.files];
1802
+ for (const token of structureTokens) {
1803
+ if (!token.includes("/") &&
1804
+ !isStructuredFilePath(path.basename(token)) &&
1805
+ structureTokens.some((candidate) => candidate !== token && candidate.startsWith(`${token}/`))) {
1806
+ continue;
1807
+ }
1539
1808
  register(this.deriveServiceFromPathToken(token));
1540
1809
  }
1541
1810
  for (const match of docsText.matchAll(SERVICE_HANDLE_PATTERN))
1542
1811
  register(match[1]);
1543
1812
  for (const match of planText.matchAll(SERVICE_HANDLE_PATTERN))
1544
1813
  register(match[1]);
1545
- for (const mention of this.extractServiceMentionsFromText(docsText))
1546
- register(mention);
1547
1814
  for (const mention of this.extractServiceMentionsFromText(planText))
1548
1815
  register(mention);
1549
1816
  const corpus = [docsText, planText].filter(Boolean);
@@ -1570,6 +1837,65 @@ export class CreateTasksService {
1570
1837
  foundationalDependencies: waveHints.foundationalDependencies,
1571
1838
  };
1572
1839
  }
1840
+ summarizeTopologySignals(docs) {
1841
+ const structureTargets = this.extractStructureTargets(docs);
1842
+ const structureServices = uniqueStrings([...structureTargets.directories, ...structureTargets.files]
1843
+ .map((token) => this.deriveServiceFromPathToken(token))
1844
+ .filter((value) => Boolean(value))).slice(0, 24);
1845
+ const topologyHeadings = this.extractSdsSectionCandidates(docs, 64)
1846
+ .filter((heading) => TOPOLOGY_HEADING_PATTERN.test(heading))
1847
+ .slice(0, 24);
1848
+ const docsText = docs
1849
+ .map((doc) => [doc.title, doc.path, doc.content, ...(doc.segments ?? []).map((segment) => segment.content)].filter(Boolean).join("\n"))
1850
+ .join("\n");
1851
+ const dependencyPairs = uniqueStrings(this.collectDependencyStatements(docsText).map((statement) => `${statement.dependent} -> ${statement.dependency}`)).slice(0, 16);
1852
+ const waveMentions = docsText
1853
+ .split(/\r?\n/)
1854
+ .map((line) => line.trim())
1855
+ .filter(Boolean)
1856
+ .filter((line) => WAVE_LABEL_PATTERN.test(line))
1857
+ .slice(0, 16);
1858
+ return {
1859
+ structureServices,
1860
+ topologyHeadings,
1861
+ dependencyPairs,
1862
+ waveMentions,
1863
+ };
1864
+ }
1865
+ validateTopologyExtraction(projectKey, docs, graph) {
1866
+ const topologySignals = this.summarizeTopologySignals(docs);
1867
+ const hasServiceSignals = topologySignals.structureServices.length > 0 ||
1868
+ topologySignals.topologyHeadings.length > 0 ||
1869
+ topologySignals.dependencyPairs.length > 0;
1870
+ if (hasServiceSignals && graph.services.length === 0) {
1871
+ const signalSummary = uniqueStrings([
1872
+ ...topologySignals.structureServices.map((service) => `structure:${service}`),
1873
+ ...topologySignals.topologyHeadings.map((heading) => `heading:${heading}`),
1874
+ ...topologySignals.dependencyPairs.map((pair) => `dependency:${pair}`),
1875
+ ])
1876
+ .slice(0, 8)
1877
+ .join("; ");
1878
+ throw new Error(`create-tasks failed internal topology extraction for project "${projectKey}". SDS includes runtime topology signals but no services were resolved. Signals: ${signalSummary || "unavailable"}`);
1879
+ }
1880
+ if (topologySignals.waveMentions.length > 0 && graph.startupWaves.length === 0) {
1881
+ throw new Error(`create-tasks failed internal topology extraction for project "${projectKey}". SDS includes startup wave signals but no startup waves were resolved. Signals: ${topologySignals.waveMentions.slice(0, 6).join("; ")}`);
1882
+ }
1883
+ return topologySignals;
1884
+ }
1885
+ derivePlanningArtifacts(projectKey, docs, plan) {
1886
+ const discoveryGraph = this.buildServiceDependencyGraph(plan, docs);
1887
+ const topologySignals = this.validateTopologyExtraction(projectKey, docs, discoveryGraph);
1888
+ const serviceCatalog = this.buildServiceCatalogArtifact(projectKey, docs, discoveryGraph);
1889
+ const projectBuildMethod = this.buildProjectConstructionMethod(docs, discoveryGraph);
1890
+ const projectBuildPlan = this.buildProjectPlanArtifact(projectKey, docs, discoveryGraph, projectBuildMethod);
1891
+ return {
1892
+ discoveryGraph,
1893
+ topologySignals,
1894
+ serviceCatalog,
1895
+ projectBuildMethod,
1896
+ projectBuildPlan,
1897
+ };
1898
+ }
1573
1899
  normalizeServiceId(value) {
1574
1900
  const normalizedName = this.normalizeServiceName(value);
1575
1901
  if (!normalizedName)
@@ -1667,6 +1993,7 @@ export class CreateTasksService {
1667
1993
  const sourceDocs = docs
1668
1994
  .map((doc) => doc.path ?? (doc.id ? `docdex:${doc.id}` : doc.title ?? "doc"))
1669
1995
  .filter((value) => Boolean(value))
1996
+ .filter((value, index, items) => items.indexOf(value) === index)
1670
1997
  .slice(0, 24);
1671
1998
  return {
1672
1999
  projectKey,
@@ -1889,8 +2216,16 @@ export class CreateTasksService {
1889
2216
  buildProjectConstructionMethod(docs, graph) {
1890
2217
  const toLabel = (value) => value.replace(/\s+/g, "-");
1891
2218
  const structureTargets = this.extractStructureTargets(docs);
1892
- const topDirectories = structureTargets.directories.slice(0, 10);
1893
- const topFiles = structureTargets.files.slice(0, 10);
2219
+ const sourceDocPaths = new Set(docs
2220
+ .map((doc) => (doc.path ? path.relative(this.workspace.workspaceRoot, doc.path).replace(/\\/g, "/") : undefined))
2221
+ .filter((value) => Boolean(value)));
2222
+ const sourceDocDirectories = new Set(Array.from(sourceDocPaths)
2223
+ .map((docPath) => path.posix.dirname(docPath))
2224
+ .filter((dir) => dir && dir !== "."));
2225
+ const buildDirectories = structureTargets.directories.filter((dir) => !sourceDocDirectories.has(dir));
2226
+ const buildFiles = structureTargets.files.filter((file) => !sourceDocPaths.has(file));
2227
+ const topDirectories = (buildDirectories.length > 0 ? buildDirectories : structureTargets.directories).slice(0, 10);
2228
+ const topFiles = (buildFiles.length > 0 ? buildFiles : structureTargets.files).slice(0, 10);
1894
2229
  const startupWaveLines = graph.startupWaves
1895
2230
  .slice(0, 8)
1896
2231
  .map((wave) => `- Wave ${wave.wave}: ${wave.services.map(toLabel).join(", ")}`);
@@ -1915,7 +2250,9 @@ export class CreateTasksService {
1915
2250
  ...(graph.foundationalDependencies.length > 0
1916
2251
  ? graph.foundationalDependencies.map((dependency) => ` - foundation: ${dependency}`)
1917
2252
  : [" - foundation: infer runtime prerequisites from SDS deployment sections"]),
1918
- ...(startupWaveLines.length > 0 ? startupWaveLines : [" - startup waves: infer from dependency contracts"]),
2253
+ ...(startupWaveLines.length > 0
2254
+ ? startupWaveLines
2255
+ : [" - startup waves: infer from documented dependency constraints"]),
1919
2256
  "3) Implement services by dependency direction and startup wave.",
1920
2257
  ` - service order: ${serviceOrderLine}`,
1921
2258
  ...(dependencyPairs.length > 0
@@ -1929,6 +2266,7 @@ export class CreateTasksService {
1929
2266
  const sourceDocs = docs
1930
2267
  .map((doc) => doc.path ?? (doc.id ? `docdex:${doc.id}` : doc.title ?? "doc"))
1931
2268
  .filter((value) => Boolean(value))
2269
+ .filter((value, index, items) => items.indexOf(value) === index)
1932
2270
  .slice(0, 24);
1933
2271
  return {
1934
2272
  projectKey,
@@ -2202,7 +2540,7 @@ export class CreateTasksService {
2202
2540
  .slice(0, 12);
2203
2541
  const bootstrapEpic = {
2204
2542
  localId: epicLocalId,
2205
- area: normalizeArea(projectKey) ?? "infra",
2543
+ area: normalizeArea(projectKey) ?? "core",
2206
2544
  title: "Codebase Foundation and Structure Setup",
2207
2545
  description: "Create the SDS-defined codebase scaffold first (folders/files/service boundaries) before feature implementation tasks.",
2208
2546
  acceptanceCriteria: [
@@ -2564,17 +2902,32 @@ export class CreateTasksService {
2564
2902
  for (const doc of docs) {
2565
2903
  if (!looksLikeSdsDoc(doc))
2566
2904
  continue;
2905
+ const scanLimit = Math.max(limit * 4, limit + 12);
2906
+ const contentHeadings = collectSdsImplementationSignals(doc.content ?? "", {
2907
+ headingLimit: scanLimit,
2908
+ folderLimit: 0,
2909
+ }).sectionHeadings;
2567
2910
  const segmentHeadings = (doc.segments ?? [])
2568
- .map((segment) => segment.heading?.trim())
2911
+ .map((segment) => normalizeHeadingCandidate(segment.heading?.trim() ?? ""))
2569
2912
  .filter((heading) => Boolean(heading));
2570
2913
  const segmentContentHeadings = (doc.segments ?? [])
2571
- .flatMap((segment) => extractMarkdownHeadings(segment.content ?? "", Math.max(6, Math.ceil(limit / 4))))
2572
- .slice(0, limit);
2573
- const contentHeadings = extractMarkdownHeadings(doc.content ?? "", limit);
2574
- for (const heading of [...segmentHeadings, ...segmentContentHeadings, ...contentHeadings]) {
2575
- const normalized = heading.replace(/[`*_]/g, "").trim();
2914
+ .flatMap((segment) => collectSdsImplementationSignals(segment.content ?? "", {
2915
+ headingLimit: Math.max(12, Math.ceil(scanLimit / 2)),
2916
+ folderLimit: 0,
2917
+ }).sectionHeadings)
2918
+ .slice(0, scanLimit);
2919
+ for (const heading of uniqueStrings([...contentHeadings, ...segmentHeadings, ...segmentContentHeadings])) {
2920
+ const normalized = normalizeHeadingCandidate(heading);
2576
2921
  if (!normalized)
2577
2922
  continue;
2923
+ if (!headingLooksImplementationRelevant(normalized))
2924
+ continue;
2925
+ if (/^software design specification$/i.test(normalized))
2926
+ continue;
2927
+ if (/^(?:\d+(?:\.\d+)*\.?\s*)?roles$/i.test(normalized))
2928
+ continue;
2929
+ if (sections.includes(normalized))
2930
+ continue;
2578
2931
  sections.push(normalized);
2579
2932
  if (sections.length >= limit)
2580
2933
  break;
@@ -2654,8 +3007,7 @@ export class CreateTasksService {
2654
3007
  }
2655
3008
  return { docSummary: blocks.join("\n\n") || "(no docs)", warnings };
2656
3009
  }
2657
- buildPrompt(projectKey, docs, projectBuildMethod, serviceCatalog, options) {
2658
- const docSummary = docs.map((doc, idx) => describeDoc(doc, idx)).join("\n");
3010
+ buildPrompt(projectKey, docSummary, projectBuildMethod, serviceCatalog, options) {
2659
3011
  const serviceCatalogSummary = this.buildServiceCatalogPromptSummary(serviceCatalog);
2660
3012
  const limits = [
2661
3013
  options.maxEpics ? `Limit epics to ${options.maxEpics}.` : "",
@@ -2677,6 +3029,8 @@ export class CreateTasksService {
2677
3029
  "- Keep epics actionable and implementation-oriented; avoid glossary/admin-only epics.",
2678
3030
  "- Prefer dependency-first sequencing: foundational setup epics before dependent feature epics.",
2679
3031
  "- Keep output derived from docs; do not assume stacks unless docs state them.",
3032
+ "- Use canonical documented names for modules, services, interfaces, commands, schemas, and files exactly as they appear in Docs and the project construction method.",
3033
+ "- Do not rename explicit documented targets or replace them with invented alternatives.",
2680
3034
  "- serviceIds is required and must contain one or more ids from the phase-0 service catalog below.",
2681
3035
  `- If an epic spans multiple services, include tag \"${CROSS_SERVICE_TAG}\" in tags.`,
2682
3036
  "Project construction method to follow:",
@@ -2727,9 +3081,9 @@ export class CreateTasksService {
2727
3081
  },
2728
3082
  {
2729
3083
  localId: "task-2",
2730
- title: "Integrate core contracts and dependencies",
3084
+ title: "Integrate core dependencies and interfaces",
2731
3085
  type: "feature",
2732
- description: "Wire key contracts/interfaces and dependency paths so core behavior can execute end-to-end.",
3086
+ description: "Wire key dependencies, interfaces, and integration paths so core behavior can execute end-to-end.",
2733
3087
  estimatedStoryPoints: 3,
2734
3088
  priorityHint: 20,
2735
3089
  dependsOnKeys: ["task-1"],
@@ -3000,6 +3354,7 @@ export class CreateTasksService {
3000
3354
  "- Use docdex handles when citing docs.",
3001
3355
  "- Keep stories direct and implementation-oriented; avoid placeholder-only narrative sections.",
3002
3356
  "- Keep story sequencing aligned with the project construction method.",
3357
+ "- Preserve canonical documented names for modules, services, interfaces, commands, schemas, and files exactly as written.",
3003
3358
  `Epic context (key=${epic.key ?? epic.localId ?? "TBD"}):`,
3004
3359
  epic.description ?? "(no description provided)",
3005
3360
  `Epic serviceIds: ${(epic.serviceIds ?? []).join(", ") || "(not provided)"}`,
@@ -3058,6 +3413,7 @@ export class CreateTasksService {
3058
3413
  "- Order tasks from foundational prerequisites to dependents based on documented dependency direction and startup constraints.",
3059
3414
  "- Avoid placeholder wording (TBD, TODO, to be defined, generic follow-up phrases).",
3060
3415
  "- Avoid documentation-only or glossary-only tasks unless story acceptance explicitly requires them.",
3416
+ "- Preserve canonical documented names for modules, services, interfaces, commands, schemas, and files exactly as written.",
3061
3417
  "- Use docdex handles when citing docs.",
3062
3418
  "- If OPENAPI_HINTS are present in Docs, align tasks with hinted service/capability/stage/test_requirements.",
3063
3419
  "- If SDS_COVERAGE_HINTS are present in Docs, cover the relevant SDS sections in implementation tasks.",
@@ -3157,11 +3513,11 @@ export class CreateTasksService {
3157
3513
  },
3158
3514
  {
3159
3515
  localId: "t-fallback-2",
3160
- title: `Integrate contracts for ${story.title}`,
3516
+ title: `Integrate dependencies for ${story.title}`,
3161
3517
  type: "feature",
3162
3518
  description: [
3163
- `Integrate dependent contracts/interfaces for "${story.title}" after core scope implementation.`,
3164
- "Align internal/external interfaces, data contracts, and dependency wiring with SDS/OpenAPI context.",
3519
+ `Integrate dependent interfaces and runtime dependencies for "${story.title}" after core scope implementation.`,
3520
+ "Align internal/external interfaces, data shapes, and dependency wiring with the documented context.",
3165
3521
  "Record dependency rationale and compatibility constraints in the task output.",
3166
3522
  ].join("\n"),
3167
3523
  estimatedStoryPoints: 3,
@@ -3261,50 +3617,99 @@ export class CreateTasksService {
3261
3617
  }
3262
3618
  return { epics: planEpics, stories: planStories, tasks: planTasks };
3263
3619
  }
3264
- buildSdsCoverageReport(projectKey, docs, plan) {
3265
- const sections = this.extractSdsSectionCandidates(docs, SDS_COVERAGE_REPORT_SECTION_LIMIT);
3266
- const normalize = (value) => value
3267
- .toLowerCase()
3268
- .replace(/[`*_]/g, "")
3269
- .replace(/[^a-z0-9\s/-]+/g, " ")
3270
- .replace(/\s+/g, " ")
3271
- .trim();
3272
- const planCorpus = normalize([
3620
+ buildCoverageCorpus(plan) {
3621
+ return normalizeCoverageText([
3273
3622
  ...plan.epics.map((epic) => `${epic.title} ${epic.description ?? ""} ${(epic.acceptanceCriteria ?? []).join(" ")}`),
3274
3623
  ...plan.stories.map((story) => `${story.title} ${story.userStory ?? ""} ${story.description ?? ""} ${(story.acceptanceCriteria ?? []).join(" ")}`),
3275
3624
  ...plan.tasks.map((task) => `${task.title} ${task.description ?? ""}`),
3276
3625
  ].join("\n"));
3277
- const matched = [];
3278
- const unmatched = [];
3279
- for (const section of sections) {
3280
- const normalizedSection = normalize(section);
3281
- if (!normalizedSection)
3282
- continue;
3283
- const keywords = normalizedSection
3284
- .split(/\s+/)
3285
- .filter((token) => token.length >= 4)
3286
- .slice(0, 6);
3287
- const hasDirectMatch = normalizedSection.length >= 6 && planCorpus.includes(normalizedSection);
3288
- const hasKeywordMatch = keywords.some((keyword) => planCorpus.includes(keyword));
3289
- if (hasDirectMatch || hasKeywordMatch) {
3290
- matched.push(section);
3626
+ }
3627
+ collectCoverageAnchorsFromBacklog(backlog) {
3628
+ const anchors = new Set();
3629
+ for (const task of backlog.tasks) {
3630
+ const sufficiencyAudit = task.metadata?.sufficiencyAudit;
3631
+ const anchor = typeof sufficiencyAudit?.anchor === "string" ? sufficiencyAudit.anchor.trim() : "";
3632
+ if (anchor)
3633
+ anchors.add(anchor);
3634
+ if (Array.isArray(sufficiencyAudit?.anchors)) {
3635
+ for (const value of sufficiencyAudit.anchors) {
3636
+ if (typeof value !== "string" || value.trim().length === 0)
3637
+ continue;
3638
+ anchors.add(value.trim());
3639
+ }
3291
3640
  }
3292
- else {
3293
- unmatched.push(section);
3641
+ }
3642
+ return anchors;
3643
+ }
3644
+ assertCoverageConsistency(projectKey, report, expected) {
3645
+ const sort = (values) => [...values].sort((left, right) => left.localeCompare(right));
3646
+ const sameSectionGaps = JSON.stringify(sort(report.missingSectionHeadings)) === JSON.stringify(sort(expected.missingSectionHeadings));
3647
+ const sameFolderGaps = JSON.stringify(sort(report.missingFolderEntries)) === JSON.stringify(sort(expected.missingFolderEntries));
3648
+ if (report.totalSignals !== expected.totalSignals ||
3649
+ report.coverageRatio !== expected.coverageRatio ||
3650
+ !sameSectionGaps ||
3651
+ !sameFolderGaps) {
3652
+ throw new Error(`create-tasks produced inconsistent coverage artifacts for project "${projectKey}". coverage-report.json diverged from task sufficiency coverage.`);
3653
+ }
3654
+ }
3655
+ async loadExpectedCoverageFromSufficiencyReport(reportPath) {
3656
+ if (!reportPath)
3657
+ return undefined;
3658
+ try {
3659
+ const raw = await fs.readFile(reportPath, "utf8");
3660
+ const parsed = JSON.parse(raw);
3661
+ const finalCoverage = parsed.finalCoverage;
3662
+ if (!finalCoverage)
3663
+ return undefined;
3664
+ if (typeof finalCoverage.coverageRatio !== "number" ||
3665
+ typeof finalCoverage.totalSignals !== "number" ||
3666
+ !Array.isArray(finalCoverage.missingSectionHeadings) ||
3667
+ !Array.isArray(finalCoverage.missingFolderEntries)) {
3668
+ return undefined;
3294
3669
  }
3670
+ return {
3671
+ coverageRatio: finalCoverage.coverageRatio,
3672
+ totalSignals: finalCoverage.totalSignals,
3673
+ missingSectionHeadings: finalCoverage.missingSectionHeadings.filter((value) => typeof value === "string"),
3674
+ missingFolderEntries: finalCoverage.missingFolderEntries.filter((value) => typeof value === "string"),
3675
+ };
3676
+ }
3677
+ catch {
3678
+ return undefined;
3295
3679
  }
3296
- const totalSections = matched.length + unmatched.length;
3297
- const coverageRatio = totalSections === 0 ? 1 : matched.length / totalSections;
3680
+ }
3681
+ buildSdsCoverageReport(projectKey, docs, plan, existingAnchors = new Set()) {
3682
+ const coverageSignals = collectSdsCoverageSignalsFromDocs(docs, {
3683
+ headingLimit: SDS_COVERAGE_REPORT_SECTION_LIMIT,
3684
+ folderLimit: SDS_COVERAGE_REPORT_FOLDER_LIMIT,
3685
+ });
3686
+ const coverage = evaluateSdsCoverage(this.buildCoverageCorpus(plan), {
3687
+ sectionHeadings: coverageSignals.sectionHeadings,
3688
+ folderEntries: coverageSignals.folderEntries,
3689
+ }, existingAnchors);
3690
+ const matchedSections = coverageSignals.sectionHeadings.filter((heading) => !coverage.missingSectionHeadings.includes(heading));
3691
+ const matchedFolderEntries = coverageSignals.folderEntries.filter((entry) => !coverage.missingFolderEntries.includes(entry));
3298
3692
  return {
3299
3693
  projectKey,
3300
3694
  generatedAt: new Date().toISOString(),
3301
- totalSections,
3302
- matched,
3303
- unmatched,
3304
- coverageRatio: Number(coverageRatio.toFixed(4)),
3305
- notes: totalSections === 0
3306
- ? ["No SDS section headings detected; coverage defaults to 1.0."]
3307
- : ["Coverage is heading-based heuristic match between SDS sections and generated epic/story/task corpus."],
3695
+ totalSignals: coverage.totalSignals,
3696
+ totalSections: coverageSignals.sectionHeadings.length,
3697
+ totalFolderEntries: coverageSignals.folderEntries.length,
3698
+ rawSectionSignals: coverageSignals.rawSectionHeadings.length,
3699
+ rawFolderSignals: coverageSignals.rawFolderEntries.length,
3700
+ skippedHeadingSignals: coverageSignals.skippedHeadingSignals,
3701
+ skippedFolderSignals: coverageSignals.skippedFolderSignals,
3702
+ matched: matchedSections,
3703
+ unmatched: coverage.missingSectionHeadings,
3704
+ matchedSections,
3705
+ missingSectionHeadings: coverage.missingSectionHeadings,
3706
+ matchedFolderEntries,
3707
+ missingFolderEntries: coverage.missingFolderEntries,
3708
+ existingAnchorsCount: existingAnchors.size,
3709
+ coverageRatio: coverage.coverageRatio,
3710
+ notes: coverage.totalSignals === 0
3711
+ ? ["No actionable SDS implementation signals detected; coverage defaults to 1.0."]
3712
+ : ["Coverage uses the same heading and folder signal model as task-sufficiency-audit."],
3308
3713
  };
3309
3714
  }
3310
3715
  async acquirePlanArtifactLock(baseDir, options) {
@@ -3365,7 +3770,166 @@ export class CreateTasksService {
3365
3770
  }
3366
3771
  }
3367
3772
  }
3368
- async writePlanArtifacts(projectKey, plan, docSummary, docs, buildPlan, serviceCatalog) {
3773
+ splitPersistedAcceptanceCriteria(value) {
3774
+ if (!value)
3775
+ return [];
3776
+ return uniqueStrings(value
3777
+ .split(/\r?\n/)
3778
+ .map((line) => line.replace(/^[-*]\s+/, "").trim())
3779
+ .filter(Boolean));
3780
+ }
3781
/**
 * Load the persisted backlog (epics, stories, tasks, task dependencies) for
 * one project, ordered by priority then creation time then key.
 *
 * Supports two repo shapes:
 *  - a repo exposing getDb(): rows are read from SQLite tables and mapped
 *    from snake_case columns to camelCase objects;
 *  - otherwise the repo is treated as a plain in-memory store with
 *    epics/stories/tasks/deps arrays (presumably a test double — TODO confirm).
 *
 * @param {string} projectId - internal project row id.
 * @returns {Promise<{epics: Array, stories: Array, tasks: Array, dependencies: Array}>}
 */
async loadPersistedBacklog(projectId) {
    const repoLike = this.workspaceRepo;
    // In-memory fallback: filter the arrays directly; deps are matched via
    // the loaded task id set.
    if (typeof repoLike.getDb !== "function") {
        const epics = Array.isArray(repoLike.epics)
            ? repoLike.epics.filter((row) => row.projectId === projectId)
            : [];
        const stories = Array.isArray(repoLike.stories)
            ? repoLike.stories.filter((row) => row.projectId === projectId)
            : [];
        const tasks = Array.isArray(repoLike.tasks)
            ? repoLike.tasks.filter((row) => row.projectId === projectId)
            : [];
        const taskIds = new Set(tasks.map((task) => task.id));
        const dependencies = Array.isArray(repoLike.deps)
            ? repoLike.deps.filter((row) => taskIds.has(row.taskId))
            : [];
        return { epics, stories, tasks, dependencies };
    }
    const db = repoLike.getDb();
    // NULL priorities sort last via COALESCE(priority, 2147483647).
    const epicRows = await db.all(`SELECT id, project_id, key, title, description, story_points_total, priority, metadata_json, created_at, updated_at
       FROM epics
       WHERE project_id = ?
       ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, projectId);
    const storyRows = await db.all(`SELECT id, project_id, epic_id, key, title, description, acceptance_criteria, story_points_total, priority, metadata_json, created_at, updated_at
       FROM user_stories
       WHERE project_id = ?
       ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, projectId);
    const taskRows = await db.all(`SELECT id, project_id, epic_id, user_story_id, key, title, description, type, status, story_points, priority,
              assigned_agent_id, assignee_human, vcs_branch, vcs_base_branch, vcs_last_commit_sha, metadata_json,
              openapi_version_at_creation, created_at, updated_at
       FROM tasks
       WHERE project_id = ?
       ORDER BY COALESCE(priority, 2147483647), datetime(created_at), key`, projectId);
    // Map DB rows to camelCase; metadata_json is parsed lazily per row.
    const epics = epicRows.map((row) => ({
        id: row.id,
        projectId: row.project_id,
        key: row.key,
        title: row.title,
        description: row.description,
        storyPointsTotal: row.story_points_total ?? null,
        priority: row.priority ?? null,
        metadata: row.metadata_json ? JSON.parse(row.metadata_json) : undefined,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
    }));
    const stories = storyRows.map((row) => ({
        id: row.id,
        projectId: row.project_id,
        epicId: row.epic_id,
        key: row.key,
        title: row.title,
        description: row.description,
        acceptanceCriteria: row.acceptance_criteria ?? null,
        storyPointsTotal: row.story_points_total ?? null,
        priority: row.priority ?? null,
        metadata: row.metadata_json ? JSON.parse(row.metadata_json) : undefined,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
    }));
    const tasks = taskRows.map((row) => ({
        id: row.id,
        projectId: row.project_id,
        epicId: row.epic_id,
        userStoryId: row.user_story_id,
        key: row.key,
        title: row.title,
        description: row.description,
        type: row.type ?? null,
        status: row.status,
        storyPoints: row.story_points ?? null,
        priority: row.priority ?? null,
        assignedAgentId: row.assigned_agent_id ?? null,
        assigneeHuman: row.assignee_human ?? null,
        vcsBranch: row.vcs_branch ?? null,
        vcsBaseBranch: row.vcs_base_branch ?? null,
        vcsLastCommitSha: row.vcs_last_commit_sha ?? null,
        metadata: row.metadata_json ? JSON.parse(row.metadata_json) : undefined,
        openapiVersionAtCreation: row.openapi_version_at_creation ?? null,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
    }));
    // Dependencies come from the repo helper when available; otherwise empty.
    const dependencies = typeof repoLike.getTaskDependencies === "function"
        ? await repoLike.getTaskDependencies(tasks.map((task) => task.id))
        : [];
    return { epics, stories, tasks, dependencies };
}
3867
/**
 * Rebuild an in-memory plan (the shape produced by initial plan generation)
 * from a persisted backlog, so artifacts can be regenerated from the DB state.
 *
 * Keys become localIds; epic/story references are resolved back to keys via
 * lookup maps; task dependency ids are translated to dependsOnKeys; metadata
 * fields (doc_links, service_ids, tags, test_requirements, qa) are normalized
 * through the file-level helpers.
 *
 * @param {{epics: Array, stories: Array, tasks: Array, dependencies: Array}} backlog
 * @returns {{epics: Array, stories: Array, tasks: Array}} reconstructed plan.
 */
buildPlanFromPersistedBacklog(backlog) {
    // id -> row lookups for translating foreign keys back to plan keys.
    const storyById = new Map(backlog.stories.map((story) => [story.id, story]));
    const epicById = new Map(backlog.epics.map((epic) => [epic.id, epic]));
    const taskById = new Map(backlog.tasks.map((task) => [task.id, task]));
    // taskId -> deduped list of depended-on task keys.
    const dependencyKeysByTaskId = new Map();
    for (const dependency of backlog.dependencies) {
        const current = dependencyKeysByTaskId.get(dependency.taskId) ?? [];
        const dependsOn = taskById.get(dependency.dependsOnTaskId)?.key;
        if (dependsOn && !current.includes(dependsOn))
            current.push(dependsOn);
        dependencyKeysByTaskId.set(dependency.taskId, current);
    }
    return {
        epics: backlog.epics.map((epic) => {
            const metadata = (epic.metadata ?? {});
            return {
                localId: epic.key,
                // Area is derived from the key prefix (e.g. "CORE-1" -> "core").
                area: epic.key.split("-")[0]?.toLowerCase() || "proj",
                title: epic.title,
                description: epic.description,
                acceptanceCriteria: [],
                relatedDocs: normalizeRelatedDocs(metadata.doc_links),
                priorityHint: epic.priority ?? undefined,
                serviceIds: normalizeStringArray(metadata.service_ids),
                tags: normalizeStringArray(metadata.tags),
                stories: [],
            };
        }),
        stories: backlog.stories.map((story) => {
            const metadata = (story.metadata ?? {});
            return {
                localId: story.key,
                // Fall back to the raw epic id if the epic row is missing.
                epicLocalId: epicById.get(story.epicId)?.key ?? story.epicId,
                title: story.title,
                userStory: undefined,
                description: story.description,
                acceptanceCriteria: this.splitPersistedAcceptanceCriteria(story.acceptanceCriteria),
                relatedDocs: normalizeRelatedDocs(metadata.doc_links),
                priorityHint: story.priority ?? undefined,
                tasks: [],
            };
        }),
        tasks: backlog.tasks.map((task) => {
            const metadata = (task.metadata ?? {});
            const testRequirements = (metadata.test_requirements ?? {});
            return {
                localId: task.key,
                epicLocalId: epicById.get(task.epicId)?.key ?? task.epicId,
                storyLocalId: storyById.get(task.userStoryId)?.key ?? task.userStoryId,
                title: task.title,
                type: task.type ?? "feature",
                description: task.description,
                estimatedStoryPoints: task.storyPoints ?? undefined,
                priorityHint: task.priority ?? undefined,
                dependsOnKeys: dependencyKeysByTaskId.get(task.id) ?? [],
                relatedDocs: normalizeRelatedDocs(metadata.doc_links),
                unitTests: normalizeStringArray(testRequirements.unit),
                componentTests: normalizeStringArray(testRequirements.component),
                integrationTests: normalizeStringArray(testRequirements.integration),
                apiTests: normalizeStringArray(testRequirements.api),
                qa: isPlainObject(metadata.qa) ? metadata.qa : undefined,
            };
        }),
    };
}
3932
+ async writePlanArtifacts(projectKey, plan, docSummary, docs, buildPlan, serviceCatalog, options) {
3369
3933
  const baseDir = path.join(this.workspace.mcodaDir, "tasks", projectKey);
3370
3934
  await fs.mkdir(baseDir, { recursive: true });
3371
3935
  const releaseLock = await this.acquirePlanArtifactLock(baseDir);
@@ -3387,7 +3951,11 @@ export class CreateTasksService {
3387
3951
  await write("epics.json", plan.epics);
3388
3952
  await write("stories.json", plan.stories);
3389
3953
  await write("tasks.json", plan.tasks);
3390
- await write("coverage-report.json", this.buildSdsCoverageReport(projectKey, docs, plan));
3954
+ const coverageReport = this.buildSdsCoverageReport(projectKey, docs, plan, options?.existingCoverageAnchors ?? new Set());
3955
+ if (options?.expectedCoverage) {
3956
+ this.assertCoverageConsistency(projectKey, coverageReport, options.expectedCoverage);
3957
+ }
3958
+ await write("coverage-report.json", coverageReport);
3391
3959
  }
3392
3960
  finally {
3393
3961
  await releaseLock();
@@ -3656,6 +4224,8 @@ export class CreateTasksService {
3656
4224
  });
3657
4225
  let sdsPreflight;
3658
4226
  let sdsPreflightError;
4227
+ let sdsPreflightBlockingReasons = [];
4228
+ let continueAfterSdsPreflightWarnings = false;
3659
4229
  if (this.sdsPreflightFactory) {
3660
4230
  let sdsPreflightCloseError;
3661
4231
  try {
@@ -3667,7 +4237,7 @@ export class CreateTasksService {
3667
4237
  inputPaths: options.inputs,
3668
4238
  sdsPaths: options.inputs,
3669
4239
  writeArtifacts: true,
3670
- applyToSds: true,
4240
+ applyToSds: options.sdsPreflightApplyToSds === true,
3671
4241
  commitAppliedChanges: options.sdsPreflightCommit === true,
3672
4242
  commitMessage: options.sdsPreflightCommitMessage,
3673
4243
  });
@@ -3727,12 +4297,15 @@ export class CreateTasksService {
3727
4297
  }
3728
4298
  if (blockingReasons.length > 0) {
3729
4299
  sdsPreflightError = blockingReasons.join(" ");
4300
+ sdsPreflightBlockingReasons = [...blockingReasons];
4301
+ continueAfterSdsPreflightWarnings = true;
4302
+ await this.jobService.appendLog(job.id, `SDS preflight reported planning warnings but create-tasks will continue with remediation context: ${blockingReasons.join(" ")} Report: ${sdsPreflight.reportPath}\n`);
3730
4303
  }
3731
4304
  await this.jobService.writeCheckpoint(job.id, {
3732
4305
  stage: "sds_preflight",
3733
4306
  timestamp: new Date().toISOString(),
3734
4307
  details: {
3735
- status: blockingReasons.length > 0 ? "blocked" : "succeeded",
4308
+ status: blockingReasons.length > 0 ? "continued_with_warnings" : "succeeded",
3736
4309
  error: sdsPreflightError,
3737
4310
  readyForPlanning: sdsPreflight.readyForPlanning,
3738
4311
  qualityStatus: sdsPreflight.qualityStatus,
@@ -3747,28 +4320,37 @@ export class CreateTasksService {
3747
4320
  appliedToSds: sdsPreflight.appliedToSds,
3748
4321
  appliedSdsCount: sdsPreflight.appliedSdsPaths.length,
3749
4322
  commitHash: sdsPreflight.commitHash,
4323
+ blockingReasons,
4324
+ continuedWithWarnings: continueAfterSdsPreflightWarnings,
3750
4325
  warnings: preflightWarnings,
3751
4326
  },
3752
4327
  });
3753
- if (blockingReasons.length > 0) {
3754
- throw new Error(`create-tasks blocked by SDS preflight. ${blockingReasons.join(" ")} Report: ${sdsPreflight.reportPath}`);
3755
- }
3756
4328
  }
3757
- const preflightDocInputs = this.mergeDocInputs(options.inputs, sdsPreflight ? [...sdsPreflight.sourceSdsPaths, ...sdsPreflight.generatedDocPaths] : []);
4329
+ const preflightGeneratedDocInputs = sdsPreflight && (!sdsPreflight.appliedToSds || continueAfterSdsPreflightWarnings)
4330
+ ? sdsPreflight.generatedDocPaths
4331
+ : [];
4332
+ const preflightDocInputs = this.mergeDocInputs(options.inputs, sdsPreflight ? [...sdsPreflight.sourceSdsPaths, ...preflightGeneratedDocInputs] : []);
3758
4333
  const docs = await this.prepareDocs(preflightDocInputs);
3759
4334
  const { docSummary, warnings: indexedDocWarnings } = this.buildDocContext(docs);
3760
4335
  const docWarnings = uniqueStrings([...(sdsPreflight?.warnings ?? []), ...indexedDocWarnings]);
3761
- const discoveryGraph = this.buildServiceDependencyGraph({ epics: [], stories: [], tasks: [] }, docs);
3762
- const serviceCatalog = this.buildServiceCatalogArtifact(options.projectKey, docs, discoveryGraph);
3763
- const projectBuildMethod = this.buildProjectConstructionMethod(docs, discoveryGraph);
3764
- const projectBuildPlan = this.buildProjectPlanArtifact(options.projectKey, docs, discoveryGraph, projectBuildMethod);
3765
- const { prompt } = this.buildPrompt(options.projectKey, docs, projectBuildMethod, serviceCatalog, options);
4336
+ const initialArtifacts = this.derivePlanningArtifacts(options.projectKey, docs, { epics: [], stories: [], tasks: [] });
4337
+ const { discoveryGraph, topologySignals, serviceCatalog, projectBuildMethod, projectBuildPlan } = initialArtifacts;
4338
+ const { prompt } = this.buildPrompt(options.projectKey, docSummary, projectBuildMethod, serviceCatalog, options);
3766
4339
  const qaPreflight = await this.buildQaPreflight();
3767
4340
  const qaOverrides = this.buildQaOverrides(options);
3768
4341
  await this.jobService.writeCheckpoint(job.id, {
3769
4342
  stage: "docs_indexed",
3770
4343
  timestamp: new Date().toISOString(),
3771
- details: { count: docs.length, warnings: docWarnings, startupWaves: discoveryGraph.startupWaves.slice(0, 8) },
4344
+ details: {
4345
+ count: docs.length,
4346
+ warnings: docWarnings,
4347
+ startupWaves: discoveryGraph.startupWaves.slice(0, 8),
4348
+ topologySignals: {
4349
+ structureServices: topologySignals.structureServices.slice(0, 8),
4350
+ topologyHeadings: topologySignals.topologyHeadings.slice(0, 8),
4351
+ waveMentions: topologySignals.waveMentions.slice(0, 4),
4352
+ },
4353
+ },
3772
4354
  });
3773
4355
  await this.jobService.writeCheckpoint(job.id, {
3774
4356
  stage: "build_plan_defined",
@@ -3965,12 +4547,37 @@ export class CreateTasksService {
3965
4547
  throw new Error(`create-tasks blocked: task sufficiency audit did not reach full coverage. Report: ${sufficiencyAudit.reportPath}`);
3966
4548
  }
3967
4549
  }
4550
+ if ((sufficiencyAudit?.totalTasksAdded ?? 0) > 0) {
4551
+ await this.seedPriorities(options.projectKey);
4552
+ }
4553
+ const finalBacklog = await this.loadPersistedBacklog(project.id);
4554
+ const finalPlan = this.buildPlanFromPersistedBacklog(finalBacklog);
4555
+ const finalArtifacts = this.derivePlanningArtifacts(options.projectKey, docs, finalPlan);
4556
+ const finalCoverageAnchors = this.collectCoverageAnchorsFromBacklog(finalBacklog);
4557
+ const expectedCoverage = await this.loadExpectedCoverageFromSufficiencyReport(sufficiencyAudit?.reportPath);
4558
+ await this.writePlanArtifacts(options.projectKey, finalPlan, docSummary, docs, finalArtifacts.projectBuildPlan, finalArtifacts.serviceCatalog, {
4559
+ existingCoverageAnchors: finalCoverageAnchors,
4560
+ expectedCoverage,
4561
+ });
4562
+ await this.jobService.writeCheckpoint(job.id, {
4563
+ stage: "plan_refreshed",
4564
+ timestamp: new Date().toISOString(),
4565
+ details: {
4566
+ folder,
4567
+ epics: finalBacklog.epics.length,
4568
+ stories: finalBacklog.stories.length,
4569
+ tasks: finalBacklog.tasks.length,
4570
+ dependencies: finalBacklog.dependencies.length,
4571
+ services: finalArtifacts.serviceCatalog.services.length,
4572
+ startupWaves: finalArtifacts.projectBuildPlan.startupWaves.length,
4573
+ },
4574
+ });
3968
4575
  await this.jobService.updateJobStatus(job.id, "completed", {
3969
4576
  payload: {
3970
- epicsCreated: epicRows.length,
3971
- storiesCreated: storyRows.length,
3972
- tasksCreated: taskRows.length,
3973
- dependenciesCreated: dependencyRows.length,
4577
+ epicsCreated: finalBacklog.epics.length,
4578
+ storiesCreated: finalBacklog.stories.length,
4579
+ tasksCreated: finalBacklog.tasks.length,
4580
+ dependenciesCreated: finalBacklog.dependencies.length,
3974
4581
  docs: docSummary,
3975
4582
  planFolder: folder,
3976
4583
  planSource,
@@ -3990,6 +4597,8 @@ export class CreateTasksService {
3990
4597
  reportPath: sdsPreflight.reportPath,
3991
4598
  openQuestionsPath: sdsPreflight.openQuestionsPath,
3992
4599
  gapAddendumPath: sdsPreflight.gapAddendumPath,
4600
+ blockingReasons: sdsPreflightBlockingReasons,
4601
+ continuedWithWarnings: continueAfterSdsPreflightWarnings,
3993
4602
  warnings: sdsPreflight.warnings,
3994
4603
  }
3995
4604
  : undefined,
@@ -4037,10 +4646,10 @@ export class CreateTasksService {
4037
4646
  return {
4038
4647
  jobId: job.id,
4039
4648
  commandRunId: commandRun.id,
4040
- epics: epicRows,
4041
- stories: storyRows,
4042
- tasks: taskRows,
4043
- dependencies: dependencyRows,
4649
+ epics: finalBacklog.epics,
4650
+ stories: finalBacklog.stories,
4651
+ tasks: finalBacklog.tasks,
4652
+ dependencies: finalBacklog.dependencies,
4044
4653
  };
4045
4654
  }
4046
4655
  catch (error) {