@cubis/foundry 0.3.80 → 0.3.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/dist/cli/core.js +295 -47
  2. package/dist/cli/core.js.map +1 -1
  3. package/package.json +1 -1
  4. package/src/cli/core.ts +395 -84
  5. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/architecture.md +25 -5
  6. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/implement-track.md +1 -1
  7. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/migrate.md +1 -1
  8. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/mobile.md +1 -1
  9. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/onboard.md +1 -1
  10. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/orchestrate.md +1 -1
  11. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/plan.md +1 -1
  12. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/refactor.md +1 -1
  13. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/release.md +1 -1
  14. package/workflows/workflows/agent-environment-setup/platforms/antigravity/workflows/spec.md +1 -1
  15. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/architecture.md +25 -5
  16. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/implement-track.md +1 -1
  17. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/migrate.md +1 -1
  18. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/mobile.md +1 -1
  19. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/onboard.md +1 -1
  20. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/orchestrate.md +1 -1
  21. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/plan.md +1 -1
  22. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/refactor.md +1 -1
  23. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/release.md +1 -1
  24. package/workflows/workflows/agent-environment-setup/platforms/claude/workflows/spec.md +1 -1
  25. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/architecture.md +25 -5
  26. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/implement-track.md +1 -1
  27. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/migrate.md +1 -1
  28. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/mobile.md +1 -1
  29. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/onboard.md +1 -1
  30. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/orchestrate.md +1 -1
  31. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/plan.md +1 -1
  32. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/refactor.md +1 -1
  33. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/release.md +1 -1
  34. package/workflows/workflows/agent-environment-setup/platforms/codex/workflows/spec.md +1 -1
  35. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/architecture.md +25 -5
  36. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/implement-track.md +1 -1
  37. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/migrate.md +1 -1
  38. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/mobile.md +1 -1
  39. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/onboard.md +1 -1
  40. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/orchestrate.md +1 -1
  41. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/plan.md +1 -1
  42. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/refactor.md +1 -1
  43. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/release.md +1 -1
  44. package/workflows/workflows/agent-environment-setup/platforms/copilot/workflows/spec.md +1 -1
  45. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/architecture.md +25 -5
  46. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/implement-track.md +1 -1
  47. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/migrate.md +1 -1
  48. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/mobile.md +1 -1
  49. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/onboard.md +1 -1
  50. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/orchestrate.md +1 -1
  51. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/plan.md +1 -1
  52. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/refactor.md +1 -1
  53. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/release.md +1 -1
  54. package/workflows/workflows/agent-environment-setup/platforms/gemini/workflows/spec.md +1 -1
  55. package/workflows/workflows/agent-environment-setup/shared/workflows/architecture.md +25 -5
  56. package/workflows/workflows/agent-environment-setup/shared/workflows/implement-track.md +1 -1
  57. package/workflows/workflows/agent-environment-setup/shared/workflows/migrate.md +1 -1
  58. package/workflows/workflows/agent-environment-setup/shared/workflows/mobile.md +1 -1
  59. package/workflows/workflows/agent-environment-setup/shared/workflows/onboard.md +1 -1
  60. package/workflows/workflows/agent-environment-setup/shared/workflows/orchestrate.md +1 -1
  61. package/workflows/workflows/agent-environment-setup/shared/workflows/plan.md +1 -1
  62. package/workflows/workflows/agent-environment-setup/shared/workflows/refactor.md +1 -1
  63. package/workflows/workflows/agent-environment-setup/shared/workflows/release.md +1 -1
  64. package/workflows/workflows/agent-environment-setup/shared/workflows/spec.md +1 -1
package/src/cli/core.ts CHANGED
@@ -71,8 +71,7 @@ const PRODUCT_FOUNDATION_BLOCK_END_RE =
71
71
  /<!--\s*cbx:product:foundation:end\s*-->/g;
72
72
  const ARCHITECTURE_DOC_BLOCK_START_RE =
73
73
  /<!--\s*cbx:architecture:doc:start[^>]*-->/g;
74
- const ARCHITECTURE_DOC_BLOCK_END_RE =
75
- /<!--\s*cbx:architecture:doc:end\s*-->/g;
74
+ const ARCHITECTURE_DOC_BLOCK_END_RE = /<!--\s*cbx:architecture:doc:end\s*-->/g;
76
75
  const TECH_ARCHITECTURE_BLOCK_START_RE =
77
76
  /<!--\s*cbx:architecture:tech:start[^>]*-->/g;
78
77
  const TECH_ARCHITECTURE_BLOCK_END_RE =
@@ -1056,7 +1055,8 @@ function inferArchitectureContractProfile(snapshot) {
1056
1055
  snapshot.topDirs.includes("app")
1057
1056
  ? "App-level UI patterns should be centralized and reused across screens."
1058
1057
  : null,
1059
- ]) || "No dedicated design-system directory detected; infer shared UI rules from current components and screens.";
1058
+ ]) ||
1059
+ "No dedicated design-system directory detected; infer shared UI rules from current components and screens.";
1060
1060
 
1061
1061
  const testingStrategy = [];
1062
1062
  if (snapshot.keyScripts.some((script) => script.name === "lint")) {
@@ -1107,7 +1107,8 @@ function buildArchitectureMermaid(snapshot) {
1107
1107
  ' user["User / Entry Point"] --> app["Application Surface"]',
1108
1108
  ];
1109
1109
  for (let index = 0; index < flowNodes.length; index += 1) {
1110
- const nodeName = flowNodes[index].replace(/[^A-Za-z0-9]/g, "") || `N${index}`;
1110
+ const nodeName =
1111
+ flowNodes[index].replace(/[^A-Za-z0-9]/g, "") || `N${index}`;
1111
1112
  lines.push(` app --> ${nodeName}["${flowNodes[index]}/"]`);
1112
1113
  }
1113
1114
  if (snapshot.isMcpServer || snapshot.mcpSignals.length > 0) {
@@ -1123,21 +1124,31 @@ function inferProductFoundationProfile(snapshot, specRoots = []) {
1123
1124
  const appRoots = (snapshot.architectureByApp || [])
1124
1125
  .map((item) => item.rootPath)
1125
1126
  .filter((value) => value && value !== ".");
1126
- const primarySurfaces = appRoots.length > 0 ? appRoots : snapshot.topDirs.slice(0, 6);
1127
+ const primarySurfaces =
1128
+ appRoots.length > 0 ? appRoots : snapshot.topDirs.slice(0, 6);
1127
1129
  const userPersonas = [];
1128
1130
 
1129
1131
  if (snapshot.frameworks.includes("Flutter")) {
1130
- userPersonas.push("End users interacting through mobile or desktop app surfaces");
1132
+ userPersonas.push(
1133
+ "End users interacting through mobile or desktop app surfaces",
1134
+ );
1131
1135
  }
1132
1136
  if (
1133
1137
  snapshot.frameworks.includes("Next.js") ||
1134
1138
  snapshot.frameworks.includes("React") ||
1135
1139
  snapshot.topDirs.includes("web")
1136
1140
  ) {
1137
- userPersonas.push("Browser users and internal operators using web-facing flows");
1141
+ userPersonas.push(
1142
+ "Browser users and internal operators using web-facing flows",
1143
+ );
1138
1144
  }
1139
- if (snapshot.frameworks.includes("NestJS") || snapshot.topDirs.includes("api")) {
1140
- userPersonas.push("Internal services, operators, or partner systems consuming API boundaries");
1145
+ if (
1146
+ snapshot.frameworks.includes("NestJS") ||
1147
+ snapshot.topDirs.includes("api")
1148
+ ) {
1149
+ userPersonas.push(
1150
+ "Internal services, operators, or partner systems consuming API boundaries",
1151
+ );
1141
1152
  }
1142
1153
  if (userPersonas.length === 0) {
1143
1154
  userPersonas.push(
@@ -1157,7 +1168,9 @@ function inferProductFoundationProfile(snapshot, specRoots = []) {
1157
1168
  );
1158
1169
  }
1159
1170
  if (snapshot.isMcpServer || snapshot.mcpSignals.length > 0) {
1160
- coreJourneys.push("Tool-assisted and MCP-driven workflows are part of the operating model and should stay stable.");
1171
+ coreJourneys.push(
1172
+ "Tool-assisted and MCP-driven workflows are part of the operating model and should stay stable.",
1173
+ );
1161
1174
  }
1162
1175
  if (coreJourneys.length === 0) {
1163
1176
  coreJourneys.push(
@@ -1266,12 +1279,16 @@ function buildArchitectureDocSection(snapshot, specRoots = []) {
1266
1279
  "### Bounded Contexts and Module Boundaries",
1267
1280
  ...(profile.moduleBoundaries.length > 0
1268
1281
  ? profile.moduleBoundaries.map((item) => `- ${item}`)
1269
- : ["- No strong top-level module boundaries were detected automatically."]),
1282
+ : [
1283
+ "- No strong top-level module boundaries were detected automatically.",
1284
+ ]),
1270
1285
  "",
1271
1286
  "### Architecture Signals by Surface",
1272
1287
  ...(profile.architectureSignals.length > 0
1273
1288
  ? profile.architectureSignals.map((item) => `- ${item}`)
1274
- : ["- No app-level architecture signals were inferred from the repo scan."]),
1289
+ : [
1290
+ "- No app-level architecture signals were inferred from the repo scan.",
1291
+ ]),
1275
1292
  "",
1276
1293
  "### Decision Areas to Preserve",
1277
1294
  ...profile.decisionAreas.map((item) => `- ${item}`),
@@ -1309,7 +1326,9 @@ function buildEngineeringArchitectureSection(snapshot) {
1309
1326
  "- Module and package boundaries to preserve:",
1310
1327
  ...(profile.moduleBoundaries.length > 0
1311
1328
  ? profile.moduleBoundaries.map((rule) => ` - ${rule}`)
1312
- : [" - No strong module boundary was detected automatically; keep new boundaries explicit in specs and ADRs."]),
1329
+ : [
1330
+ " - No strong module boundary was detected automatically; keep new boundaries explicit in specs and ADRs.",
1331
+ ]),
1313
1332
  "- Testability expectations:",
1314
1333
  ...profile.testingStrategy.map((rule) => ` - ${rule}`),
1315
1334
  "- Doc refresh policy:",
@@ -1374,7 +1393,9 @@ function buildTechArchitectureSection(snapshot) {
1374
1393
  "### Module / App Topology",
1375
1394
  ...(profile.moduleBoundaries.length > 0
1376
1395
  ? profile.moduleBoundaries.map((item) => `- ${item}`)
1377
- : ["- No significant top-level module boundaries detected automatically."]),
1396
+ : [
1397
+ "- No significant top-level module boundaries detected automatically.",
1398
+ ]),
1378
1399
  "",
1379
1400
  "### Flow Narratives",
1380
1401
  "- Describe the primary request, data, and background-job flows here when architecture generation runs.",
@@ -1402,11 +1423,12 @@ function buildRoadmapFoundationSection(snapshot, specRoots = []) {
1402
1423
  specRoots,
1403
1424
  };
1404
1425
  const hash = hashStableObject(payload);
1405
- const nowItems = specRoots.length > 0
1406
- ? specRoots.map((item) => `Track active change planning in \`${item}\`.`)
1407
- : [
1408
- "No active spec packs detected. Create a spec pack before starting the next non-trivial feature or migration.",
1409
- ];
1426
+ const nowItems =
1427
+ specRoots.length > 0
1428
+ ? specRoots.map((item) => `Track active change planning in \`${item}\`.`)
1429
+ : [
1430
+ "No active spec packs detected. Create a spec pack before starting the next non-trivial feature or migration.",
1431
+ ];
1410
1432
  const nextItems = [];
1411
1433
  if (snapshot.frameworks.length > 0) {
1412
1434
  nextItems.push(
@@ -1687,6 +1709,28 @@ function buildEngineeringRulesManagedBlock({
1687
1709
  const roadmapRef = toPosixPath(path.resolve(roadmapFilePath));
1688
1710
  const ruleRef = toPosixPath(path.resolve(ruleFilePath));
1689
1711
 
1712
+ const supportsAtImport = platform === "claude" || platform === "gemini";
1713
+ const ruleFileDir = path.dirname(path.resolve(ruleFilePath));
1714
+ const relProduct = toPosixPath(
1715
+ path.relative(ruleFileDir, path.resolve(productFilePath)),
1716
+ );
1717
+ const relArchitecture = toPosixPath(
1718
+ path.relative(ruleFileDir, path.resolve(architectureFilePath)),
1719
+ );
1720
+ const relTech = toPosixPath(
1721
+ path.relative(ruleFileDir, path.resolve(techMdFilePath)),
1722
+ );
1723
+
1724
+ const importLines = supportsAtImport
1725
+ ? [
1726
+ "",
1727
+ "Foundation docs (auto-imported into context):",
1728
+ `@${relProduct}`,
1729
+ `@${relArchitecture}`,
1730
+ `@${relTech}`,
1731
+ ]
1732
+ : [];
1733
+
1690
1734
  return [
1691
1735
  `<!-- cbx:engineering:auto:start platform=${platform} version=1 -->`,
1692
1736
  "## Engineering Guardrails (auto-managed)",
@@ -1698,13 +1742,14 @@ function buildEngineeringRulesManagedBlock({
1698
1742
  `- Project tech map: \`${techRef}\``,
1699
1743
  `- Delivery roadmap: \`${roadmapRef}\``,
1700
1744
  `- Active platform rule file: \`${ruleRef}\``,
1745
+ ...importLines,
1701
1746
  "",
1702
1747
  "Hard policy:",
1703
1748
  "1. Start from product outcomes and ship the smallest valuable slice.",
1704
1749
  "2. Keep architecture simple (KISS) and avoid speculative work (YAGNI).",
1705
1750
  "3. Apply SOLID pragmatically to reduce change risk, not add ceremony.",
1706
1751
  "4. Use clear naming with focused responsibilities and explicit boundaries.",
1707
- `5. For non-trivial work, read ${FOUNDATION_DOCS_DIR}/PRODUCT.md, ENGINEERING_RULES.md, ${FOUNDATION_DOCS_DIR}/ARCHITECTURE.md, and ${FOUNDATION_DOCS_DIR}/TECH.md in that order when they exist before planning or implementation.`,
1752
+ `5. For non-trivial work, read ${FOUNDATION_DOCS_DIR}/PRODUCT.md, ENGINEERING_RULES.md, ${FOUNDATION_DOCS_DIR}/ARCHITECTURE.md, and ${FOUNDATION_DOCS_DIR}/TECH.md in that order when they exist before planning or implementation. Check ${FOUNDATION_DOCS_DIR}/PRODUCT.md for domain glossary and ${FOUNDATION_DOCS_DIR}/TECH.md for build/validation commands.`,
1708
1753
  "6. Require validation evidence (lint/types/tests) before merge.",
1709
1754
  "7. Use Decision Log response style.",
1710
1755
  "8. Every Decision Log must include a `Skills Used` section listing skill, workflow, or agent names.",
@@ -3557,7 +3602,8 @@ function targetStateKey(platform, scope) {
3557
3602
  }
3558
3603
 
3559
3604
  function getStateFilePath(scope, cwd = process.cwd()) {
3560
- if (scope === "global") return path.join(os.homedir(), ".cbx", "state.json");
3605
+ if (scope === "global")
3606
+ return path.join(resolveManagedHomeDir(), ".cbx", "state.json");
3561
3607
  return path.join(cwd, ".cbx", "workflows-state.json");
3562
3608
  }
3563
3609
 
@@ -5317,7 +5363,7 @@ async function writeGeneratedArtifact({
5317
5363
 
5318
5364
  function resolveLegacyPostmanConfigPath({ scope, cwd = process.cwd() }) {
5319
5365
  if (scope === "global") {
5320
- return path.join(os.homedir(), ".cbx", LEGACY_POSTMAN_CONFIG_FILENAME);
5366
+ return path.join(resolveManagedHomeDir(), ".cbx", LEGACY_POSTMAN_CONFIG_FILENAME);
5321
5367
  }
5322
5368
  const workspaceRoot = findWorkspaceRoot(cwd);
5323
5369
  return path.join(workspaceRoot, LEGACY_POSTMAN_CONFIG_FILENAME);
@@ -5325,7 +5371,7 @@ function resolveLegacyPostmanConfigPath({ scope, cwd = process.cwd() }) {
5325
5371
 
5326
5372
  function resolveCbxConfigPath({ scope, cwd = process.cwd() }) {
5327
5373
  if (scope === "global") {
5328
- return path.join(os.homedir(), ".cbx", CBX_CONFIG_FILENAME);
5374
+ return path.join(resolveManagedHomeDir(), ".cbx", CBX_CONFIG_FILENAME);
5329
5375
  }
5330
5376
  const workspaceRoot = findWorkspaceRoot(cwd);
5331
5377
  return path.join(workspaceRoot, CBX_CONFIG_FILENAME);
@@ -5345,14 +5391,23 @@ async function assertNoLegacyOnlyPostmanConfig({ scope, cwd = process.cwd() }) {
5345
5391
 
5346
5392
  function resolveMcpRootPath({ scope, cwd = process.cwd() }) {
5347
5393
  if (scope === "global") {
5348
- return path.join(os.homedir(), ".cbx", "mcp");
5394
+ return path.join(resolveManagedHomeDir(), ".cbx", "mcp");
5349
5395
  }
5350
5396
  const workspaceRoot = findWorkspaceRoot(cwd);
5351
5397
  return path.join(workspaceRoot, ".cbx", "mcp");
5352
5398
  }
5353
5399
 
5400
+ function resolveManagedHomeDir() {
5401
+ const override = String(
5402
+ process.env.HOME ||
5403
+ process.env.USERPROFILE ||
5404
+ "",
5405
+ ).trim();
5406
+ return override || os.homedir();
5407
+ }
5408
+
5354
5409
  function resolveManagedCredentialsEnvPath() {
5355
- return path.join(os.homedir(), ".cbx", CBX_CREDENTIALS_ENV_FILENAME);
5410
+ return path.join(resolveManagedHomeDir(), ".cbx", CBX_CREDENTIALS_ENV_FILENAME);
5356
5411
  }
5357
5412
 
5358
5413
  function parseShellEnvValue(rawValue) {
@@ -5491,11 +5546,7 @@ function resolveStitchMcpDefinitionPath({
5491
5546
  scope,
5492
5547
  cwd = process.cwd(),
5493
5548
  }) {
5494
- return path.join(
5495
- resolveMcpRootPath({ scope, cwd }),
5496
- platform,
5497
- "stitch.json",
5498
- );
5549
+ return path.join(resolveMcpRootPath({ scope, cwd }), platform, "stitch.json");
5499
5550
  }
5500
5551
 
5501
5552
  function buildPostmanAuthHeader({
@@ -7109,7 +7160,11 @@ async function configurePostmanInstallArtifacts({
7109
7160
  })
7110
7161
  : null;
7111
7162
  const credentialEnvVarNames = [];
7112
- if (persistCredentials && shouldInstallPostman && effectiveApiKeySource === "env") {
7163
+ if (
7164
+ persistCredentials &&
7165
+ shouldInstallPostman &&
7166
+ effectiveApiKeySource === "env"
7167
+ ) {
7113
7168
  credentialEnvVarNames.push(
7114
7169
  effectiveApiKeyEnvVar || POSTMAN_API_KEY_ENV_VAR,
7115
7170
  );
@@ -8365,7 +8420,8 @@ function printPostmanSetupSummary({ postmanSetup }) {
8365
8420
  `- .gitignore (${ignoreResult.filePath}): ${ignoreResult.action}`,
8366
8421
  );
8367
8422
  }
8368
- for (const cleanupResult of postmanSetup.legacyDefinitionCleanupResults || []) {
8423
+ for (const cleanupResult of postmanSetup.legacyDefinitionCleanupResults ||
8424
+ []) {
8369
8425
  console.log(
8370
8426
  `- Legacy direct MCP cleanup (${cleanupResult.path}): ${cleanupResult.action}`,
8371
8427
  );
@@ -8842,10 +8898,7 @@ function withInstallOptions(command) {
8842
8898
  "--stitch",
8843
8899
  "optional: configure Stitch profiles and gateway-backed Foundry MCP wiring",
8844
8900
  )
8845
- .option(
8846
- "--playwright",
8847
- "optional: include Playwright MCP server wiring",
8848
- )
8901
+ .option("--playwright", "optional: include Playwright MCP server wiring")
8849
8902
  .option(
8850
8903
  "--postman-api-key <key>",
8851
8904
  "deprecated: inline key mode is disabled. Use env vars + profiles.",
@@ -11010,7 +11063,7 @@ async function runWorkflowConfigKeysList(options) {
11010
11063
  cwd,
11011
11064
  });
11012
11065
 
11013
- console.log(`Config file: ${configPath}`);
11066
+ console.log(`Config file: ${toPosixPath(configPath)}`);
11014
11067
  if (!existing.exists) {
11015
11068
  console.log("Status: missing");
11016
11069
  return;
@@ -11321,15 +11374,16 @@ async function runWorkflowConfigKeysMigrateInline(options) {
11321
11374
  }
11322
11375
  console.log(`Legacy direct MCP cleanup actions: ${cleanupResults.length}`);
11323
11376
  for (const cleanup of cleanupResults) {
11324
- console.log(`- ${cleanup.action} ${cleanup.path}`);
11377
+ console.log(`- ${cleanup.action} ${toPosixPath(cleanup.path)}`);
11325
11378
  }
11326
11379
  if (secureArtifacts?.mcpRuntimeResult) {
11327
11380
  console.log(
11328
- `Secure platform MCP target: ${secureArtifacts.mcpRuntimeResult.action} (${secureArtifacts.mcpRuntimeResult.path || "n/a"})`,
11381
+ `Secure platform MCP target: ${secureArtifacts.mcpRuntimeResult.action} (${secureArtifacts.mcpRuntimeResult.path ? toPosixPath(secureArtifacts.mcpRuntimeResult.path) : "n/a"})`,
11329
11382
  );
11330
11383
  }
11331
- for (const cleanup of secureArtifacts?.legacyDefinitionCleanupResults || []) {
11332
- console.log(`- ${cleanup.action} ${cleanup.path}`);
11384
+ for (const cleanup of secureArtifacts?.legacyDefinitionCleanupResults ||
11385
+ []) {
11386
+ console.log(`- ${cleanup.action} ${toPosixPath(cleanup.path)}`);
11333
11387
  }
11334
11388
  for (const warning of secureArtifacts?.warnings || []) {
11335
11389
  console.log(`Warning: ${warning}`);
@@ -11356,14 +11410,17 @@ async function runWorkflowConfigKeysDoctor(options) {
11356
11410
  cwd,
11357
11411
  });
11358
11412
 
11359
- console.log(`Config file: ${configPath}`);
11413
+ console.log(`Config file: ${toPosixPath(configPath)}`);
11360
11414
  if (!existing.exists) {
11361
11415
  console.log("Status: missing");
11362
11416
  return;
11363
11417
  }
11364
11418
 
11365
11419
  const configFindings = collectInlineCredentialFindings(existingValue);
11366
- const artifactFindings = await collectCredentialLeakFindings({ scope, cwd });
11420
+ const artifactFindings = await collectCredentialLeakFindings({
11421
+ scope,
11422
+ cwd,
11423
+ });
11367
11424
  const migrationPreview = migrateInlineCredentialsInConfig(existingValue);
11368
11425
 
11369
11426
  console.log(`Inline key findings: ${configFindings.length}`);
@@ -11373,7 +11430,7 @@ async function runWorkflowConfigKeysDoctor(options) {
11373
11430
 
11374
11431
  console.log(`Credential leak findings: ${artifactFindings.length}`);
11375
11432
  for (const finding of artifactFindings) {
11376
- console.log(`- ${finding.filePath} [${finding.matches.join(", ")}]`);
11433
+ console.log(`- ${toPosixPath(finding.filePath)} [${finding.matches.join(", ")}]`);
11377
11434
  }
11378
11435
 
11379
11436
  if (migrationPreview.requiredEnvVars.length > 0) {
@@ -12291,7 +12348,7 @@ async function runMcpServe(options) {
12291
12348
 
12292
12349
  function resolveCbxRootPath({ scope, cwd = process.cwd() }) {
12293
12350
  if (scope === "global") {
12294
- return path.join(os.homedir(), ".cbx");
12351
+ return path.join(resolveManagedHomeDir(), ".cbx");
12295
12352
  }
12296
12353
  const workspaceRoot = findWorkspaceRoot(cwd);
12297
12354
  return path.join(workspaceRoot, ".cbx");
@@ -12832,9 +12889,7 @@ function printInstallEngineeringSummary({ engineeringResults, techResult }) {
12832
12889
 
12833
12890
  function printInstallDocumentationNotice() {
12834
12891
  console.log("\nProject backbone docs:");
12835
- console.log(
12836
- "- Install only wires the rule references and workflow assets.",
12837
- );
12892
+ console.log("- Install only wires the rule references and workflow assets.");
12838
12893
  console.log(
12839
12894
  `- Use \`cbx rules init\` to scaffold ENGINEERING_RULES.md and TECH.md, or \`cbx build architecture --platform <codex|claude|gemini|copilot>\` to generate ${FOUNDATION_DOCS_DIR}/PRODUCT.md, ${FOUNDATION_DOCS_DIR}/ARCHITECTURE.md, ${FOUNDATION_DOCS_DIR}/TECH.md, and ADR scaffolds.`,
12840
12895
  );
@@ -12895,9 +12950,10 @@ async function upsertEngineeringArtifacts({
12895
12950
  engineeringResults.push({
12896
12951
  ruleFilePath: target.ruleFilePath,
12897
12952
  rulesFilePath: scaffold.engineeringRulesPath,
12898
- rulesFileResult: scaffold.rulesArchitectureResult.action === "unchanged"
12899
- ? scaffold.rulesFileResult
12900
- : scaffold.rulesArchitectureResult,
12953
+ rulesFileResult:
12954
+ scaffold.rulesArchitectureResult.action === "unchanged"
12955
+ ? scaffold.rulesFileResult
12956
+ : scaffold.rulesArchitectureResult,
12901
12957
  blockResult,
12902
12958
  });
12903
12959
  }
@@ -13083,7 +13139,11 @@ async function resolveArchitectureInspectionAnchors(
13083
13139
  return ordered.slice(0, 18);
13084
13140
  }
13085
13141
 
13086
- function resolveArchitectureConditionalSkills(snapshot, specRoots, researchMode) {
13142
+ function resolveArchitectureConditionalSkills(
13143
+ snapshot,
13144
+ specRoots,
13145
+ researchMode,
13146
+ ) {
13087
13147
  const conditional = [];
13088
13148
  const frameworks = new Set(snapshot.frameworks || []);
13089
13149
  const topDirs = new Set(snapshot.topDirs || []);
@@ -13137,7 +13197,9 @@ async function resolveArchitectureSkillPathHints(platform, cwd, skillIds) {
13137
13197
  if (!skillsDir) return [];
13138
13198
  return skillIds
13139
13199
  .map((skillId) => path.join(skillsDir, skillId, "SKILL.md"))
13140
- .map((filePath) => toPosixPath(path.relative(findWorkspaceRoot(cwd), filePath)));
13200
+ .map((filePath) =>
13201
+ toPosixPath(path.relative(findWorkspaceRoot(cwd), filePath)),
13202
+ );
13141
13203
  }
13142
13204
 
13143
13205
  function buildArchitecturePrompt({
@@ -13163,13 +13225,26 @@ function buildArchitecturePrompt({
13163
13225
  return `${label}: ${item.architectureSignals.join(", ")}`;
13164
13226
  });
13165
13227
 
13228
+ const platformCapabilities = {
13229
+ codex:
13230
+ "You can read, write, and execute shell commands. Use `codex exec` mode.",
13231
+ claude:
13232
+ "You can read, write files, and run bash commands. Use non-interactive mode.",
13233
+ gemini:
13234
+ "You can read, write files, and run commands within your sandbox. Follow Gemini CLI conventions.",
13235
+ copilot:
13236
+ "You can read, write files, and use terminal commands. Follow Copilot agent conventions.",
13237
+ };
13238
+
13166
13239
  return [
13167
13240
  `You are running inside ${platform}.`,
13241
+ platformCapabilities[platform] || "",
13168
13242
  "",
13169
13243
  "Objective:",
13170
13244
  `- Inspect the repository at ${toPosixPath(workspaceRoot)} and author or refresh the core foundation docs in ${productPath}, ${architecturePath}, ${techPath}, ${adrReadmePath}, and ${adrTemplatePath}.`,
13171
13245
  "- The content should be primarily AI-authored from repository inspection, not copied from placeholder scaffolding.",
13172
13246
  "- Preserve manual content outside the managed `cbx:*` markers.",
13247
+ "- The output docs must be immediately useful to any AI agent (Copilot, Claude, Gemini, Codex) inspecting this repo for the first time, reducing search and exploration time.",
13173
13248
  "",
13174
13249
  "Required skill bundle:",
13175
13250
  `- Load these exact skill IDs first: ${coreSkills.map((skillId) => `\`${skillId}\``).join(", ")}`,
@@ -13189,29 +13264,109 @@ function buildArchitecturePrompt({
13189
13264
  ? `- Architecture signals: ${architectureSignals.join(" | ")}`
13190
13265
  : "- Architecture signals: none confidently inferred from the repo scan",
13191
13266
  `- Entry points: ${snapshot.entryPoints.length > 0 ? snapshot.entryPoints.slice(0, 8).join(" | ") : "none detected"}`,
13192
- `- Key scripts: ${snapshot.keyScripts.length > 0 ? snapshot.keyScripts.slice(0, 8).map((item) => `${item.name}=${item.command}`).join(" | ") : "none detected"}`,
13267
+ `- Key scripts: ${
13268
+ snapshot.keyScripts.length > 0
13269
+ ? snapshot.keyScripts
13270
+ .slice(0, 8)
13271
+ .map((item) => `${item.name}=${item.command}`)
13272
+ .join(" | ")
13273
+ : "none detected"
13274
+ }`,
13193
13275
  `- Inspection anchors: ${inspectionAnchors.length > 0 ? inspectionAnchors.join(", ") : "no concrete anchors detected; inspect the repo root, main source trees, and manifest files manually"}`,
13194
13276
  "",
13277
+ "Markdown formatting rules (apply to all generated docs):",
13278
+ "- Start each file with a single `# Title` heading. Never use more than one H1 per file.",
13279
+ "- Use `## Heading` for major sections, `### Heading` for subsections. Never skip heading levels (e.g., do not jump from `##` to `####`).",
13280
+ "- Separate headings from surrounding content with exactly one blank line above and below.",
13281
+ "- Use fenced code blocks with triple backticks and a language identifier (```bash, ```typescript, ```json, ```yaml, ```mermaid) for all code, commands, and diagrams. Never use indented code blocks.",
13282
+ "- Use `-` for unordered lists. Use `1.` for ordered lists. Indent nested lists by 2 spaces.",
13283
+ "- Use `inline code` backticks for file paths, command names, env vars, config keys, and identifiers.",
13284
+ "- Use Mermaid fenced blocks (```mermaid) for diagrams. Validate that diagram syntax is correct: `graph TD`, `sequenceDiagram`, `flowchart LR`, or `C4Context` style. Every node and edge must be syntactically valid.",
13285
+ "- Tables must have a header row, a separator row with dashes and pipes, and aligned columns. Example:",
13286
+ " ```",
13287
+ " | Column A | Column B |",
13288
+ " | -------- | -------- |",
13289
+ " | value | value |",
13290
+ " ```",
13291
+ "- Use `> blockquote` only for callouts or important notes, prefixed with **Note:** or **Warning:**.",
13292
+ "- Relative links to other repo files should use repo-relative paths: `[ARCHITECTURE.md](docs/foundation/ARCHITECTURE.md)`.",
13293
+ "- End every file with a single trailing newline. No trailing whitespace on lines.",
13294
+ "",
13195
13295
  "Execution contract:",
13196
13296
  "1. Inspect the repository first before writing any backbone doc content. Derive structure, product surfaces, runtime boundaries, and technical constraints from the actual codebase.",
13197
13297
  "2. Complete a real inspection pass before drafting. At minimum, inspect the concrete anchors listed above, plus any adjacent directories needed to understand the main execution paths, data boundaries, and integration surfaces.",
13198
13298
  "3. Do not infer architecture from filenames alone when you can open representative files. Read enough source to validate the main app boundaries, runtime flows, and persistence/integration patterns.",
13199
13299
  `4. Then read ${productPath}, ${architecturePath}, and ${techPath} in that order when they exist so you can preserve useful manual context and update existing managed sections cleanly.`,
13200
13300
  `5. Replace or update only the content between the existing managed markers in ${productPath}, ${architecturePath}, and ${techPath}. Do not append a second marker block.`,
13201
- `6. In ${productPath}, write a concrete product foundation: product purpose, primary users/operators, main journeys, business capabilities, operational constraints, and what future contributors must preserve.`,
13202
- `7. In ${architecturePath}, write a lean but detailed architecture backbone in a pragmatic arc42/C4 style: system purpose and constraints, bounded contexts, major building blocks, dependency rules, data and integration boundaries, runtime flows, deployment/operability notes, testing/debugging strategy, and only the diagram levels that add real value.`,
13203
- `8. In ${techPath}, write the developer-facing technical map: stack, repo layout, key commands, entrypoints, data stores, external services, environment/config surfaces, MCP/tooling footprint, and change hotspots future agents should inspect before editing code.`,
13204
- "9. Every major claim should be grounded in repository evidence. Mention concrete repo paths in the docs when a structural claim would otherwise be ambiguous.",
13205
- "10. Avoid placeholder filler, generic checklists, and duplicated content across files. Each doc should have a clear job.",
13206
- "11. Do not create ROADMAP.md, ENGINEERING_RULES.md, or other extra docs unless the prompt explicitly asks for them.",
13301
+ "",
13302
+ `6. In ${productPath}, write a concrete product foundation:`,
13303
+ " - Product purpose with a one-sentence elevator pitch an AI agent can use as context.",
13304
+ " - Primary users/operators with their key goals.",
13305
+ " - Main journeys as numbered sequences an agent can follow to understand the happy path.",
13306
+ " - Business capabilities that matter, linked to repo paths that implement them.",
13307
+ " - Operational constraints and SLA/uptime expectations if evident.",
13308
+ " - What future contributors must preserve (invariants, contracts, compatibility guarantees).",
13309
+ " - A domain glossary defining project-specific terms, abbreviations, and bounded-context language so AI agents use consistent vocabulary.",
13310
+ "",
13311
+ `7. In ${architecturePath}, write a lean but detailed architecture backbone in a pragmatic arc42/C4 style:`,
13312
+ " - Architecture classification (monolith, modular monolith, microservices, serverless, hybrid) with evidence.",
13313
+ " - System purpose, constraints, and architectural drivers.",
13314
+ " - Bounded contexts with ownership boundaries mapped to directories.",
13315
+ " - Major building blocks as a table or Mermaid C4 diagram with responsibilities.",
13316
+ " - Dependency rules: what can import what, forbidden coupling, and layering policy.",
13317
+ " - Data and integration boundaries with protocol/format details.",
13318
+ " - Runtime flows as Mermaid sequence diagrams for the top 2-3 critical paths.",
13319
+ " - Crosscutting concerns: logging, auth, error handling, i18n patterns with repo-path evidence.",
13320
+ " - Quality requirements derived from the codebase (performance budgets, test coverage, accessibility).",
13321
+ " - Known risks and tech debt visible in the codebase (TODOs, deprecated deps, missing tests).",
13322
+ " - Deployment/operability notes.",
13323
+ " - Testing/debugging strategy with concrete test commands and coverage tooling.",
13324
+ " - A dedicated folder-structure guide listing every important directory, what it owns, and contributor rules.",
13325
+ "",
13326
+ `8. In ${techPath}, write the developer-facing technical map that an AI agent can use to start working immediately:`,
13327
+ " - Stack snapshot as a table (runtime, language, framework, version if discoverable).",
13328
+ " - Repository layout: directory tree with one-line purpose per directory.",
13329
+ " - Entrypoints: the exact files that bootstrap each app/service/CLI.",
13330
+ " - Key commands: the exact shell commands for bootstrap, build, test, lint, format, run, and deploy. Validate that these commands actually exist in the project manifests. Document required order and preconditions.",
13331
+ " - Build and validation: the validated sequence of commands to go from clean clone to passing CI locally, including environment prerequisites (Node version, Python version, Docker, etc.).",
13332
+ " - CI/CD pipeline: describe the CI/CD workflow files, their triggers, and what checks must pass before merge.",
13333
+ " - Runtime data stores and migration commands.",
13334
+ " - External services and integration surfaces with protocol details.",
13335
+ " - Environment and config surfaces: list every env var the app reads, its purpose, default value if any, and whether it is required or optional.",
13336
+ " - MCP/tooling footprint if present.",
13337
+ " - Generated artifacts: files that are auto-generated and must not be hand-edited.",
13338
+ " - Error patterns and debugging: common error messages, their causes, and resolution steps discovered during inspection.",
13339
+ " - Change hotspots: files/directories that change most often or have the most coupling, so agents know where to look first.",
13340
+ " - Practical editing notes: conventions for naming, imports, test file placement, and PR hygiene.",
13341
+ "",
13342
+ `9. ${techPath} should complement ${architecturePath}; do not repeat the same structure prose unless it helps a developer act faster.`,
13343
+ "",
13344
+ `10. Use exact required headings in ${productPath}: \`## Product Scope\`, \`## Product Purpose\`, \`## Primary Users And Operators\`, \`## Main Journeys\`, \`## Business Capabilities That Matter\`, \`## Operational Constraints\`, \`## Preservation Rules For Future Contributors\`, \`## Domain Glossary\`.`,
13345
+ `11. Use exact required headings in ${architecturePath}: \`## Architecture Type\`, \`## System Purpose\`, \`## Constraints And Architectural Drivers\`, \`## Repository Structure Guide\`, \`## Bounded Contexts\`, \`## Major Building Blocks\`, \`## Dependency Rules\`, \`## Data Boundaries\`, \`## Integration Boundaries\`, \`## Runtime Flows\`, \`## Crosscutting Concerns\`, \`## Quality Requirements\`, \`## Risks And Tech Debt\`, \`## Deployment And Operability\`, \`## Testing And Debugging Strategy\`, \`## Architectural Guidance\`.`,
13346
+ `12. Use exact required headings in ${techPath}: \`## Stack Snapshot\`, \`## Repository Layout\`, \`## Entrypoints\`, \`## Key Commands\`, \`## Build And Validation\`, \`## CI CD Pipeline\`, \`## Runtime Data Stores\`, \`## External Services And Integration Surfaces\`, \`## Environment And Config Surfaces\`, \`## Generated Artifacts To Respect\`, \`## Error Patterns And Debugging\`, \`## Change Hotspots\`, \`## Practical Editing Notes\`.`,
13347
+ "",
13348
+ "13. Every major claim should be grounded in repository evidence. Mention concrete repo paths in the docs when a structural claim would otherwise be ambiguous.",
13349
+ "14. Avoid placeholder filler, generic checklists, and duplicated content across files. Each doc should have a clear job.",
13350
+ "15. Do not create ROADMAP.md, ENGINEERING_RULES.md, or other extra docs unless the prompt explicitly asks for them.",
13207
13351
  researchMode === "never"
13208
- ? "12. Stay repo-only. Do not use outside research."
13209
- : "12. Use repo evidence first. Use official docs when needed. Treat Reddit or community sources only as labeled secondary evidence.",
13352
+ ? "16. Stay repo-only. Do not use outside research."
13353
+ : "16. Use repo evidence first. Use official docs when needed. Treat Reddit or community sources only as labeled secondary evidence.",
13210
13354
  researchMode === "always"
13211
- ? `13. Include an external research evidence subsection in ${techPath} with clearly labeled primary and secondary evidence.`
13212
- : "13. Include external research notes only if they materially informed the architecture update.",
13213
- `14. If the project clearly follows Clean Architecture, feature-first modules, DDD, modular monolith, or another stable structure, make that explicit in ${architecturePath} with evidence from the repo.`,
13214
- `15. Ensure ${adrReadmePath} and ${adrTemplatePath} exist as ADR entrypoints, but keep them lean.`,
13355
+ ? `17. Include an external research evidence subsection in ${techPath} with clearly labeled primary and secondary evidence.`
13356
+ : "17. Include external research notes only if they materially informed the architecture update.",
13357
+ `18. If the project clearly follows Clean Architecture, feature-first modules, DDD, modular monolith, or another stable structure, make that explicit in ${architecturePath} with evidence from the repo.`,
13358
+ `19. Ensure ${adrReadmePath} and ${adrTemplatePath} exist as ADR entrypoints, but keep them lean.`,
13359
+ "20. In all docs, when referencing other foundation docs, use relative markdown links: `[ARCHITECTURE.md](docs/foundation/ARCHITECTURE.md)`. This lets AI agents and humans navigate between docs.",
13360
+ "21. Validate all Mermaid diagram syntax before writing. Each diagram must render without errors. Use simple node IDs (alphanumeric, no special characters) and quote labels containing spaces.",
13361
+ "22. For each key command documented, note the expected exit code (0 for success) and any common failure modes. This helps AI agents validate their own changes.",
13362
+ "",
13363
+ "Platform context-loading awareness (these docs will be @imported into agent rule files):",
13364
+ "- Claude loads CLAUDE.md at session start via @file imports; each imported doc should be concise and self-contained.",
13365
+ "- Gemini loads GEMINI.md hierarchically with JIT context; structure docs with clear H2 headings so sections are independently useful.",
13366
+ "- Codex concatenates AGENTS.md files root-to-CWD with a default 32 KiB combined limit; keep total foundation doc prose lean.",
13367
+ "- Copilot loads copilot-instructions.md automatically; headings and inline code markers aid discoverability.",
13368
+ "- Target each individual foundation doc under 300 lines so it stays effective when imported into any platform's context window.",
13369
+ "- Front-load the most actionable information (commands, paths, constraints) in each doc; put supplementary detail later.",
13215
13370
  "",
13216
13371
  "Return one JSON object on the last line with this shape:",
13217
13372
  `{"files_written":["${productPath}","${architecturePath}","${techPath}","${adrReadmePath}","${adrTemplatePath}"],"research_used":false,"gaps":[],"next_actions":[]}`,
@@ -13220,9 +13375,40 @@ function buildArchitecturePrompt({
13220
13375
  ].join("\n");
13221
13376
  }
13222
13377
 
13378
+ let architectureExecFileCaptureOverride = null;
13379
+ let architectureSpawnCaptureOverride = null;
13380
+
13381
+ export function __setArchitectureCommandCaptureForTests(overrides = {}) {
13382
+ architectureExecFileCaptureOverride =
13383
+ overrides.execFileCapture || architectureExecFileCaptureOverride;
13384
+ architectureSpawnCaptureOverride =
13385
+ overrides.spawnCapture || architectureSpawnCaptureOverride;
13386
+ }
13387
+
13388
+ export function __resetArchitectureCommandCaptureForTests() {
13389
+ architectureExecFileCaptureOverride = null;
13390
+ architectureSpawnCaptureOverride = null;
13391
+ }
13392
+
13223
13393
  async function execFileCapture(command, args, options = {}) {
13394
+ if (architectureExecFileCaptureOverride) {
13395
+ return await architectureExecFileCaptureOverride(command, args, options);
13396
+ }
13397
+ const resolvedCommand =
13398
+ process.platform === "win32"
13399
+ ? await resolveWindowsCommand(command)
13400
+ : command;
13401
+ if (
13402
+ process.platform === "win32" &&
13403
+ /\.(cmd|bat)$/i.test(resolvedCommand)
13404
+ ) {
13405
+ return await spawnCapture(resolvedCommand, args, {
13406
+ ...options,
13407
+ useShell: true,
13408
+ });
13409
+ }
13224
13410
  try {
13225
- const result = await execFile(command, args, {
13411
+ const result = await execFile(resolvedCommand, args, {
13226
13412
  ...options,
13227
13413
  maxBuffer: 8 * 1024 * 1024,
13228
13414
  });
@@ -13233,7 +13419,9 @@ async function execFileCapture(command, args, options = {}) {
13233
13419
  };
13234
13420
  } catch (error) {
13235
13421
  if (error?.code === "ENOENT") {
13236
- throw new Error(`Required CLI '${command}' is not installed or not on PATH.`);
13422
+ throw new Error(
13423
+ `Required CLI '${command}' is not installed or not on PATH.`,
13424
+ );
13237
13425
  }
13238
13426
  return {
13239
13427
  ok: false,
@@ -13244,14 +13432,51 @@ async function execFileCapture(command, args, options = {}) {
13244
13432
  }
13245
13433
  }
13246
13434
 
13435
+ async function resolveWindowsCommand(command) {
13436
+ if (process.platform !== "win32") return command;
13437
+ if (path.isAbsolute(command) || /[\\/]/.test(command)) return command;
13438
+ try {
13439
+ const result = await execFile("where.exe", [command], {
13440
+ windowsHide: true,
13441
+ maxBuffer: 1024 * 1024,
13442
+ });
13443
+ const resolved = String(result.stdout || "")
13444
+ .split(/\r?\n/)
13445
+ .map((line) => line.trim())
13446
+ .find(Boolean);
13447
+ if (resolved) return resolved;
13448
+ } catch (error) {
13449
+ if (error?.code === "ENOENT") {
13450
+ throw new Error(`Required CLI '${command}' is not installed or not on PATH.`);
13451
+ }
13452
+ }
13453
+ const missingError = new Error(
13454
+ `Required CLI '${command}' is not installed or not on PATH.`,
13455
+ );
13456
+ missingError.code = "ENOENT";
13457
+ throw missingError;
13458
+ }
13459
+
13247
13460
  async function spawnCapture(command, args, options = {}) {
13248
- const { cwd, env, streamOutput = false } = options;
13461
+ if (architectureSpawnCaptureOverride) {
13462
+ return await architectureSpawnCaptureOverride(command, args, options);
13463
+ }
13464
+ const { cwd, env, streamOutput = false, useShell } = options;
13465
+ const resolvedCommand =
13466
+ process.platform === "win32"
13467
+ ? await resolveWindowsCommand(command)
13468
+ : command;
13469
+ const shell =
13470
+ typeof useShell === "boolean"
13471
+ ? useShell
13472
+ : process.platform === "win32" && /\.(cmd|bat)$/i.test(resolvedCommand);
13249
13473
  return await new Promise((resolve, reject) => {
13250
13474
  let stdout = "";
13251
13475
  let stderr = "";
13252
- const child = spawn(command, args, {
13476
+ const child = spawn(resolvedCommand, args, {
13253
13477
  cwd,
13254
13478
  env,
13479
+ shell,
13255
13480
  stdio: ["ignore", "pipe", "pipe"],
13256
13481
  });
13257
13482
 
@@ -13270,7 +13495,9 @@ async function spawnCapture(command, args, options = {}) {
13270
13495
  child.on("error", (error) => {
13271
13496
  if (error?.code === "ENOENT") {
13272
13497
  reject(
13273
- new Error(`Required CLI '${command}' is not installed or not on PATH.`),
13498
+ new Error(
13499
+ `Required CLI '${command}' is not installed or not on PATH.`,
13500
+ ),
13274
13501
  );
13275
13502
  return;
13276
13503
  }
@@ -13476,15 +13703,102 @@ async function captureFileContents(filePaths) {
13476
13703
  return snapshot;
13477
13704
  }
13478
13705
 
13706
+ function collectTaggedBlocks(content, startPattern, endPattern) {
13707
+ const blocks = [];
13708
+ let cursor = 0;
13709
+ const startMatcher = new RegExp(
13710
+ startPattern.source,
13711
+ startPattern.flags.replace(/g/g, ""),
13712
+ );
13713
+ const endMatcher = new RegExp(
13714
+ endPattern.source,
13715
+ endPattern.flags.replace(/g/g, ""),
13716
+ );
13717
+ while (cursor < content.length) {
13718
+ const remaining = content.slice(cursor);
13719
+ const startMatch = remaining.match(startMatcher);
13720
+ if (!startMatch || startMatch.index == null) break;
13721
+ const startIndex = cursor + startMatch.index;
13722
+ const afterStart = content.slice(startIndex + startMatch[0].length);
13723
+ const endMatch = afterStart.match(endMatcher);
13724
+ if (!endMatch || endMatch.index == null) break;
13725
+ const endIndex =
13726
+ startIndex + startMatch[0].length + endMatch.index + endMatch[0].length;
13727
+ const block = content.slice(startIndex, endIndex);
13728
+ const inner = content
13729
+ .slice(startIndex + startMatch[0].length, endIndex - endMatch[0].length)
13730
+ .trim();
13731
+ blocks.push({
13732
+ startIndex,
13733
+ endIndex,
13734
+ block,
13735
+ score: inner.length,
13736
+ });
13737
+ cursor = endIndex;
13738
+ }
13739
+ return blocks;
13740
+ }
13741
+
13742
+ async function collapseDuplicateTaggedBlocks({
13743
+ targetPath,
13744
+ startPattern,
13745
+ endPattern,
13746
+ }) {
13747
+ if (!(await pathExists(targetPath))) return { changed: false };
13748
+
13749
+ const content = await readFile(targetPath, "utf8");
13750
+ const blocks = collectTaggedBlocks(content, startPattern, endPattern);
13751
+ if (blocks.length <= 1) return { changed: false };
13752
+
13753
+ const bestBlock = [...blocks].sort((a, b) => b.score - a.score)[0];
13754
+ const first = blocks[0];
13755
+ const last = blocks[blocks.length - 1];
13756
+ const normalized =
13757
+ content.slice(0, first.startIndex) +
13758
+ bestBlock.block +
13759
+ content.slice(last.endIndex);
13760
+
13761
+ if (normalized === content) return { changed: false };
13762
+
13763
+ await writeFile(targetPath, normalized, "utf8");
13764
+ return { changed: true };
13765
+ }
13766
+
13767
+ async function normalizeArchitectureBuildOutputs(scaffold) {
13768
+ await collapseDuplicateTaggedBlocks({
13769
+ targetPath: scaffold.productPath,
13770
+ startPattern: PRODUCT_FOUNDATION_BLOCK_START_RE,
13771
+ endPattern: PRODUCT_FOUNDATION_BLOCK_END_RE,
13772
+ });
13773
+ await collapseDuplicateTaggedBlocks({
13774
+ targetPath: scaffold.architectureDocPath,
13775
+ startPattern: ARCHITECTURE_DOC_BLOCK_START_RE,
13776
+ endPattern: ARCHITECTURE_DOC_BLOCK_END_RE,
13777
+ });
13778
+ await collapseDuplicateTaggedBlocks({
13779
+ targetPath: scaffold.techMdPath,
13780
+ startPattern: TECH_ARCHITECTURE_BLOCK_START_RE,
13781
+ endPattern: TECH_ARCHITECTURE_BLOCK_END_RE,
13782
+ });
13783
+ }
13784
+
13479
13785
  async function readArchitectureDriftStatus(workspaceRoot, snapshot) {
13480
- const productPath = path.join(workspaceRoot, FOUNDATION_DOCS_DIR, "PRODUCT.md");
13786
+ const productPath = path.join(
13787
+ workspaceRoot,
13788
+ FOUNDATION_DOCS_DIR,
13789
+ "PRODUCT.md",
13790
+ );
13481
13791
  const architecturePath = path.join(
13482
13792
  workspaceRoot,
13483
13793
  FOUNDATION_DOCS_DIR,
13484
13794
  "ARCHITECTURE.md",
13485
13795
  );
13486
13796
  const techPath = path.join(workspaceRoot, FOUNDATION_DOCS_DIR, "TECH.md");
13487
- const adrReadmePath = path.join(workspaceRoot, FOUNDATION_ADR_DIR, "README.md");
13797
+ const adrReadmePath = path.join(
13798
+ workspaceRoot,
13799
+ FOUNDATION_ADR_DIR,
13800
+ "README.md",
13801
+ );
13488
13802
  const metadataPath = path.join(
13489
13803
  workspaceRoot,
13490
13804
  ".cbx",
@@ -13673,9 +13987,7 @@ async function runBuildArchitecture(options) {
13673
13987
  console.log(`Workspace: ${toPosixPath(workspaceRoot)}`);
13674
13988
  console.log(`Adapter: ${adapter.binary}`);
13675
13989
  console.log(`Research mode: ${researchMode}`);
13676
- console.log(
13677
- `Managed targets: ${summary.managedTargets.join(", ")}`,
13678
- );
13990
+ console.log(`Managed targets: ${summary.managedTargets.join(", ")}`);
13679
13991
  console.log(`Skill bundle: ${skillBundle.join(", ")}`);
13680
13992
  console.log(`Invocation: ${[adapter.binary, ...args].join(" ")}`);
13681
13993
  }
@@ -13701,15 +14013,19 @@ async function runBuildArchitecture(options) {
13701
14013
  throw new Error(explainArchitectureBuildFailure(platform, execution));
13702
14014
  }
13703
14015
 
14016
+ await normalizeArchitectureBuildOutputs(scaffold);
14017
+
13704
14018
  const filesAfter = await captureFileContents(managedFilePaths);
13705
14019
  const changedFiles = managedFilePaths
13706
14020
  .filter((filePath) => filesBefore[filePath] !== filesAfter[filePath])
13707
14021
  .map((filePath) => toPosixPath(path.relative(workspaceRoot, filePath)));
13708
14022
 
13709
14023
  const techContent =
13710
- filesAfter[scaffold.techMdPath] ?? (await readFile(scaffold.techMdPath, "utf8"));
14024
+ filesAfter[scaffold.techMdPath] ??
14025
+ (await readFile(scaffold.techMdPath, "utf8"));
13711
14026
  const productContent =
13712
- filesAfter[scaffold.productPath] ?? (await readFile(scaffold.productPath, "utf8"));
14027
+ filesAfter[scaffold.productPath] ??
14028
+ (await readFile(scaffold.productPath, "utf8"));
13713
14029
  const architectureContent =
13714
14030
  filesAfter[scaffold.architectureDocPath] ??
13715
14031
  (await readFile(scaffold.architectureDocPath, "utf8"));
@@ -13805,12 +14121,7 @@ function normalizeInitPlatforms(value) {
13805
14121
  }
13806
14122
 
13807
14123
  function normalizeInitMcpSelections(value) {
13808
- const allowed = new Set([
13809
- "cubis-foundry",
13810
- "postman",
13811
- "stitch",
13812
- "playwright",
13813
- ]);
14124
+ const allowed = new Set(["cubis-foundry", "postman", "stitch", "playwright"]);
13814
14125
  const items = Array.isArray(value) ? value : parseCsvOption(value);
13815
14126
  const normalized = [];
13816
14127
  for (const item of items) {