@nathapp/nax 0.32.2 → 0.34.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. package/README.md +191 -6
  2. package/dist/nax.js +1150 -382
  3. package/package.json +1 -1
  4. package/src/cli/analyze.ts +145 -0
  5. package/src/cli/config.ts +9 -0
  6. package/src/config/defaults.ts +8 -0
  7. package/src/config/schema.ts +1 -0
  8. package/src/config/schemas.ts +10 -0
  9. package/src/config/types.ts +18 -0
  10. package/src/context/elements.ts +13 -0
  11. package/src/context/greenfield.ts +1 -1
  12. package/src/decompose/apply.ts +44 -0
  13. package/src/decompose/builder.ts +181 -0
  14. package/src/decompose/index.ts +8 -0
  15. package/src/decompose/sections/codebase.ts +26 -0
  16. package/src/decompose/sections/constraints.ts +32 -0
  17. package/src/decompose/sections/index.ts +4 -0
  18. package/src/decompose/sections/sibling-stories.ts +25 -0
  19. package/src/decompose/sections/target-story.ts +31 -0
  20. package/src/decompose/types.ts +55 -0
  21. package/src/decompose/validators/complexity.ts +45 -0
  22. package/src/decompose/validators/coverage.ts +134 -0
  23. package/src/decompose/validators/dependency.ts +91 -0
  24. package/src/decompose/validators/index.ts +35 -0
  25. package/src/decompose/validators/overlap.ts +128 -0
  26. package/src/execution/crash-recovery.ts +8 -0
  27. package/src/execution/escalation/tier-escalation.ts +9 -2
  28. package/src/execution/iteration-runner.ts +2 -0
  29. package/src/execution/lifecycle/run-completion.ts +100 -15
  30. package/src/execution/parallel-executor.ts +20 -1
  31. package/src/execution/pipeline-result-handler.ts +5 -1
  32. package/src/execution/runner.ts +20 -0
  33. package/src/execution/sequential-executor.ts +2 -11
  34. package/src/hooks/types.ts +20 -10
  35. package/src/interaction/index.ts +1 -0
  36. package/src/interaction/triggers.ts +21 -0
  37. package/src/interaction/types.ts +7 -0
  38. package/src/metrics/tracker.ts +7 -0
  39. package/src/metrics/types.ts +2 -0
  40. package/src/pipeline/stages/review.ts +6 -0
  41. package/src/pipeline/stages/routing.ts +89 -0
  42. package/src/pipeline/types.ts +2 -0
  43. package/src/plugins/types.ts +33 -0
  44. package/src/prd/index.ts +7 -2
  45. package/src/prd/types.ts +17 -2
  46. package/src/review/orchestrator.ts +1 -0
  47. package/src/review/types.ts +2 -0
  48. package/src/tdd/isolation.ts +1 -1
package/dist/nax.js CHANGED
@@ -17993,7 +17993,7 @@ var init_zod = __esm(() => {
17993
17993
  });
17994
17994
 
17995
17995
  // src/config/schemas.ts
17996
- var TokenPricingSchema, ModelDefSchema, ModelEntrySchema, ModelMapSchema, ModelTierSchema, TierConfigSchema, AutoModeConfigSchema, RectificationConfigSchema, RegressionGateConfigSchema, SmartTestRunnerConfigSchema, SMART_TEST_RUNNER_DEFAULT, smartTestRunnerFieldSchema, ExecutionConfigSchema, QualityConfigSchema, TddConfigSchema, ConstitutionConfigSchema, AnalyzeConfigSchema, ReviewConfigSchema, PlanConfigSchema, AcceptanceConfigSchema, TestCoverageConfigSchema, ContextAutoDetectConfigSchema, ContextConfigSchema, AdaptiveRoutingConfigSchema, LlmRoutingConfigSchema, RoutingConfigSchema, OptimizerConfigSchema, PluginConfigEntrySchema, HooksConfigSchema, InteractionConfigSchema, StorySizeGateConfigSchema, PrecheckConfigSchema, PromptsConfigSchema, NaxConfigSchema;
17996
+ var TokenPricingSchema, ModelDefSchema, ModelEntrySchema, ModelMapSchema, ModelTierSchema, TierConfigSchema, AutoModeConfigSchema, RectificationConfigSchema, RegressionGateConfigSchema, SmartTestRunnerConfigSchema, SMART_TEST_RUNNER_DEFAULT, smartTestRunnerFieldSchema, ExecutionConfigSchema, QualityConfigSchema, TddConfigSchema, ConstitutionConfigSchema, AnalyzeConfigSchema, ReviewConfigSchema, PlanConfigSchema, AcceptanceConfigSchema, TestCoverageConfigSchema, ContextAutoDetectConfigSchema, ContextConfigSchema, AdaptiveRoutingConfigSchema, LlmRoutingConfigSchema, RoutingConfigSchema, OptimizerConfigSchema, PluginConfigEntrySchema, HooksConfigSchema, InteractionConfigSchema, StorySizeGateConfigSchema, PrecheckConfigSchema, PromptsConfigSchema, DecomposeConfigSchema, NaxConfigSchema;
17997
17997
  var init_schemas3 = __esm(() => {
17998
17998
  init_zod();
17999
17999
  TokenPricingSchema = exports_external.object({
@@ -18231,6 +18231,14 @@ var init_schemas3 = __esm(() => {
18231
18231
  PromptsConfigSchema = exports_external.object({
18232
18232
  overrides: exports_external.record(exports_external.enum(["test-writer", "implementer", "verifier", "single-session"]), exports_external.string().min(1, "Override path must be non-empty")).optional()
18233
18233
  });
18234
+ DecomposeConfigSchema = exports_external.object({
18235
+ trigger: exports_external.enum(["auto", "confirm", "disabled"]).default("auto"),
18236
+ maxAcceptanceCriteria: exports_external.number().int().min(1).default(6),
18237
+ maxSubstories: exports_external.number().int().min(1).default(5),
18238
+ maxSubstoryComplexity: exports_external.enum(["simple", "medium", "complex", "expert"]).default("medium"),
18239
+ maxRetries: exports_external.number().int().min(0).default(2),
18240
+ model: exports_external.string().min(1).default("balanced")
18241
+ });
18234
18242
  NaxConfigSchema = exports_external.object({
18235
18243
  version: exports_external.number(),
18236
18244
  models: ModelMapSchema,
@@ -18250,7 +18258,8 @@ var init_schemas3 = __esm(() => {
18250
18258
  hooks: HooksConfigSchema.optional(),
18251
18259
  interaction: InteractionConfigSchema.optional(),
18252
18260
  precheck: PrecheckConfigSchema.optional(),
18253
- prompts: PromptsConfigSchema.optional()
18261
+ prompts: PromptsConfigSchema.optional(),
18262
+ decompose: DecomposeConfigSchema.optional()
18254
18263
  }).refine((data) => data.version === 1, {
18255
18264
  message: "Invalid version: expected 1",
18256
18265
  path: ["version"]
@@ -18414,7 +18423,15 @@ var init_defaults = __esm(() => {
18414
18423
  maxBulletPoints: 8
18415
18424
  }
18416
18425
  },
18417
- prompts: {}
18426
+ prompts: {},
18427
+ decompose: {
18428
+ trigger: "auto",
18429
+ maxAcceptanceCriteria: 6,
18430
+ maxSubstories: 5,
18431
+ maxSubstoryComplexity: "medium",
18432
+ maxRetries: 2,
18433
+ model: "balanced"
18434
+ }
18418
18435
  };
18419
18436
  });
18420
18437
 
@@ -18424,6 +18441,120 @@ var init_schema = __esm(() => {
18424
18441
  init_defaults();
18425
18442
  });
18426
18443
 
18444
+ // src/decompose/apply.ts
18445
+ function applyDecomposition(prd, result) {
18446
+ const { subStories } = result;
18447
+ if (subStories.length === 0)
18448
+ return;
18449
+ const parentStoryId = subStories[0].parentStoryId;
18450
+ const originalIndex = prd.userStories.findIndex((s) => s.id === parentStoryId);
18451
+ if (originalIndex === -1)
18452
+ return;
18453
+ prd.userStories[originalIndex].status = "decomposed";
18454
+ const newStories = subStories.map((sub) => ({
18455
+ id: sub.id,
18456
+ title: sub.title,
18457
+ description: sub.description,
18458
+ acceptanceCriteria: sub.acceptanceCriteria,
18459
+ tags: sub.tags,
18460
+ dependencies: sub.dependencies,
18461
+ status: "pending",
18462
+ passes: false,
18463
+ escalations: [],
18464
+ attempts: 0,
18465
+ parentStoryId: sub.parentStoryId
18466
+ }));
18467
+ prd.userStories.splice(originalIndex + 1, 0, ...newStories);
18468
+ }
18469
+
18470
+ // src/decompose/sections/codebase.ts
18471
+ function buildCodebaseSection(scan) {
18472
+ const deps = Object.entries(scan.dependencies).slice(0, 15).map(([k, v]) => ` ${k}: ${v}`).join(`
18473
+ `);
18474
+ return [
18475
+ "# Codebase Context",
18476
+ "",
18477
+ "**File Tree:**",
18478
+ scan.fileTree,
18479
+ "",
18480
+ "**Dependencies:**",
18481
+ deps || " (none)",
18482
+ "",
18483
+ `**Test Patterns:** ${scan.testPatterns.join(", ")}`
18484
+ ].join(`
18485
+ `);
18486
+ }
18487
+
18488
+ // src/decompose/sections/constraints.ts
18489
+ function buildConstraintsSection(config2) {
18490
+ return [
18491
+ "# Decomposition Constraints",
18492
+ "",
18493
+ `- **Max sub-stories:** ${config2.maxSubStories}`,
18494
+ `- **Max complexity per sub-story:** ${config2.maxComplexity}`,
18495
+ "",
18496
+ "Respond with ONLY a JSON array (no markdown code fences):",
18497
+ "[{",
18498
+ ` "id": "PARENT-ID-1",`,
18499
+ ` "parentStoryId": "PARENT-ID",`,
18500
+ ` "title": "Sub-story title",`,
18501
+ ` "description": "What to implement",`,
18502
+ ` "acceptanceCriteria": ["Criterion 1"],`,
18503
+ ` "tags": [],`,
18504
+ ` "dependencies": [],`,
18505
+ ` "complexity": "simple",`,
18506
+ ` "nonOverlapJustification": "Why this sub-story does not overlap with sibling stories"`,
18507
+ "}]",
18508
+ "",
18509
+ "The nonOverlapJustification field is required for every sub-story."
18510
+ ].join(`
18511
+ `);
18512
+ }
18513
+
18514
+ // src/decompose/sections/sibling-stories.ts
18515
+ function buildSiblingStoriesSection(targetStory, prd) {
18516
+ const siblings = prd.userStories.filter((s) => s.id !== targetStory.id);
18517
+ if (siblings.length === 0) {
18518
+ return `# Sibling Stories
18519
+
18520
+ No other stories exist in this PRD.`;
18521
+ }
18522
+ const entries = siblings.map((s) => {
18523
+ const acSummary = s.acceptanceCriteria.slice(0, 3).join("; ");
18524
+ return `- **${s.id}** \u2014 ${s.title} [${s.status}]
18525
+ AC: ${acSummary}`;
18526
+ }).join(`
18527
+ `);
18528
+ return ["# Sibling Stories", "", "Avoid overlapping with these existing stories in the PRD:", "", entries].join(`
18529
+ `);
18530
+ }
18531
+
18532
+ // src/decompose/sections/target-story.ts
18533
+ function buildTargetStorySection(story) {
18534
+ const ac = story.acceptanceCriteria.map((c, i) => `${i + 1}. ${c}`).join(`
18535
+ `);
18536
+ const tags = story.tags.length > 0 ? story.tags.join(", ") : "none";
18537
+ const deps = story.dependencies.length > 0 ? story.dependencies.join(", ") : "none";
18538
+ return [
18539
+ "# Target Story to Decompose",
18540
+ "",
18541
+ `**ID:** ${story.id}`,
18542
+ `**Title:** ${story.title}`,
18543
+ "",
18544
+ "**Description:**",
18545
+ story.description,
18546
+ "",
18547
+ "**Acceptance Criteria:**",
18548
+ ac,
18549
+ "",
18550
+ `**Tags:** ${tags}`,
18551
+ `**Dependencies:** ${deps}`,
18552
+ "",
18553
+ "Decompose this story into smaller sub-stories that can each be implemented independently."
18554
+ ].join(`
18555
+ `);
18556
+ }
18557
+
18427
18558
  // src/routing/chain.ts
18428
18559
  class StrategyChain {
18429
18560
  strategies;
@@ -19526,90 +19657,471 @@ var init_routing = __esm(() => {
19526
19657
  init_batch_route();
19527
19658
  });
19528
19659
 
19529
- // package.json
19530
- var package_default;
19531
- var init_package = __esm(() => {
19532
- package_default = {
19533
- name: "@nathapp/nax",
19534
- version: "0.32.2",
19535
- description: "AI Coding Agent Orchestrator \u2014 loops until done",
19536
- type: "module",
19537
- bin: {
19538
- nax: "./dist/nax.js"
19539
- },
19540
- scripts: {
19541
- prepare: "git config core.hooksPath .githooks",
19542
- dev: "bun run bin/nax.ts",
19543
- build: 'bun build bin/nax.ts --outdir dist --target bun --define "GIT_COMMIT=\\"$(git rev-parse --short HEAD)\\""',
19544
- typecheck: "bun x tsc --noEmit",
19545
- lint: "bun x biome check src/ bin/",
19546
- test: "NAX_SKIP_PRECHECK=1 bun test test/ --timeout=60000",
19547
- "test:watch": "bun test --watch",
19548
- "test:unit": "bun test ./test/unit/ --timeout=60000",
19549
- "test:integration": "bun test ./test/integration/ --timeout=60000",
19550
- "test:ui": "bun test ./test/ui/ --timeout=60000",
19551
- prepublishOnly: "bun run build"
19552
- },
19553
- dependencies: {
19554
- "@anthropic-ai/sdk": "^0.74.0",
19555
- "@types/react": "^19.2.14",
19556
- chalk: "^5.6.2",
19557
- commander: "^13.1.0",
19558
- ink: "^6.7.0",
19559
- "ink-spinner": "^5.0.0",
19560
- "ink-testing-library": "^4.0.0",
19561
- react: "^19.2.4",
19562
- zod: "^4.3.6"
19563
- },
19564
- devDependencies: {
19565
- "@biomejs/biome": "^1.9.4",
19566
- "@types/bun": "^1.3.8",
19567
- "react-devtools-core": "^7.0.1",
19568
- typescript: "^5.7.3"
19569
- },
19570
- license: "MIT",
19571
- author: "William Khoo",
19572
- keywords: [
19573
- "ai",
19574
- "agent",
19575
- "orchestrator",
19576
- "tdd",
19577
- "coding"
19578
- ],
19579
- files: [
19580
- "dist/",
19581
- "src/",
19582
- "bin/",
19583
- "README.md",
19584
- "CHANGELOG.md"
19585
- ]
19660
+ // src/decompose/validators/complexity.ts
19661
+ function validateComplexity2(substories, maxComplexity) {
19662
+ const errors3 = [];
19663
+ const warnings = [];
19664
+ const maxOrder = COMPLEXITY_ORDER[maxComplexity];
19665
+ for (const sub of substories) {
19666
+ const assignedOrder = COMPLEXITY_ORDER[sub.complexity];
19667
+ if (assignedOrder > maxOrder) {
19668
+ errors3.push(`Substory ${sub.id} complexity "${sub.complexity}" exceeds maxComplexity "${maxComplexity}"`);
19669
+ }
19670
+ const classified = classifyComplexity2(sub.title, sub.description, sub.acceptanceCriteria, sub.tags);
19671
+ if (classified !== sub.complexity) {
19672
+ const classifiedOrder = COMPLEXITY_ORDER[classified] ?? 0;
19673
+ if (classifiedOrder > assignedOrder) {
19674
+ warnings.push(`Substory ${sub.id} is assigned complexity "${sub.complexity}" but classifier estimates "${classified}" \u2014 may be underestimated`);
19675
+ }
19676
+ }
19677
+ }
19678
+ return { valid: errors3.length === 0, errors: errors3, warnings };
19679
+ }
19680
+ var COMPLEXITY_ORDER;
19681
+ var init_complexity = __esm(() => {
19682
+ init_routing();
19683
+ COMPLEXITY_ORDER = {
19684
+ simple: 0,
19685
+ medium: 1,
19686
+ complex: 2,
19687
+ expert: 3
19586
19688
  };
19587
19689
  });
19588
19690
 
19589
- // src/version.ts
19590
- var NAX_VERSION, NAX_COMMIT, NAX_BUILD_INFO;
19591
- var init_version = __esm(() => {
19592
- init_package();
19593
- NAX_VERSION = package_default.version;
19594
- NAX_COMMIT = (() => {
19595
- try {
19596
- if (/^[0-9a-f]{6,10}$/.test("1012b41"))
19597
- return "1012b41";
19598
- } catch {}
19599
- try {
19600
- const result = Bun.spawnSync(["git", "rev-parse", "--short", "HEAD"], {
19601
- cwd: import.meta.dir,
19602
- stderr: "ignore"
19603
- });
19604
- if (result.exitCode === 0) {
19605
- const hash2 = result.stdout.toString().trim();
19606
- if (/^[0-9a-f]{6,10}$/.test(hash2))
19607
- return hash2;
19691
+ // src/decompose/validators/coverage.ts
19692
+ function extractKeywords(text) {
19693
+ return text.toLowerCase().split(/[\s,.:;!?()\[\]{}"'`\-_/\\]+/).filter((w) => w.length > 2 && !STOP_WORDS.has(w));
19694
+ }
19695
+ function commonPrefixLength(a, b) {
19696
+ let i = 0;
19697
+ while (i < a.length && i < b.length && a[i] === b[i])
19698
+ i++;
19699
+ return i;
19700
+ }
19701
+ function keywordsMatch(a, b) {
19702
+ return a === b || commonPrefixLength(a, b) >= 5;
19703
+ }
19704
+ function isCovered(originalAc, substoryAcs) {
19705
+ const originalKw = extractKeywords(originalAc);
19706
+ if (originalKw.length === 0)
19707
+ return true;
19708
+ const substoryKwList = substoryAcs.flatMap(extractKeywords);
19709
+ let matchCount = 0;
19710
+ for (const kw of originalKw) {
19711
+ if (substoryKwList.some((s) => keywordsMatch(kw, s))) {
19712
+ matchCount++;
19713
+ }
19714
+ }
19715
+ return matchCount > originalKw.length / 2;
19716
+ }
19717
+ function validateCoverage(originalStory, substories) {
19718
+ const warnings = [];
19719
+ const allSubstoryAcs = substories.flatMap((s) => s.acceptanceCriteria);
19720
+ for (const ac of originalStory.acceptanceCriteria ?? []) {
19721
+ if (!isCovered(ac, allSubstoryAcs)) {
19722
+ warnings.push(`Original AC not covered by any substory: "${ac}"`);
19723
+ }
19724
+ }
19725
+ return { valid: true, errors: [], warnings };
19726
+ }
19727
+ var STOP_WORDS;
19728
+ var init_coverage = __esm(() => {
19729
+ STOP_WORDS = new Set([
19730
+ "a",
19731
+ "an",
19732
+ "the",
19733
+ "and",
19734
+ "or",
19735
+ "but",
19736
+ "is",
19737
+ "are",
19738
+ "was",
19739
+ "were",
19740
+ "be",
19741
+ "been",
19742
+ "being",
19743
+ "have",
19744
+ "has",
19745
+ "had",
19746
+ "do",
19747
+ "does",
19748
+ "did",
19749
+ "will",
19750
+ "would",
19751
+ "could",
19752
+ "should",
19753
+ "may",
19754
+ "might",
19755
+ "can",
19756
+ "to",
19757
+ "of",
19758
+ "in",
19759
+ "on",
19760
+ "at",
19761
+ "for",
19762
+ "with",
19763
+ "by",
19764
+ "from",
19765
+ "as",
19766
+ "it",
19767
+ "its",
19768
+ "that",
19769
+ "this",
19770
+ "these",
19771
+ "those",
19772
+ "not",
19773
+ "no",
19774
+ "so",
19775
+ "if",
19776
+ "then",
19777
+ "than",
19778
+ "when",
19779
+ "which",
19780
+ "who",
19781
+ "what",
19782
+ "how",
19783
+ "all",
19784
+ "each",
19785
+ "any",
19786
+ "up",
19787
+ "out",
19788
+ "about",
19789
+ "into",
19790
+ "through",
19791
+ "after",
19792
+ "before"
19793
+ ]);
19794
+ });
19795
+
19796
+ // src/decompose/validators/dependency.ts
19797
+ function detectCycles(substories) {
19798
+ const errors3 = [];
19799
+ const idSet = new Set(substories.map((s) => s.id));
19800
+ const adj = new Map;
19801
+ for (const sub of substories) {
19802
+ adj.set(sub.id, sub.dependencies.filter((d) => idSet.has(d)));
19803
+ }
19804
+ const WHITE = 0;
19805
+ const GRAY = 1;
19806
+ const BLACK = 2;
19807
+ const color = new Map;
19808
+ for (const id of idSet)
19809
+ color.set(id, WHITE);
19810
+ const reported = new Set;
19811
+ function dfs(id, path) {
19812
+ color.set(id, GRAY);
19813
+ for (const dep of adj.get(id) ?? []) {
19814
+ if (color.get(dep) === GRAY) {
19815
+ const cycleKey = [...path, dep].sort().join(",");
19816
+ if (!reported.has(cycleKey)) {
19817
+ reported.add(cycleKey);
19818
+ const cycleStart = path.indexOf(dep);
19819
+ const cycleNodes = cycleStart >= 0 ? path.slice(cycleStart) : path;
19820
+ const cycleStr = [...cycleNodes, dep].join(" -> ");
19821
+ errors3.push(`Circular dependency detected: ${cycleStr}`);
19822
+ }
19823
+ } else if (color.get(dep) === WHITE) {
19824
+ dfs(dep, [...path, dep]);
19825
+ }
19826
+ }
19827
+ color.set(id, BLACK);
19828
+ }
19829
+ for (const id of idSet) {
19830
+ if (color.get(id) === WHITE) {
19831
+ dfs(id, [id]);
19832
+ }
19833
+ }
19834
+ return errors3;
19835
+ }
19836
+ function validateDependencies(substories, existingStoryIds) {
19837
+ const errors3 = [];
19838
+ const substoryIdSet = new Set(substories.map((s) => s.id));
19839
+ const existingIdSet = new Set(existingStoryIds);
19840
+ const allKnownIds = new Set([...substoryIdSet, ...existingIdSet]);
19841
+ for (const sub of substories) {
19842
+ if (existingIdSet.has(sub.id)) {
19843
+ errors3.push(`Substory ID "${sub.id}" collides with existing PRD story \u2014 duplicate IDs are not allowed`);
19844
+ }
19845
+ }
19846
+ for (const sub of substories) {
19847
+ for (const dep of sub.dependencies) {
19848
+ if (!allKnownIds.has(dep)) {
19849
+ errors3.push(`Substory ${sub.id} references non-existent story ID "${dep}"`);
19608
19850
  }
19609
- } catch {}
19610
- return "dev";
19611
- })();
19612
- NAX_BUILD_INFO = NAX_COMMIT === "dev" ? `v${NAX_VERSION}` : `v${NAX_VERSION} (${NAX_COMMIT})`;
19851
+ }
19852
+ }
19853
+ const cycleErrors = detectCycles(substories);
19854
+ errors3.push(...cycleErrors);
19855
+ return { valid: errors3.length === 0, errors: errors3, warnings: [] };
19856
+ }
19857
+
19858
+ // src/decompose/validators/overlap.ts
19859
+ function extractKeywords2(texts) {
19860
+ const words = texts.join(" ").toLowerCase().split(/[\s,.:;!?()\[\]{}"'`\-_/\\]+/).filter((w) => w.length > 2 && !STOP_WORDS2.has(w) && !/^\d+$/.test(w));
19861
+ return new Set(words);
19862
+ }
19863
+ function jaccardSimilarity(a, b) {
19864
+ if (a.size === 0 && b.size === 0)
19865
+ return 0;
19866
+ let intersectionSize = 0;
19867
+ for (const word of a) {
19868
+ if (b.has(word))
19869
+ intersectionSize++;
19870
+ }
19871
+ const unionSize = a.size + b.size - intersectionSize;
19872
+ return unionSize === 0 ? 0 : intersectionSize / unionSize;
19873
+ }
19874
+ function substoryKeywords(s) {
19875
+ return extractKeywords2([s.title, ...s.tags]);
19876
+ }
19877
+ function storyKeywords(s) {
19878
+ return extractKeywords2([s.title, ...s.tags ?? []]);
19879
+ }
19880
+ function validateOverlap(substories, existingStories) {
19881
+ const errors3 = [];
19882
+ const warnings = [];
19883
+ for (const sub of substories) {
19884
+ const subKw = substoryKeywords(sub);
19885
+ for (const existing of existingStories) {
19886
+ const exKw = storyKeywords(existing);
19887
+ const sim = jaccardSimilarity(subKw, exKw);
19888
+ if (sim > 0.8) {
19889
+ errors3.push(`Substory ${sub.id} overlaps with existing story ${existing.id} (similarity ${sim.toFixed(2)} > 0.8)`);
19890
+ } else if (sim > 0.6) {
19891
+ warnings.push(`Substory ${sub.id} may overlap with existing story ${existing.id} (similarity ${sim.toFixed(2)} > 0.6)`);
19892
+ }
19893
+ }
19894
+ }
19895
+ return { valid: errors3.length === 0, errors: errors3, warnings };
19896
+ }
19897
+ var STOP_WORDS2;
19898
+ var init_overlap = __esm(() => {
19899
+ STOP_WORDS2 = new Set([
19900
+ "a",
19901
+ "an",
19902
+ "the",
19903
+ "and",
19904
+ "or",
19905
+ "but",
19906
+ "is",
19907
+ "are",
19908
+ "was",
19909
+ "were",
19910
+ "be",
19911
+ "been",
19912
+ "being",
19913
+ "have",
19914
+ "has",
19915
+ "had",
19916
+ "do",
19917
+ "does",
19918
+ "did",
19919
+ "will",
19920
+ "would",
19921
+ "could",
19922
+ "should",
19923
+ "may",
19924
+ "might",
19925
+ "can",
19926
+ "to",
19927
+ "of",
19928
+ "in",
19929
+ "on",
19930
+ "at",
19931
+ "for",
19932
+ "with",
19933
+ "by",
19934
+ "from",
19935
+ "as",
19936
+ "it",
19937
+ "its",
19938
+ "that",
19939
+ "this",
19940
+ "these",
19941
+ "those",
19942
+ "not",
19943
+ "no",
19944
+ "so",
19945
+ "if",
19946
+ "then",
19947
+ "than",
19948
+ "when",
19949
+ "which",
19950
+ "who",
19951
+ "what",
19952
+ "how",
19953
+ "all",
19954
+ "each",
19955
+ "any",
19956
+ "up",
19957
+ "out",
19958
+ "about",
19959
+ "into",
19960
+ "through",
19961
+ "after",
19962
+ "before"
19963
+ ]);
19964
+ });
19965
+
19966
+ // src/decompose/validators/index.ts
19967
+ function runAllValidators(originalStory, substories, existingStories, config2) {
19968
+ const existingIds = existingStories.map((s) => s.id);
19969
+ const maxComplexity = config2.maxComplexity ?? "medium";
19970
+ const results = [
19971
+ validateOverlap(substories, existingStories),
19972
+ validateCoverage(originalStory, substories),
19973
+ validateComplexity2(substories, maxComplexity),
19974
+ validateDependencies(substories, existingIds)
19975
+ ];
19976
+ const errors3 = results.flatMap((r) => r.errors);
19977
+ const warnings = results.flatMap((r) => r.warnings);
19978
+ return { valid: errors3.length === 0, errors: errors3, warnings };
19979
+ }
19980
+ var init_validators = __esm(() => {
19981
+ init_complexity();
19982
+ init_coverage();
19983
+ init_overlap();
19984
+ });
19985
+
19986
+ // src/decompose/builder.ts
19987
+ class DecomposeBuilder {
19988
+ _story;
19989
+ _prd;
19990
+ _scan;
19991
+ _cfg;
19992
+ constructor(story) {
19993
+ this._story = story;
19994
+ }
19995
+ static for(story) {
19996
+ return new DecomposeBuilder(story);
19997
+ }
19998
+ prd(prd) {
19999
+ this._prd = prd;
20000
+ return this;
20001
+ }
20002
+ codebase(scan) {
20003
+ this._scan = scan;
20004
+ return this;
20005
+ }
20006
+ config(cfg) {
20007
+ this._cfg = cfg;
20008
+ return this;
20009
+ }
20010
+ buildPrompt(errorFeedback) {
20011
+ const sections = [];
20012
+ sections.push(buildTargetStorySection(this._story));
20013
+ if (this._prd) {
20014
+ sections.push(buildSiblingStoriesSection(this._story, this._prd));
20015
+ }
20016
+ if (this._scan) {
20017
+ sections.push(buildCodebaseSection(this._scan));
20018
+ }
20019
+ if (this._cfg) {
20020
+ sections.push(buildConstraintsSection(this._cfg));
20021
+ }
20022
+ if (errorFeedback) {
20023
+ sections.push(`## Validation Errors from Previous Attempt
20024
+
20025
+ Fix the following errors and try again:
20026
+
20027
+ ${errorFeedback}`);
20028
+ }
20029
+ return sections.join(SECTION_SEP);
20030
+ }
20031
+ async decompose(adapter) {
20032
+ const cfg = this._cfg;
20033
+ const maxRetries = cfg?.maxRetries ?? 0;
20034
+ const existingStories = this._prd ? this._prd.userStories.filter((s) => s.id !== this._story.id) : [];
20035
+ let lastResult;
20036
+ let errorFeedback;
20037
+ for (let attempt = 0;attempt <= maxRetries; attempt++) {
20038
+ const prompt = this.buildPrompt(errorFeedback);
20039
+ const raw = await adapter.decompose(prompt);
20040
+ const parsed = parseSubStories(raw);
20041
+ if (!parsed.validation.valid) {
20042
+ lastResult = parsed;
20043
+ errorFeedback = parsed.validation.errors.join(`
20044
+ `);
20045
+ continue;
20046
+ }
20047
+ const config2 = cfg ?? { maxSubStories: 5, maxComplexity: "medium" };
20048
+ const validation = runAllValidators(this._story, parsed.subStories, existingStories, config2);
20049
+ if (!validation.valid) {
20050
+ lastResult = { subStories: parsed.subStories, validation };
20051
+ errorFeedback = validation.errors.join(`
20052
+ `);
20053
+ continue;
20054
+ }
20055
+ return { subStories: parsed.subStories, validation };
20056
+ }
20057
+ return lastResult ?? {
20058
+ subStories: [],
20059
+ validation: { valid: false, errors: ["Decomposition failed after all retries"], warnings: [] }
20060
+ };
20061
+ }
20062
+ }
20063
+ function parseSubStories(output) {
20064
+ const fenceMatch = output.match(/```(?:json)?\s*(\[[\s\S]*?\])\s*```/);
20065
+ let jsonText = fenceMatch ? fenceMatch[1] : output;
20066
+ if (!fenceMatch) {
20067
+ const arrayMatch = output.match(/\[[\s\S]*\]/);
20068
+ if (arrayMatch) {
20069
+ jsonText = arrayMatch[0];
20070
+ }
20071
+ }
20072
+ let parsed;
20073
+ try {
20074
+ parsed = JSON.parse(jsonText.trim());
20075
+ } catch (err) {
20076
+ return {
20077
+ subStories: [],
20078
+ validation: { valid: false, errors: [`Failed to parse JSON: ${err.message}`], warnings: [] }
20079
+ };
20080
+ }
20081
+ if (!Array.isArray(parsed)) {
20082
+ return {
20083
+ subStories: [],
20084
+ validation: { valid: false, errors: ["Output is not a JSON array"], warnings: [] }
20085
+ };
20086
+ }
20087
+ const errors3 = [];
20088
+ const subStories = [];
20089
+ for (const [index, item] of parsed.entries()) {
20090
+ if (typeof item !== "object" || item === null) {
20091
+ errors3.push(`Item at index ${index} is not an object`);
20092
+ continue;
20093
+ }
20094
+ const r = item;
20095
+ subStories.push({
20096
+ id: String(r.id ?? ""),
20097
+ parentStoryId: String(r.parentStoryId ?? ""),
20098
+ title: String(r.title ?? ""),
20099
+ description: String(r.description ?? ""),
20100
+ acceptanceCriteria: Array.isArray(r.acceptanceCriteria) ? r.acceptanceCriteria : [],
20101
+ tags: Array.isArray(r.tags) ? r.tags : [],
20102
+ dependencies: Array.isArray(r.dependencies) ? r.dependencies : [],
20103
+ complexity: normalizeComplexity(r.complexity),
20104
+ nonOverlapJustification: String(r.nonOverlapJustification ?? "")
20105
+ });
20106
+ }
20107
+ return {
20108
+ subStories,
20109
+ validation: { valid: errors3.length === 0, errors: errors3, warnings: [] }
20110
+ };
20111
+ }
20112
+ function normalizeComplexity(value) {
20113
+ if (value === "simple" || value === "medium" || value === "complex" || value === "expert") {
20114
+ return value;
20115
+ }
20116
+ return "medium";
20117
+ }
20118
+ var SECTION_SEP = `
20119
+
20120
+ ---
20121
+
20122
+ `;
20123
+ var init_builder2 = __esm(() => {
20124
+ init_validators();
19613
20125
  });
19614
20126
 
19615
20127
  // src/prd/types.ts
@@ -19623,8 +20135,8 @@ function isStalled(prd) {
19623
20135
  const remaining = prd.userStories.filter((s) => s.status !== "passed" && s.status !== "skipped");
19624
20136
  if (remaining.length === 0)
19625
20137
  return false;
19626
- const blockedIds = new Set(prd.userStories.filter((s) => s.status === "blocked" || s.status === "failed" || s.status === "paused").map((s) => s.id));
19627
- return remaining.every((s) => s.status === "blocked" || s.status === "failed" || s.status === "paused" || s.dependencies.some((dep) => blockedIds.has(dep)));
20138
+ const blockedIds = new Set(prd.userStories.filter((s) => s.status === "blocked" || s.status === "failed" || s.status === "paused" || s.status === "regression-failed").map((s) => s.id));
20139
+ return remaining.every((s) => s.status === "blocked" || s.status === "failed" || s.status === "paused" || s.status === "regression-failed" || s.dependencies.some((dep) => blockedIds.has(dep)));
19628
20140
  }
19629
20141
 
19630
20142
  // src/prd/index.ts
@@ -19673,7 +20185,7 @@ function getNextStory(prd, currentStoryId, maxRetries) {
19673
20185
  }
19674
20186
  }
19675
20187
  const completedIds = new Set(prd.userStories.filter((s) => s.passes || s.status === "passed" || s.status === "skipped").map((s) => s.id));
19676
- return prd.userStories.find((s) => !s.passes && s.status !== "passed" && s.status !== "skipped" && s.status !== "blocked" && s.status !== "failed" && s.status !== "paused" && s.dependencies.every((dep) => completedIds.has(dep))) ?? null;
20188
+ return prd.userStories.find((s) => !s.passes && s.status !== "passed" && s.status !== "skipped" && s.status !== "blocked" && s.status !== "failed" && s.status !== "paused" && s.status !== "decomposed" && s.dependencies.every((dep) => completedIds.has(dep))) ?? null;
19677
20189
  }
19678
20190
  function isComplete(prd) {
19679
20191
  return prd.userStories.every((s) => s.passes || s.status === "passed" || s.status === "skipped");
@@ -19682,11 +20194,12 @@ function countStories(prd) {
19682
20194
  return {
19683
20195
  total: prd.userStories.length,
19684
20196
  passed: prd.userStories.filter((s) => s.passes || s.status === "passed").length,
19685
- failed: prd.userStories.filter((s) => s.status === "failed").length,
19686
- pending: prd.userStories.filter((s) => !s.passes && s.status !== "passed" && s.status !== "failed" && s.status !== "skipped" && s.status !== "blocked" && s.status !== "paused").length,
20197
+ failed: prd.userStories.filter((s) => s.status === "failed" || s.status === "regression-failed").length,
20198
+ pending: prd.userStories.filter((s) => !s.passes && s.status !== "passed" && s.status !== "failed" && s.status !== "skipped" && s.status !== "blocked" && s.status !== "paused" && s.status !== "regression-failed" && s.status !== "decomposed").length,
19687
20199
  skipped: prd.userStories.filter((s) => s.status === "skipped").length,
19688
20200
  blocked: prd.userStories.filter((s) => s.status === "blocked").length,
19689
- paused: prd.userStories.filter((s) => s.status === "paused").length
20201
+ paused: prd.userStories.filter((s) => s.status === "paused").length,
20202
+ decomposed: prd.userStories.filter((s) => s.status === "decomposed").length
19690
20203
  };
19691
20204
  }
19692
20205
  function markStoryPassed(prd, storyId) {
@@ -19724,6 +20237,92 @@ var init_prd = __esm(() => {
19724
20237
  PRD_MAX_FILE_SIZE = 5 * 1024 * 1024;
19725
20238
  });
19726
20239
 
20240
+ // package.json
20241
+ var package_default;
20242
+ var init_package = __esm(() => {
20243
+ package_default = {
20244
+ name: "@nathapp/nax",
20245
+ version: "0.34.0",
20246
+ description: "AI Coding Agent Orchestrator \u2014 loops until done",
20247
+ type: "module",
20248
+ bin: {
20249
+ nax: "./dist/nax.js"
20250
+ },
20251
+ scripts: {
20252
+ prepare: "git config core.hooksPath .githooks",
20253
+ dev: "bun run bin/nax.ts",
20254
+ build: 'bun build bin/nax.ts --outdir dist --target bun --define "GIT_COMMIT=\\"$(git rev-parse --short HEAD)\\""',
20255
+ typecheck: "bun x tsc --noEmit",
20256
+ lint: "bun x biome check src/ bin/",
20257
+ test: "NAX_SKIP_PRECHECK=1 bun test test/ --timeout=60000",
20258
+ "test:watch": "bun test --watch",
20259
+ "test:unit": "bun test ./test/unit/ --timeout=60000",
20260
+ "test:integration": "bun test ./test/integration/ --timeout=60000",
20261
+ "test:ui": "bun test ./test/ui/ --timeout=60000",
20262
+ prepublishOnly: "bun run build"
20263
+ },
20264
+ dependencies: {
20265
+ "@anthropic-ai/sdk": "^0.74.0",
20266
+ "@types/react": "^19.2.14",
20267
+ chalk: "^5.6.2",
20268
+ commander: "^13.1.0",
20269
+ ink: "^6.7.0",
20270
+ "ink-spinner": "^5.0.0",
20271
+ "ink-testing-library": "^4.0.0",
20272
+ react: "^19.2.4",
20273
+ zod: "^4.3.6"
20274
+ },
20275
+ devDependencies: {
20276
+ "@biomejs/biome": "^1.9.4",
20277
+ "@types/bun": "^1.3.8",
20278
+ "react-devtools-core": "^7.0.1",
20279
+ typescript: "^5.7.3"
20280
+ },
20281
+ license: "MIT",
20282
+ author: "William Khoo",
20283
+ keywords: [
20284
+ "ai",
20285
+ "agent",
20286
+ "orchestrator",
20287
+ "tdd",
20288
+ "coding"
20289
+ ],
20290
+ files: [
20291
+ "dist/",
20292
+ "src/",
20293
+ "bin/",
20294
+ "README.md",
20295
+ "CHANGELOG.md"
20296
+ ]
20297
+ };
20298
+ });
20299
+
20300
+ // src/version.ts
20301
+ var NAX_VERSION, NAX_COMMIT, NAX_BUILD_INFO;
20302
+ var init_version = __esm(() => {
20303
+ init_package();
20304
+ NAX_VERSION = package_default.version;
20305
+ NAX_COMMIT = (() => {
20306
+ try {
20307
+ if (/^[0-9a-f]{6,10}$/.test("a679961"))
20308
+ return "a679961";
20309
+ } catch {}
20310
+ try {
20311
+ const result = Bun.spawnSync(["git", "rev-parse", "--short", "HEAD"], {
20312
+ cwd: import.meta.dir,
20313
+ stderr: "ignore"
20314
+ });
20315
+ if (result.exitCode === 0) {
20316
+ const hash2 = result.stdout.toString().trim();
20317
+ if (/^[0-9a-f]{6,10}$/.test(hash2))
20318
+ return hash2;
20319
+ }
20320
+ } catch {}
20321
+ return "dev";
20322
+ })();
20323
+ NAX_BUILD_INFO = NAX_COMMIT === "dev" ? `v${NAX_VERSION}` : `v${NAX_VERSION} (${NAX_COMMIT})`;
20324
+ });
20325
+
19727
20326
  // src/errors.ts
19728
20327
  var NaxError, AgentNotFoundError, AgentNotInstalledError, StoryLimitExceededError, LockAcquisitionError;
19729
20328
  var init_errors3 = __esm(() => {
@@ -19785,6 +20384,8 @@ function collectStoryMetrics(ctx, storyStartTime) {
19785
20384
  const modelDef = modelEntry ? resolveModel(modelEntry) : null;
19786
20385
  const modelUsed = modelDef?.model || routing.modelTier;
19787
20386
  const initialComplexity = story.routing?.initialComplexity ?? routing.complexity;
20387
+ const isTddStrategy = routing.testStrategy === "three-session-tdd" || routing.testStrategy === "three-session-tdd-lite";
20388
+ const fullSuiteGatePassed = isTddStrategy ? ctx.fullSuiteGatePassed ?? false : false;
19788
20389
  return {
19789
20390
  storyId: story.id,
19790
20391
  complexity: routing.complexity,
@@ -19798,7 +20399,8 @@ function collectStoryMetrics(ctx, storyStartTime) {
19798
20399
  durationMs: agentResult?.durationMs || 0,
19799
20400
  firstPassSuccess,
19800
20401
  startedAt: storyStartTime,
19801
- completedAt: new Date().toISOString()
20402
+ completedAt: new Date().toISOString(),
20403
+ fullSuiteGatePassed
19802
20404
  };
19803
20405
  }
19804
20406
  function collectBatchMetrics(ctx, storyStartTime) {
@@ -19827,7 +20429,8 @@ function collectBatchMetrics(ctx, storyStartTime) {
19827
20429
  durationMs: durationPerStory,
19828
20430
  firstPassSuccess: true,
19829
20431
  startedAt: storyStartTime,
19830
- completedAt: new Date().toISOString()
20432
+ completedAt: new Date().toISOString(),
20433
+ fullSuiteGatePassed: false
19831
20434
  };
19832
20435
  });
19833
20436
  }
@@ -20015,6 +20618,11 @@ var init_types2 = __esm(() => {
20015
20618
  safety: "yellow",
20016
20619
  defaultSummary: "Human review required for story {{storyId}} \u2014 skip and continue?"
20017
20620
  },
20621
+ "story-oversized": {
20622
+ defaultFallback: "continue",
20623
+ safety: "yellow",
20624
+ defaultSummary: "Story {{storyId}} is oversized ({{criteriaCount}} acceptance criteria) \u2014 decompose into smaller stories?"
20625
+ },
20018
20626
  "story-ambiguity": {
20019
20627
  defaultFallback: "continue",
20020
20628
  safety: "green",
@@ -21103,6 +21711,20 @@ async function checkReviewGate(context, config2, chain) {
21103
21711
  const response = await executeTrigger("review-gate", context, config2, chain);
21104
21712
  return response.action === "approve";
21105
21713
  }
21714
+ async function checkStoryOversized(context, config2, chain) {
21715
+ if (!isTriggerEnabled("story-oversized", config2))
21716
+ return "continue";
21717
+ try {
21718
+ const response = await executeTrigger("story-oversized", context, config2, chain);
21719
+ if (response.action === "approve")
21720
+ return "decompose";
21721
+ if (response.action === "skip")
21722
+ return "skip";
21723
+ return "continue";
21724
+ } catch {
21725
+ return "continue";
21726
+ }
21727
+ }
21106
21728
  var init_triggers = __esm(() => {
21107
21729
  init_types2();
21108
21730
  });
@@ -21641,7 +22263,8 @@ class ReviewOrchestrator {
21641
22263
  name: reviewer.name,
21642
22264
  passed: result.passed,
21643
22265
  output: result.output,
21644
- exitCode: result.exitCode
22266
+ exitCode: result.exitCode,
22267
+ findings: result.findings
21645
22268
  });
21646
22269
  if (!result.passed) {
21647
22270
  builtIn.pluginReviewers = pluginResults;
@@ -21697,6 +22320,10 @@ var init_review = __esm(() => {
21697
22320
  const result = await reviewOrchestrator.review(ctx.config.review, ctx.workdir, ctx.config.execution, ctx.plugins);
21698
22321
  ctx.reviewResult = result.builtIn;
21699
22322
  if (!result.success) {
22323
+ const allFindings = result.builtIn.pluginReviewers?.flatMap((pr) => pr.findings ?? []) ?? [];
22324
+ if (allFindings.length > 0) {
22325
+ ctx.reviewFindings = allFindings;
22326
+ }
21700
22327
  if (result.pluginFailed) {
21701
22328
  if (ctx.interaction && isTriggerEnabled("security-review", ctx.config)) {
21702
22329
  const shouldContinue = await _reviewDeps.checkSecurityReview({ featureName: ctx.prd.feature, storyId: ctx.story.id }, ctx.config, ctx.interaction);
@@ -22026,7 +22653,7 @@ var init_constitution2 = __esm(() => {
22026
22653
  });
22027
22654
 
22028
22655
  // src/context/auto-detect.ts
22029
- function extractKeywords(title) {
22656
+ function extractKeywords3(title) {
22030
22657
  const stopWords = new Set([
22031
22658
  "the",
22032
22659
  "a",
@@ -22079,7 +22706,7 @@ function extractKeywords(title) {
22079
22706
  async function autoDetectContextFiles(options) {
22080
22707
  const { workdir, storyTitle, maxFiles = 5 } = options;
22081
22708
  const logger = getLogger();
22082
- const keywords = extractKeywords(storyTitle);
22709
+ const keywords = extractKeywords3(storyTitle);
22083
22710
  if (keywords.length === 0) {
22084
22711
  logger.debug("auto-detect", "No keywords extracted from story title", { storyTitle });
22085
22712
  return [];
@@ -22213,6 +22840,20 @@ function formatPriorFailures(failures) {
22213
22840
  }
22214
22841
  }
22215
22842
  }
22843
+ if (failure.reviewFindings && failure.reviewFindings.length > 0) {
22844
+ parts.push(`
22845
+ **Review Findings (fix these issues):**`);
22846
+ for (const finding of failure.reviewFindings) {
22847
+ const source = finding.source ? ` (${finding.source})` : "";
22848
+ parts.push(`
22849
+ - **[${finding.severity}]** \`${finding.file}:${finding.line}\`${source}`);
22850
+ parts.push(` **Rule:** ${finding.ruleId}`);
22851
+ parts.push(` **Issue:** ${finding.message}`);
22852
+ if (finding.url) {
22853
+ parts.push(` **Docs:** ${finding.url}`);
22854
+ }
22855
+ }
22856
+ }
22216
22857
  parts.push("");
22217
22858
  }
22218
22859
  return parts.join(`
@@ -22440,7 +23081,7 @@ async function generateTestCoverageSummary(options) {
22440
23081
  var COMMON_TEST_DIRS;
22441
23082
  var init_test_scanner = __esm(() => {
22442
23083
  init_logger2();
22443
- init_builder2();
23084
+ init_builder3();
22444
23085
  COMMON_TEST_DIRS = ["test", "tests", "__tests__", "src/__tests__", "spec"];
22445
23086
  });
22446
23087
 
@@ -22719,7 +23360,7 @@ ${content}
22719
23360
  }
22720
23361
  }
22721
23362
  var _deps4;
22722
- var init_builder2 = __esm(() => {
23363
+ var init_builder3 = __esm(() => {
22723
23364
  init_logger2();
22724
23365
  init_prd();
22725
23366
  init_auto_detect();
@@ -22733,7 +23374,7 @@ var init_builder2 = __esm(() => {
22733
23374
 
22734
23375
  // src/context/index.ts
22735
23376
  var init_context = __esm(() => {
22736
- init_builder2();
23377
+ init_builder3();
22737
23378
  init_test_scanner();
22738
23379
  init_auto_detect();
22739
23380
  });
@@ -23948,7 +24589,7 @@ ${this._constitution}`);
23948
24589
  sections.push(this._contextMd);
23949
24590
  }
23950
24591
  sections.push(buildConventionsSection());
23951
- return sections.join(SECTION_SEP);
24592
+ return sections.join(SECTION_SEP2);
23952
24593
  }
23953
24594
  async _resolveRoleBody() {
23954
24595
  if (this._workdir && this._loaderConfig) {
@@ -23970,18 +24611,18 @@ ${this._constitution}`);
23970
24611
  return buildRoleTaskSection(this._role, variant);
23971
24612
  }
23972
24613
  }
23973
- var SECTION_SEP = `
24614
+ var SECTION_SEP2 = `
23974
24615
 
23975
24616
  ---
23976
24617
 
23977
24618
  `;
23978
- var init_builder3 = __esm(() => {
24619
+ var init_builder4 = __esm(() => {
23979
24620
  init_isolation2();
23980
24621
  });
23981
24622
 
23982
24623
  // src/prompts/index.ts
23983
24624
  var init_prompts2 = __esm(() => {
23984
- init_builder3();
24625
+ init_builder4();
23985
24626
  });
23986
24627
 
23987
24628
  // src/tdd/session-runner.ts
@@ -25940,9 +26581,25 @@ var init_regression2 = __esm(() => {
25940
26581
  });
25941
26582
 
25942
26583
  // src/pipeline/stages/routing.ts
26584
+ async function runDecompose(story, prd, config2, _workdir) {
26585
+ const naxDecompose = config2.decompose;
26586
+ const builderConfig = {
26587
+ maxSubStories: naxDecompose?.maxSubstories ?? 5,
26588
+ maxComplexity: naxDecompose?.maxSubstoryComplexity ?? "medium",
26589
+ maxRetries: naxDecompose?.maxRetries ?? 2
26590
+ };
26591
+ const adapter = {
26592
+ async decompose(_prompt) {
26593
+ throw new Error("[decompose] No LLM adapter configured for story decomposition");
26594
+ }
26595
+ };
26596
+ return DecomposeBuilder.for(story).prd(prd).config(builderConfig).decompose(adapter);
26597
+ }
25943
26598
  var routingStage, _routingDeps;
25944
26599
  var init_routing2 = __esm(() => {
25945
26600
  init_greenfield();
26601
+ init_builder2();
26602
+ init_triggers();
25946
26603
  init_logger2();
25947
26604
  init_prd();
25948
26605
  init_routing();
@@ -26011,6 +26668,46 @@ var init_routing2 = __esm(() => {
26011
26668
  if (!isBatch) {
26012
26669
  logger.debug("routing", ctx.routing.reasoning);
26013
26670
  }
26671
+ const decomposeConfig = ctx.config.decompose;
26672
+ if (decomposeConfig) {
26673
+ const acCount = ctx.story.acceptanceCriteria.length;
26674
+ const complexity = ctx.routing.complexity;
26675
+ const isOversized = acCount > decomposeConfig.maxAcceptanceCriteria && (complexity === "complex" || complexity === "expert");
26676
+ if (isOversized) {
26677
+ if (decomposeConfig.trigger === "disabled") {
26678
+ logger.warn("routing", `Story ${ctx.story.id} is oversized (${acCount} ACs) but decompose is disabled \u2014 continuing with original`);
26679
+ } else if (decomposeConfig.trigger === "auto") {
26680
+ const result = await _routingDeps.runDecompose(ctx.story, ctx.prd, ctx.config, ctx.workdir);
26681
+ if (result.validation.valid) {
26682
+ _routingDeps.applyDecomposition(ctx.prd, result);
26683
+ if (ctx.prdPath) {
26684
+ await _routingDeps.savePRD(ctx.prd, ctx.prdPath);
26685
+ }
26686
+ logger.info("routing", `Story ${ctx.story.id} decomposed into ${result.subStories.length} substories`);
26687
+ return { action: "skip", reason: `Decomposed into ${result.subStories.length} substories` };
26688
+ }
26689
+ logger.warn("routing", `Story ${ctx.story.id} decompose failed after retries \u2014 continuing with original`, {
26690
+ errors: result.validation.errors
26691
+ });
26692
+ } else if (decomposeConfig.trigger === "confirm") {
26693
+ const action = await _routingDeps.checkStoryOversized({ featureName: ctx.prd.feature, storyId: ctx.story.id, criteriaCount: acCount }, ctx.config, ctx.interaction);
26694
+ if (action === "decompose") {
26695
+ const result = await _routingDeps.runDecompose(ctx.story, ctx.prd, ctx.config, ctx.workdir);
26696
+ if (result.validation.valid) {
26697
+ _routingDeps.applyDecomposition(ctx.prd, result);
26698
+ if (ctx.prdPath) {
26699
+ await _routingDeps.savePRD(ctx.prd, ctx.prdPath);
26700
+ }
26701
+ logger.info("routing", `Story ${ctx.story.id} decomposed into ${result.subStories.length} substories`);
26702
+ return { action: "skip", reason: `Decomposed into ${result.subStories.length} substories` };
26703
+ }
26704
+ logger.warn("routing", `Story ${ctx.story.id} decompose failed after retries \u2014 continuing with original`, {
26705
+ errors: result.validation.errors
26706
+ });
26707
+ }
26708
+ }
26709
+ }
26710
+ }
26014
26711
  return { action: "continue" };
26015
26712
  }
26016
26713
  };
@@ -26020,7 +26717,10 @@ var init_routing2 = __esm(() => {
26020
26717
  isGreenfieldStory,
26021
26718
  clearCache,
26022
26719
  savePRD,
26023
- computeStoryContentHash
26720
+ computeStoryContentHash,
26721
+ applyDecomposition,
26722
+ runDecompose,
26723
+ checkStoryOversized
26024
26724
  };
26025
26725
  });
26026
26726
 
@@ -27326,251 +28026,6 @@ var init_precheck = __esm(() => {
27326
28026
  };
27327
28027
  });
27328
28028
 
27329
- // src/execution/crash-recovery.ts
27330
- import { appendFileSync as appendFileSync2 } from "fs";
27331
- async function writeFatalLog(jsonlFilePath, signal, error48) {
27332
- if (!jsonlFilePath)
27333
- return;
27334
- try {
27335
- const fatalEntry = {
27336
- timestamp: new Date().toISOString(),
27337
- level: "error",
27338
- stage: "crash-recovery",
27339
- message: error48 ? `Uncaught exception: ${error48.message}` : `Process terminated by ${signal}`,
27340
- data: {
27341
- signal,
27342
- ...error48 && {
27343
- stack: error48.stack,
27344
- name: error48.name
27345
- }
27346
- }
27347
- };
27348
- const line = `${JSON.stringify(fatalEntry)}
27349
- `;
27350
- appendFileSync2(jsonlFilePath, line);
27351
- } catch (err) {
27352
- console.error("[crash-recovery] Failed to write fatal log:", err);
27353
- }
27354
- }
27355
- async function writeRunComplete(ctx, exitReason) {
27356
- if (!ctx.jsonlFilePath || !ctx.runId || !ctx.feature)
27357
- return;
27358
- const logger = getSafeLogger();
27359
- try {
27360
- const totalCost = ctx.getTotalCost();
27361
- const iterations = ctx.getIterations();
27362
- const startTime = ctx.getStartTime?.() ?? Date.now();
27363
- const durationMs = Date.now() - startTime;
27364
- const totalStories = ctx.getTotalStories?.() ?? 0;
27365
- const storiesCompleted = ctx.getStoriesCompleted?.() ?? 0;
27366
- const runCompleteEntry = {
27367
- timestamp: new Date().toISOString(),
27368
- level: "info",
27369
- stage: "run.complete",
27370
- message: "Feature execution terminated",
27371
- data: {
27372
- runId: ctx.runId,
27373
- feature: ctx.feature,
27374
- success: false,
27375
- exitReason,
27376
- totalCost,
27377
- iterations,
27378
- totalStories,
27379
- storiesCompleted,
27380
- durationMs
27381
- }
27382
- };
27383
- const line = `${JSON.stringify(runCompleteEntry)}
27384
- `;
27385
- appendFileSync2(ctx.jsonlFilePath, line);
27386
- logger?.debug("crash-recovery", "run.complete event written", { exitReason });
27387
- } catch (err) {
27388
- console.error("[crash-recovery] Failed to write run.complete event:", err);
27389
- }
27390
- }
27391
- async function updateStatusToCrashed(statusWriter, totalCost, iterations, signal, featureDir) {
27392
- try {
27393
- statusWriter.setRunStatus("crashed");
27394
- await statusWriter.update(totalCost, iterations, {
27395
- crashedAt: new Date().toISOString(),
27396
- crashSignal: signal
27397
- });
27398
- if (featureDir) {
27399
- await statusWriter.writeFeatureStatus(featureDir, totalCost, iterations, {
27400
- crashedAt: new Date().toISOString(),
27401
- crashSignal: signal
27402
- });
27403
- }
27404
- } catch (err) {
27405
- console.error("[crash-recovery] Failed to update status.json:", err);
27406
- }
27407
- }
27408
- function installCrashHandlers(ctx) {
27409
- if (handlersInstalled) {
27410
- return () => {};
27411
- }
27412
- const logger = getSafeLogger();
27413
- const handleSignal = async (signal) => {
27414
- const hardDeadline = setTimeout(() => {
27415
- process.exit(128 + getSignalNumber(signal));
27416
- }, 1e4);
27417
- if (hardDeadline.unref)
27418
- hardDeadline.unref();
27419
- logger?.error("crash-recovery", `Received ${signal}, shutting down...`, { signal });
27420
- if (ctx.pidRegistry) {
27421
- await ctx.pidRegistry.killAll();
27422
- }
27423
- ctx.emitError?.(signal.toLowerCase());
27424
- await writeFatalLog(ctx.jsonlFilePath, signal);
27425
- await writeRunComplete(ctx, signal.toLowerCase());
27426
- await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), signal, ctx.featureDir);
27427
- stopHeartbeat();
27428
- clearTimeout(hardDeadline);
27429
- process.exit(128 + getSignalNumber(signal));
27430
- };
27431
- const sigtermHandler = () => handleSignal("SIGTERM");
27432
- const sigintHandler = () => handleSignal("SIGINT");
27433
- const sighupHandler = () => handleSignal("SIGHUP");
27434
- process.on("SIGTERM", sigtermHandler);
27435
- process.on("SIGINT", sigintHandler);
27436
- process.on("SIGHUP", sighupHandler);
27437
- const uncaughtExceptionHandler = async (error48) => {
27438
- logger?.error("crash-recovery", "Uncaught exception", {
27439
- error: error48.message,
27440
- stack: error48.stack
27441
- });
27442
- if (ctx.pidRegistry) {
27443
- await ctx.pidRegistry.killAll();
27444
- }
27445
- ctx.emitError?.("uncaughtException");
27446
- await writeFatalLog(ctx.jsonlFilePath, "uncaughtException", error48);
27447
- await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), "uncaughtException", ctx.featureDir);
27448
- stopHeartbeat();
27449
- process.exit(1);
27450
- };
27451
- process.on("uncaughtException", uncaughtExceptionHandler);
27452
- const unhandledRejectionHandler = async (reason, promise2) => {
27453
- const error48 = reason instanceof Error ? reason : new Error(String(reason));
27454
- logger?.error("crash-recovery", "Unhandled promise rejection", {
27455
- error: error48.message,
27456
- stack: error48.stack
27457
- });
27458
- if (ctx.pidRegistry) {
27459
- await ctx.pidRegistry.killAll();
27460
- }
27461
- ctx.emitError?.("unhandledRejection");
27462
- await writeFatalLog(ctx.jsonlFilePath, "unhandledRejection", error48);
27463
- await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), "unhandledRejection", ctx.featureDir);
27464
- stopHeartbeat();
27465
- process.exit(1);
27466
- };
27467
- process.on("unhandledRejection", unhandledRejectionHandler);
27468
- handlersInstalled = true;
27469
- logger?.debug("crash-recovery", "Crash handlers installed");
27470
- return () => {
27471
- process.removeListener("SIGTERM", sigtermHandler);
27472
- process.removeListener("SIGINT", sigintHandler);
27473
- process.removeListener("SIGHUP", sighupHandler);
27474
- process.removeListener("uncaughtException", uncaughtExceptionHandler);
27475
- process.removeListener("unhandledRejection", unhandledRejectionHandler);
27476
- handlersInstalled = false;
27477
- logger?.debug("crash-recovery", "Crash handlers unregistered");
27478
- };
27479
- }
27480
- function startHeartbeat(statusWriter, getTotalCost, getIterations, jsonlFilePath) {
27481
- const logger = getSafeLogger();
27482
- stopHeartbeat();
27483
- heartbeatTimer = setInterval(async () => {
27484
- logger?.debug("crash-recovery", "Heartbeat");
27485
- if (jsonlFilePath) {
27486
- try {
27487
- const heartbeatEntry = {
27488
- timestamp: new Date().toISOString(),
27489
- level: "debug",
27490
- stage: "heartbeat",
27491
- message: "Process alive",
27492
- data: {
27493
- pid: process.pid,
27494
- memoryUsageMB: Math.round(process.memoryUsage().heapUsed / 1024 / 1024)
27495
- }
27496
- };
27497
- const line = `${JSON.stringify(heartbeatEntry)}
27498
- `;
27499
- appendFileSync2(jsonlFilePath, line);
27500
- } catch (err) {
27501
- logger?.warn("crash-recovery", "Failed to write heartbeat", { error: err.message });
27502
- }
27503
- }
27504
- try {
27505
- await statusWriter.update(getTotalCost(), getIterations(), {
27506
- lastHeartbeat: new Date().toISOString()
27507
- });
27508
- } catch (err) {
27509
- logger?.warn("crash-recovery", "Failed to update status during heartbeat", {
27510
- error: err.message
27511
- });
27512
- }
27513
- }, 60000);
27514
- logger?.debug("crash-recovery", "Heartbeat started (60s interval)");
27515
- }
27516
- function stopHeartbeat() {
27517
- if (heartbeatTimer) {
27518
- clearInterval(heartbeatTimer);
27519
- heartbeatTimer = null;
27520
- getSafeLogger()?.debug("crash-recovery", "Heartbeat stopped");
27521
- }
27522
- }
27523
- async function writeExitSummary(jsonlFilePath, totalCost, iterations, storiesCompleted, durationMs) {
27524
- if (!jsonlFilePath)
27525
- return;
27526
- const logger = getSafeLogger();
27527
- try {
27528
- const summaryEntry = {
27529
- timestamp: new Date().toISOString(),
27530
- level: "info",
27531
- stage: "exit-summary",
27532
- message: "Run completed",
27533
- data: {
27534
- totalCost,
27535
- iterations,
27536
- storiesCompleted,
27537
- durationMs,
27538
- exitedCleanly: true
27539
- }
27540
- };
27541
- const line = `${JSON.stringify(summaryEntry)}
27542
- `;
27543
- appendFileSync2(jsonlFilePath, line);
27544
- logger?.debug("crash-recovery", "Exit summary written");
27545
- } catch (err) {
27546
- logger?.warn("crash-recovery", "Failed to write exit summary", { error: err.message });
27547
- }
27548
- }
27549
- function getSignalNumber(signal) {
27550
- const signalMap = {
27551
- SIGTERM: 15,
27552
- SIGINT: 2,
27553
- SIGHUP: 1
27554
- };
27555
- return signalMap[signal] ?? 15;
27556
- }
27557
- var heartbeatTimer = null, handlersInstalled = false;
27558
- var init_crash_recovery = __esm(() => {
27559
- init_logger2();
27560
- });
27561
-
27562
- // src/execution/escalation/escalation.ts
27563
- function escalateTier(currentTier, tierOrder) {
27564
- const currentIndex = tierOrder.findIndex((t) => t.tier === currentTier);
27565
- if (currentIndex === -1 || currentIndex === tierOrder.length - 1) {
27566
- return null;
27567
- }
27568
- return tierOrder[currentIndex + 1].tier;
27569
- }
27570
- function calculateMaxIterations(tierOrder) {
27571
- return tierOrder.reduce((sum, t) => sum + t.attempts, 0);
27572
- }
27573
-
27574
28029
  // src/hooks/runner.ts
27575
28030
  import { existsSync as existsSync25 } from "fs";
27576
28031
  import { join as join30 } from "path";
@@ -27746,6 +28201,251 @@ var init_hooks = __esm(() => {
27746
28201
  init_runner3();
27747
28202
  });
27748
28203
 
28204
+ // src/execution/crash-recovery.ts
28205
+ import { appendFileSync as appendFileSync2 } from "fs";
28206
+ async function writeFatalLog(jsonlFilePath, signal, error48) {
28207
+ if (!jsonlFilePath)
28208
+ return;
28209
+ try {
28210
+ const fatalEntry = {
28211
+ timestamp: new Date().toISOString(),
28212
+ level: "error",
28213
+ stage: "crash-recovery",
28214
+ message: error48 ? `Uncaught exception: ${error48.message}` : `Process terminated by ${signal}`,
28215
+ data: {
28216
+ signal,
28217
+ ...error48 && {
28218
+ stack: error48.stack,
28219
+ name: error48.name
28220
+ }
28221
+ }
28222
+ };
28223
+ const line = `${JSON.stringify(fatalEntry)}
28224
+ `;
28225
+ appendFileSync2(jsonlFilePath, line);
28226
+ } catch (err) {
28227
+ console.error("[crash-recovery] Failed to write fatal log:", err);
28228
+ }
28229
+ }
28230
+ async function writeRunComplete(ctx, exitReason) {
28231
+ if (!ctx.jsonlFilePath || !ctx.runId || !ctx.feature)
28232
+ return;
28233
+ const logger = getSafeLogger();
28234
+ try {
28235
+ const totalCost = ctx.getTotalCost();
28236
+ const iterations = ctx.getIterations();
28237
+ const startTime = ctx.getStartTime?.() ?? Date.now();
28238
+ const durationMs = Date.now() - startTime;
28239
+ const totalStories = ctx.getTotalStories?.() ?? 0;
28240
+ const storiesCompleted = ctx.getStoriesCompleted?.() ?? 0;
28241
+ const runCompleteEntry = {
28242
+ timestamp: new Date().toISOString(),
28243
+ level: "info",
28244
+ stage: "run.complete",
28245
+ message: "Feature execution terminated",
28246
+ data: {
28247
+ runId: ctx.runId,
28248
+ feature: ctx.feature,
28249
+ success: false,
28250
+ exitReason,
28251
+ totalCost,
28252
+ iterations,
28253
+ totalStories,
28254
+ storiesCompleted,
28255
+ durationMs
28256
+ }
28257
+ };
28258
+ const line = `${JSON.stringify(runCompleteEntry)}
28259
+ `;
28260
+ appendFileSync2(ctx.jsonlFilePath, line);
28261
+ logger?.debug("crash-recovery", "run.complete event written", { exitReason });
28262
+ } catch (err) {
28263
+ console.error("[crash-recovery] Failed to write run.complete event:", err);
28264
+ }
28265
+ }
28266
+ async function updateStatusToCrashed(statusWriter, totalCost, iterations, signal, featureDir) {
28267
+ try {
28268
+ statusWriter.setRunStatus("crashed");
28269
+ await statusWriter.update(totalCost, iterations, {
28270
+ crashedAt: new Date().toISOString(),
28271
+ crashSignal: signal
28272
+ });
28273
+ if (featureDir) {
28274
+ await statusWriter.writeFeatureStatus(featureDir, totalCost, iterations, {
28275
+ crashedAt: new Date().toISOString(),
28276
+ crashSignal: signal
28277
+ });
28278
+ }
28279
+ } catch (err) {
28280
+ console.error("[crash-recovery] Failed to update status.json:", err);
28281
+ }
28282
+ }
28283
+ function installCrashHandlers(ctx) {
28284
+ if (handlersInstalled) {
28285
+ return () => {};
28286
+ }
28287
+ const logger = getSafeLogger();
28288
+ const handleSignal = async (signal) => {
28289
+ const hardDeadline = setTimeout(() => {
28290
+ process.exit(128 + getSignalNumber(signal));
28291
+ }, 1e4);
28292
+ if (hardDeadline.unref)
28293
+ hardDeadline.unref();
28294
+ logger?.error("crash-recovery", `Received ${signal}, shutting down...`, { signal });
28295
+ if (ctx.pidRegistry) {
28296
+ await ctx.pidRegistry.killAll();
28297
+ }
28298
+ ctx.emitError?.(signal.toLowerCase());
28299
+ await writeFatalLog(ctx.jsonlFilePath, signal);
28300
+ await writeRunComplete(ctx, signal.toLowerCase());
28301
+ await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), signal, ctx.featureDir);
28302
+ stopHeartbeat();
28303
+ clearTimeout(hardDeadline);
28304
+ process.exit(128 + getSignalNumber(signal));
28305
+ };
28306
+ const sigtermHandler = () => handleSignal("SIGTERM");
28307
+ const sigintHandler = () => handleSignal("SIGINT");
28308
+ const sighupHandler = () => handleSignal("SIGHUP");
28309
+ process.on("SIGTERM", sigtermHandler);
28310
+ process.on("SIGINT", sigintHandler);
28311
+ process.on("SIGHUP", sighupHandler);
28312
+ const uncaughtExceptionHandler = async (error48) => {
28313
+ logger?.error("crash-recovery", "Uncaught exception", {
28314
+ error: error48.message,
28315
+ stack: error48.stack
28316
+ });
28317
+ if (ctx.pidRegistry) {
28318
+ await ctx.pidRegistry.killAll();
28319
+ }
28320
+ ctx.emitError?.("uncaughtException");
28321
+ await writeFatalLog(ctx.jsonlFilePath, "uncaughtException", error48);
28322
+ await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), "uncaughtException", ctx.featureDir);
28323
+ stopHeartbeat();
28324
+ process.exit(1);
28325
+ };
28326
+ process.on("uncaughtException", uncaughtExceptionHandler);
28327
+ const unhandledRejectionHandler = async (reason, promise2) => {
28328
+ const error48 = reason instanceof Error ? reason : new Error(String(reason));
28329
+ logger?.error("crash-recovery", "Unhandled promise rejection", {
28330
+ error: error48.message,
28331
+ stack: error48.stack
28332
+ });
28333
+ if (ctx.pidRegistry) {
28334
+ await ctx.pidRegistry.killAll();
28335
+ }
28336
+ ctx.emitError?.("unhandledRejection");
28337
+ await writeFatalLog(ctx.jsonlFilePath, "unhandledRejection", error48);
28338
+ await updateStatusToCrashed(ctx.statusWriter, ctx.getTotalCost(), ctx.getIterations(), "unhandledRejection", ctx.featureDir);
28339
+ stopHeartbeat();
28340
+ process.exit(1);
28341
+ };
28342
+ process.on("unhandledRejection", unhandledRejectionHandler);
28343
+ handlersInstalled = true;
28344
+ logger?.debug("crash-recovery", "Crash handlers installed");
28345
+ return () => {
28346
+ process.removeListener("SIGTERM", sigtermHandler);
28347
+ process.removeListener("SIGINT", sigintHandler);
28348
+ process.removeListener("SIGHUP", sighupHandler);
28349
+ process.removeListener("uncaughtException", uncaughtExceptionHandler);
28350
+ process.removeListener("unhandledRejection", unhandledRejectionHandler);
28351
+ handlersInstalled = false;
28352
+ logger?.debug("crash-recovery", "Crash handlers unregistered");
28353
+ };
28354
+ }
28355
+ function startHeartbeat(statusWriter, getTotalCost, getIterations, jsonlFilePath) {
28356
+ const logger = getSafeLogger();
28357
+ stopHeartbeat();
28358
+ heartbeatTimer = setInterval(async () => {
28359
+ logger?.debug("crash-recovery", "Heartbeat");
28360
+ if (jsonlFilePath) {
28361
+ try {
28362
+ const heartbeatEntry = {
28363
+ timestamp: new Date().toISOString(),
28364
+ level: "debug",
28365
+ stage: "heartbeat",
28366
+ message: "Process alive",
28367
+ data: {
28368
+ pid: process.pid,
28369
+ memoryUsageMB: Math.round(process.memoryUsage().heapUsed / 1024 / 1024)
28370
+ }
28371
+ };
28372
+ const line = `${JSON.stringify(heartbeatEntry)}
28373
+ `;
28374
+ appendFileSync2(jsonlFilePath, line);
28375
+ } catch (err) {
28376
+ logger?.warn("crash-recovery", "Failed to write heartbeat", { error: err.message });
28377
+ }
28378
+ }
28379
+ try {
28380
+ await statusWriter.update(getTotalCost(), getIterations(), {
28381
+ lastHeartbeat: new Date().toISOString()
28382
+ });
28383
+ } catch (err) {
28384
+ logger?.warn("crash-recovery", "Failed to update status during heartbeat", {
28385
+ error: err.message
28386
+ });
28387
+ }
28388
+ }, 60000);
28389
+ logger?.debug("crash-recovery", "Heartbeat started (60s interval)");
28390
+ }
28391
+ function stopHeartbeat() {
28392
+ if (heartbeatTimer) {
28393
+ clearInterval(heartbeatTimer);
28394
+ heartbeatTimer = null;
28395
+ getSafeLogger()?.debug("crash-recovery", "Heartbeat stopped");
28396
+ }
28397
+ }
28398
+ async function writeExitSummary(jsonlFilePath, totalCost, iterations, storiesCompleted, durationMs) {
28399
+ if (!jsonlFilePath)
28400
+ return;
28401
+ const logger = getSafeLogger();
28402
+ try {
28403
+ const summaryEntry = {
28404
+ timestamp: new Date().toISOString(),
28405
+ level: "info",
28406
+ stage: "exit-summary",
28407
+ message: "Run completed",
28408
+ data: {
28409
+ totalCost,
28410
+ iterations,
28411
+ storiesCompleted,
28412
+ durationMs,
28413
+ exitedCleanly: true
28414
+ }
28415
+ };
28416
+ const line = `${JSON.stringify(summaryEntry)}
28417
+ `;
28418
+ appendFileSync2(jsonlFilePath, line);
28419
+ logger?.debug("crash-recovery", "Exit summary written");
28420
+ } catch (err) {
28421
+ logger?.warn("crash-recovery", "Failed to write exit summary", { error: err.message });
28422
+ }
28423
+ }
28424
+ function getSignalNumber(signal) {
28425
+ const signalMap = {
28426
+ SIGTERM: 15,
28427
+ SIGINT: 2,
28428
+ SIGHUP: 1
28429
+ };
28430
+ return signalMap[signal] ?? 15;
28431
+ }
28432
+ var heartbeatTimer = null, handlersInstalled = false;
28433
+ var init_crash_recovery = __esm(() => {
28434
+ init_logger2();
28435
+ });
28436
+
28437
+ // src/execution/escalation/escalation.ts
28438
+ function escalateTier(currentTier, tierOrder) {
28439
+ const currentIndex = tierOrder.findIndex((t) => t.tier === currentTier);
28440
+ if (currentIndex === -1 || currentIndex === tierOrder.length - 1) {
28441
+ return null;
28442
+ }
28443
+ return tierOrder[currentIndex + 1].tier;
28444
+ }
28445
+ function calculateMaxIterations(tierOrder) {
28446
+ return tierOrder.reduce((sum, t) => sum + t.attempts, 0);
28447
+ }
28448
+
27749
28449
  // src/execution/escalation/tier-outcome.ts
27750
28450
  async function handleNoTierAvailable(ctx, failureCategory) {
27751
28451
  const logger = getSafeLogger();
@@ -27837,12 +28537,13 @@ var init_tier_outcome = __esm(() => {
27837
28537
  });
27838
28538
 
27839
28539
  // src/execution/escalation/tier-escalation.ts
27840
// Build the failure record attached to a story when it is escalated to the
// next model tier: attempt number, current tier, pipeline stage, summary,
// optional reviewer findings, and an ISO timestamp.
//
// @param {{attempts?: number}} story - story being escalated
// @param {string} currentTier - tier the story just failed on
// @param {Array|undefined} reviewFindings - findings carried forward; omitted when empty
function buildEscalationFailure(story, currentTier, reviewFindings) {
  const attempt = (story.attempts ?? 0) + 1;
  // Only attach findings when a non-empty collection was provided.
  const hasFindings = reviewFindings !== undefined && reviewFindings !== null && reviewFindings.length > 0;
  return {
    attempt,
    modelTier: currentTier,
    stage: "escalation",
    summary: `Failed with tier ${currentTier}, escalating to next tier`,
    reviewFindings: hasFindings ? reviewFindings : undefined,
    timestamp: new Date().toISOString()
  };
}
@@ -27885,6 +28586,7 @@ async function handleTierEscalation(ctx) {
27885
28586
  const storiesToEscalate = ctx.isBatchExecution && escalateWholeBatch ? ctx.storiesToExecute : [ctx.story];
27886
28587
  const escalateRetryAsLite = ctx.pipelineResult.context.retryAsLite === true;
27887
28588
  const escalateFailureCategory = ctx.pipelineResult.context.tddFailureCategory;
28589
+ const escalateReviewFindings = ctx.pipelineResult.context.reviewFindings;
27888
28590
  const escalateRetryAsTestAfter = escalateFailureCategory === "greenfield-no-tests";
27889
28591
  const routingMode = ctx.config.routing.llm?.mode ?? "hybrid";
27890
28592
  if (!nextTier || !ctx.config.autoMode.escalation.enabled) {
@@ -27932,7 +28634,7 @@ async function handleTierEscalation(ctx) {
27932
28634
  const currentStoryTier = s.routing?.modelTier ?? ctx.routing.modelTier;
27933
28635
  const isChangingTier = currentStoryTier !== nextTier;
27934
28636
  const shouldResetAttempts = isChangingTier || shouldSwitchToTestAfter;
27935
- const escalationFailure = buildEscalationFailure(s, currentStoryTier);
28637
+ const escalationFailure = buildEscalationFailure(s, currentStoryTier, escalateReviewFindings);
27936
28638
  return {
27937
28639
  ...s,
27938
28640
  attempts: shouldResetAttempts ? 0 : (s.attempts ?? 0) + 1,
@@ -29226,7 +29928,8 @@ var init_parallel_lifecycle = __esm(() => {
29226
29928
  // src/execution/parallel-executor.ts
29227
29929
  var exports_parallel_executor = {};
29228
29930
  __export(exports_parallel_executor, {
29229
- runParallelExecution: () => runParallelExecution
29931
+ runParallelExecution: () => runParallelExecution,
29932
+ _parallelExecutorDeps: () => _parallelExecutorDeps
29230
29933
  });
29231
29934
  import * as os5 from "os";
29232
29935
  import path15 from "path";
@@ -29303,7 +30006,8 @@ async function runParallelExecution(options, initialPrd) {
29303
30006
  feature,
29304
30007
  totalCost
29305
30008
  });
29306
- await fireHook(hooks, "on-complete", hookCtx(feature, { status: "complete", cost: totalCost }), workdir);
30009
+ await _parallelExecutorDeps.fireHook(hooks, "on-all-stories-complete", hookCtx(feature, { status: "passed", cost: totalCost }), workdir);
30010
+ await _parallelExecutorDeps.fireHook(hooks, "on-complete", hookCtx(feature, { status: "complete", cost: totalCost }), workdir);
29307
30011
  const durationMs = Date.now() - startTime;
29308
30012
  const runCompletedAt = new Date().toISOString();
29309
30013
  const { handleParallelCompletion: handleParallelCompletion2 } = await Promise.resolve().then(() => (init_parallel_lifecycle(), exports_parallel_lifecycle));
@@ -29351,12 +30055,16 @@ async function runParallelExecution(options, initialPrd) {
29351
30055
  }
29352
30056
  return { prd, totalCost, storiesCompleted, completed: false };
29353
30057
  }
30058
+ var _parallelExecutorDeps;
29354
30059
  var init_parallel_executor = __esm(() => {
29355
30060
  init_hooks();
29356
30061
  init_logger2();
29357
30062
  init_prd();
29358
30063
  init_helpers();
29359
30064
  init_parallel();
30065
+ _parallelExecutorDeps = {
30066
+ fireHook
30067
+ };
29360
30068
  });
29361
30069
 
29362
30070
  // src/pipeline/subscribers/events-writer.ts
@@ -29746,11 +30454,13 @@ async function handlePipelineSuccess(ctx, pipelineResult) {
29746
30454
  }
29747
30455
  const storiesCompletedDelta = ctx.storiesToExecute.length;
29748
30456
  for (const completedStory of ctx.storiesToExecute) {
30457
+ const now = Date.now();
29749
30458
  logger?.info("story.complete", "Story completed successfully", {
29750
30459
  storyId: completedStory.id,
29751
30460
  storyTitle: completedStory.title,
29752
30461
  totalCost: ctx.totalCost + costDelta,
29753
- durationMs: Date.now() - ctx.startTime
30462
+ durationMs: now - ctx.startTime,
30463
+ storyDurationMs: ctx.storyStartTime ? now - ctx.storyStartTime : undefined
29754
30464
  });
29755
30465
  pipelineEventBus.emit({
29756
30466
  type: "story:completed",
@@ -29771,7 +30481,8 @@ async function handlePipelineSuccess(ctx, pipelineResult) {
29771
30481
  pendingStories: updatedCounts.pending,
29772
30482
  totalCost: ctx.totalCost + costDelta,
29773
30483
  costLimit: ctx.config.execution.costLimit,
29774
- elapsedMs: Date.now() - ctx.startTime
30484
+ elapsedMs: Date.now() - ctx.startTime,
30485
+ storyDurationMs: ctx.storyStartTime ? Date.now() - ctx.storyStartTime : undefined
29775
30486
  });
29776
30487
  return { storiesCompletedDelta, costDelta, prd, prdDirty: true };
29777
30488
  }
@@ -29877,6 +30588,7 @@ async function runIteration(ctx, prd, selection, iterations, totalCost, allStory
29877
30588
  prdDirty: dryRunResult.prdDirty
29878
30589
  };
29879
30590
  }
30591
+ const storyStartTime = Date.now();
29880
30592
  const storyGitRef = await captureGitRef(ctx.workdir);
29881
30593
  const pipelineContext = {
29882
30594
  config: ctx.config,
@@ -29924,7 +30636,8 @@ async function runIteration(ctx, prd, selection, iterations, totalCost, allStory
29924
30636
  isBatchExecution,
29925
30637
  allStoryMetrics,
29926
30638
  storyGitRef,
29927
- interactionChain: ctx.interactionChain
30639
+ interactionChain: ctx.interactionChain,
30640
+ storyStartTime
29928
30641
  };
29929
30642
  if (pipelineResult.success) {
29930
30643
  const r2 = await handlePipelineSuccess(handlerCtx, pipelineResult);
@@ -30053,14 +30766,6 @@ async function executeSequential(ctx, initialPrd) {
30053
30766
  return buildResult("pre-merge-aborted");
30054
30767
  }
30055
30768
  }
30056
- pipelineEventBus.emit({
30057
- type: "run:completed",
30058
- totalStories: 0,
30059
- passedStories: 0,
30060
- failedStories: 0,
30061
- durationMs: Date.now() - ctx.startTime,
30062
- totalCost
30063
- });
30064
30769
  return buildResult("completed");
30065
30770
  }
30066
30771
  const selected = selectNextStories(prd, ctx.config, ctx.batchPlan, currentBatchIndex, lastStoryId, ctx.useBatch);
@@ -30129,10 +30834,7 @@ async function executeSequential(ctx, initialPrd) {
30129
30834
  logger?.info("execution", "Running post-run pipeline (acceptance tests)");
30130
30835
  await runPipeline(postRunPipeline, { config: ctx.config, prd, workdir: ctx.workdir, story: prd.userStories[0] }, ctx.eventEmitter);
30131
30836
  return buildResult("max-iterations");
30132
- } finally {
30133
- stopHeartbeat();
30134
- writeExitSummary(ctx.logFilePath, totalCost, iterations, storiesCompleted, Date.now() - ctx.startTime);
30135
- }
30837
+ } finally {}
30136
30838
  }
30137
30839
  var init_sequential_executor = __esm(() => {
30138
30840
  init_triggers();
@@ -30547,6 +31249,16 @@ __export(exports_run_completion, {
30547
31249
  handleRunCompletion: () => handleRunCompletion,
30548
31250
  _runCompletionDeps: () => _runCompletionDeps
30549
31251
  });
31252
// Decide whether the deferred regression gate may be skipped ("smart skip").
// Skipping is only allowed in sequential mode (isSequential defaults to
// sequential when undefined), and only when at least one story ran and every
// story's full-suite gate explicitly passed.
//
// @param {Array<{fullSuiteGatePassed?: boolean}>} allStoryMetrics - per-story run metrics
// @param {boolean|undefined} isSequential - execution mode flag; undefined means sequential
// @returns {boolean} true when the deferred regression run can be skipped
function shouldSkipDeferredRegression(allStoryMetrics, isSequential) {
  // Parallel mode never skips.
  if (isSequential === false) {
    return false;
  }
  // No metrics means nothing proved the suite green — run the gate.
  if (!allStoryMetrics.length) {
    return false;
  }
  // Any story whose gate is not explicitly true forces the gate to run.
  return !allStoryMetrics.some((m) => m.fullSuiteGatePassed !== true);
}
30550
31262
  async function handleRunCompletion(options) {
30551
31263
  const logger = getSafeLogger();
30552
31264
  const {
@@ -30561,23 +31273,56 @@ async function handleRunCompletion(options) {
30561
31273
  startTime,
30562
31274
  workdir,
30563
31275
  statusWriter,
30564
- config: config2
31276
+ config: config2,
31277
+ hooksConfig,
31278
+ isSequential
30565
31279
  } = options;
30566
31280
  const regressionMode = config2.execution.regressionGate?.mode;
30567
31281
  if (regressionMode === "deferred" && config2.quality.commands.test) {
30568
- const regressionResult = await _runCompletionDeps.runDeferredRegression({
30569
- config: config2,
30570
- prd,
30571
- workdir
30572
- });
30573
- logger?.info("regression", "Deferred regression gate completed", {
30574
- success: regressionResult.success,
30575
- failedTests: regressionResult.failedTests,
30576
- affectedStories: regressionResult.affectedStories
30577
- });
31282
+ if (shouldSkipDeferredRegression(allStoryMetrics, isSequential)) {
31283
+ logger?.info("regression", "Smart-skip: skipping deferred regression (all stories passed full-suite gate in sequential mode)");
31284
+ } else {
31285
+ const regressionResult = await _runCompletionDeps.runDeferredRegression({
31286
+ config: config2,
31287
+ prd,
31288
+ workdir
31289
+ });
31290
+ logger?.info("regression", "Deferred regression gate completed", {
31291
+ success: regressionResult.success,
31292
+ failedTests: regressionResult.failedTests,
31293
+ affectedStories: regressionResult.affectedStories
31294
+ });
31295
+ if (!regressionResult.success) {
31296
+ for (const storyId of regressionResult.affectedStories) {
31297
+ const story = prd.userStories.find((s) => s.id === storyId);
31298
+ if (story) {
31299
+ story.status = "regression-failed";
31300
+ }
31301
+ }
31302
+ statusWriter.setRunStatus("failed");
31303
+ if (hooksConfig) {
31304
+ await _runCompletionDeps.fireHook(hooksConfig, "on-final-regression-fail", {
31305
+ event: "on-final-regression-fail",
31306
+ feature,
31307
+ status: "failed",
31308
+ failedTests: regressionResult.failedTests,
31309
+ affectedStories: regressionResult.affectedStories
31310
+ }, workdir);
31311
+ }
31312
+ }
31313
+ }
30578
31314
  }
30579
31315
  const durationMs = Date.now() - startTime;
30580
31316
  const runCompletedAt = new Date().toISOString();
31317
+ const finalCounts = countStories(prd);
31318
+ pipelineEventBus.emit({
31319
+ type: "run:completed",
31320
+ totalStories: finalCounts.total,
31321
+ passedStories: finalCounts.passed,
31322
+ failedStories: finalCounts.failed,
31323
+ durationMs,
31324
+ totalCost
31325
+ });
30581
31326
  const runMetrics = {
30582
31327
  runId,
30583
31328
  feature,
@@ -30586,12 +31331,15 @@ async function handleRunCompletion(options) {
30586
31331
  totalCost,
30587
31332
  totalStories: allStoryMetrics.length,
30588
31333
  storiesCompleted,
30589
- storiesFailed: countStories(prd).failed,
31334
+ storiesFailed: finalCounts.failed,
30590
31335
  totalDurationMs: durationMs,
30591
31336
  stories: allStoryMetrics
30592
31337
  };
30593
- await saveRunMetrics(workdir, runMetrics);
30594
- const finalCounts = countStories(prd);
31338
+ try {
31339
+ await saveRunMetrics(workdir, runMetrics);
31340
+ } catch (err) {
31341
+ logger?.warn("run.complete", "Failed to save run metrics", { error: String(err) });
31342
+ }
30595
31343
  const storyMetricsSummary = allStoryMetrics.map((sm) => ({
30596
31344
  storyId: sm.storyId,
30597
31345
  complexity: sm.complexity,
@@ -30635,12 +31383,15 @@ async function handleRunCompletion(options) {
30635
31383
  }
30636
31384
  var _runCompletionDeps;
30637
31385
  var init_run_completion = __esm(() => {
31386
+ init_runner3();
30638
31387
  init_logger2();
30639
31388
  init_metrics();
31389
+ init_event_bus();
30640
31390
  init_prd();
30641
31391
  init_run_regression();
30642
31392
  _runCompletionDeps = {
30643
- runDeferredRegression
31393
+ runDeferredRegression,
31394
+ fireHook
30644
31395
  };
30645
31396
  });
30646
31397
 
@@ -61663,7 +62414,9 @@ function detectTestPatterns(workdir, dependencies, devDependencies) {
61663
62414
 
61664
62415
  // src/cli/analyze.ts
61665
62416
  init_schema();
62417
+ init_builder2();
61666
62418
  init_logger2();
62419
+ init_prd();
61667
62420
  init_routing();
61668
62421
  init_version();
61669
62422
 
@@ -64013,7 +64766,14 @@ var FIELD_DESCRIPTIONS = {
64013
64766
  "prompts.overrides.test-writer": 'Path to custom test-writer prompt (e.g., ".nax/prompts/test-writer.md")',
64014
64767
  "prompts.overrides.implementer": 'Path to custom implementer prompt (e.g., ".nax/prompts/implementer.md")',
64015
64768
  "prompts.overrides.verifier": 'Path to custom verifier prompt (e.g., ".nax/prompts/verifier.md")',
64016
- "prompts.overrides.single-session": 'Path to custom single-session prompt (e.g., ".nax/prompts/single-session.md")'
64769
+ "prompts.overrides.single-session": 'Path to custom single-session prompt (e.g., ".nax/prompts/single-session.md")',
64770
+ decompose: "Story decomposition configuration (SD-003)",
64771
+ "decompose.trigger": "Decomposition trigger mode: auto | confirm | disabled",
64772
+ "decompose.maxAcceptanceCriteria": "Max acceptance criteria before flagging as oversized (default: 6)",
64773
+ "decompose.maxSubstories": "Max number of substories to generate (default: 5)",
64774
+ "decompose.maxSubstoryComplexity": "Max complexity for any generated substory (default: 'medium')",
64775
+ "decompose.maxRetries": "Max retries on decomposition validation failure (default: 2)",
64776
+ "decompose.model": "Model tier for decomposition LLM calls (default: 'balanced')"
64017
64777
  };
64018
64778
  async function loadConfigFile(path13) {
64019
64779
  if (!existsSync20(path13))
@@ -64782,6 +65542,7 @@ async function unlockCommand(options) {
64782
65542
  init_config();
64783
65543
 
64784
65544
  // src/execution/runner.ts
65545
+ init_hooks();
64785
65546
  init_logger2();
64786
65547
  init_prd();
64787
65548
  init_batch_route();
@@ -64826,8 +65587,12 @@ function precomputeBatchPlan(stories, maxBatchSize = DEFAULT_MAX_BATCH_SIZE) {
64826
65587
  // src/execution/runner.ts
64827
65588
  init_crash_recovery();
64828
65589
  init_helpers();
65590
+ init_story_context();
64829
65591
  init_escalation();
64830
65592
  init_escalation();
65593
+ var _runnerDeps = {
65594
+ fireHook
65595
+ };
64831
65596
  async function run(options) {
64832
65597
  const {
64833
65598
  prdPath,
@@ -65009,6 +65774,9 @@ async function run(options) {
65009
65774
  iterations = acceptanceResult.iterations;
65010
65775
  storiesCompleted = acceptanceResult.storiesCompleted;
65011
65776
  }
65777
+ if (isComplete(prd)) {
65778
+ await _runnerDeps.fireHook(hooks, "on-all-stories-complete", hookCtx(feature, { status: "passed", cost: totalCost }), workdir);
65779
+ }
65012
65780
  const { handleRunCompletion: handleRunCompletion2 } = await Promise.resolve().then(() => (init_run_completion(), exports_run_completion));
65013
65781
  const completionResult = await handleRunCompletion2({
65014
65782
  runId,