@nathapp/nax 0.58.4 → 0.59.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/nax.js +814 -696
  2. package/package.json +1 -1
package/dist/nax.js CHANGED
@@ -3527,6 +3527,8 @@ async function writePromptAudit(entry) {
3527
3527
  let baseDir;
3528
3528
  if (entry.auditDir) {
3529
3529
  baseDir = isAbsolute(entry.auditDir) ? entry.auditDir : join(entry.workdir, entry.auditDir);
3530
+ } else if (entry.projectDir) {
3531
+ baseDir = join(entry.projectDir, ".nax", "prompt-audit");
3530
3532
  } else {
3531
3533
  const wtMarker = `${sep}.nax-wt${sep}`;
3532
3534
  const wtIdx = entry.workdir.indexOf(wtMarker);
@@ -19186,6 +19188,7 @@ class AcpAgentAdapter {
19186
19188
  prompt: currentPrompt,
19187
19189
  sessionName,
19188
19190
  workdir: options.workdir,
19191
+ projectDir: options.projectDir,
19189
19192
  auditDir: _runAuditConfig.agent.promptAudit.dir,
19190
19193
  storyId: options.storyId,
19191
19194
  featureName: options.featureName,
@@ -19288,7 +19291,7 @@ class AcpAgentAdapter {
19288
19291
  };
19289
19292
  }
19290
19293
  async complete(prompt, _options) {
19291
- const timeoutMs = _options?.timeoutMs || 120000;
19294
+ const timeoutMs = _options?.timeoutMs ?? 120000;
19292
19295
  const permissionMode = resolvePermissions(_options?.config, "complete").mode;
19293
19296
  const workdir = _options?.workdir;
19294
19297
  const config2 = _options?.config;
@@ -19521,6 +19524,9 @@ class AcpAgentAdapter {
19521
19524
  }
19522
19525
  return { stories };
19523
19526
  }
19527
+ clearUnavailableAgents() {
19528
+ this._unavailableAgents.clear();
19529
+ }
19524
19530
  markUnavailable(agentName) {
19525
19531
  this._unavailableAgents.add(agentName);
19526
19532
  }
@@ -20089,6 +20095,75 @@ var init_interactive = __esm(() => {
20089
20095
  init_execution();
20090
20096
  });
20091
20097
 
20098
+ // src/config/path-security.ts
20099
+ import { existsSync as existsSync2, lstatSync, realpathSync } from "fs";
20100
+ import { basename, isAbsolute as isAbsolute3, normalize, resolve as resolve2 } from "path";
20101
+ function validateDirectory(dirPath, baseDir) {
20102
+ const resolved = resolve2(dirPath);
20103
+ if (!existsSync2(resolved)) {
20104
+ throw new Error(`Directory does not exist: ${dirPath}`);
20105
+ }
20106
+ let realPath;
20107
+ try {
20108
+ realPath = realpathSync(resolved);
20109
+ } catch (error48) {
20110
+ throw new Error(`Failed to resolve path: ${dirPath} (${error48.message})`);
20111
+ }
20112
+ try {
20113
+ const stats = lstatSync(realPath);
20114
+ if (!stats.isDirectory()) {
20115
+ throw new Error(`Not a directory: ${dirPath}`);
20116
+ }
20117
+ } catch (error48) {
20118
+ throw new Error(`Failed to stat path: ${dirPath} (${error48.message})`);
20119
+ }
20120
+ if (baseDir) {
20121
+ const resolvedBase = resolve2(baseDir);
20122
+ const realBase = existsSync2(resolvedBase) ? realpathSync(resolvedBase) : resolvedBase;
20123
+ if (!isWithinDirectory(realPath, realBase)) {
20124
+ throw new Error(`Path is outside allowed directory: ${dirPath} (resolved to ${realPath}, base: ${realBase})`);
20125
+ }
20126
+ }
20127
+ return realPath;
20128
+ }
20129
+ function isWithinDirectory(targetPath, basePath) {
20130
+ const normalizedTarget = normalize(targetPath);
20131
+ const normalizedBase = normalize(basePath);
20132
+ if (!isAbsolute3(normalizedTarget) || !isAbsolute3(normalizedBase)) {
20133
+ return false;
20134
+ }
20135
+ const baseWithSlash = normalizedBase.endsWith("/") ? normalizedBase : `${normalizedBase}/`;
20136
+ const targetWithSlash = normalizedTarget.endsWith("/") ? normalizedTarget : `${normalizedTarget}/`;
20137
+ return targetWithSlash.startsWith(baseWithSlash) || normalizedTarget === normalizedBase;
20138
+ }
20139
+ function validateFilePath(filePath, baseDir) {
20140
+ const resolved = resolve2(filePath);
20141
+ let realPath;
20142
+ try {
20143
+ if (!existsSync2(resolved)) {
20144
+ const parent = resolve2(resolved, "..");
20145
+ if (existsSync2(parent)) {
20146
+ const realParent = realpathSync(parent);
20147
+ realPath = resolve2(realParent, basename(resolved));
20148
+ } else {
20149
+ realPath = resolved;
20150
+ }
20151
+ } else {
20152
+ realPath = realpathSync(resolved);
20153
+ }
20154
+ } catch (error48) {
20155
+ throw new Error(`Failed to resolve path: ${filePath} (${error48.message})`);
20156
+ }
20157
+ const resolvedBase = resolve2(baseDir);
20158
+ const realBase = existsSync2(resolvedBase) ? realpathSync(resolvedBase) : resolvedBase;
20159
+ if (!isWithinDirectory(realPath, realBase)) {
20160
+ throw new Error(`Path is outside allowed directory: ${filePath} (resolved to ${realPath}, base: ${realBase})`);
20161
+ }
20162
+ return realPath;
20163
+ }
20164
+ var MAX_DIRECTORY_DEPTH = 10;
20165
+ var init_path_security = () => {};
20166
+
20092
20167
  // src/agents/shared/model-resolution.ts
20093
20168
  var exports_model_resolution = {};
20094
20169
  __export(exports_model_resolution, {
@@ -20114,7 +20189,7 @@ var init_model_resolution = __esm(() => {
20114
20189
  // src/agents/claude/plan.ts
20115
20190
  import { mkdtempSync, rmSync } from "fs";
20116
20191
  import { tmpdir } from "os";
20117
- import { join as join3 } from "path";
20192
+ import { join as join3, resolve as resolve3 } from "path";
20118
20193
  function buildPlanCommand(binary, options) {
20119
20194
  const cmd = [binary, "--permission-mode", "plan"];
20120
20195
  let modelDef = options.modelDef;
@@ -20134,17 +20209,12 @@ function buildPlanCommand(binary, options) {
20134
20209
 
20135
20210
  ${options.prompt}`;
20136
20211
  }
20137
- if (options.inputFile) {
20138
- try {
20139
- const inputContent = __require("fs").readFileSync(__require("path").resolve(options.workdir, options.inputFile), "utf-8");
20140
- fullPrompt = `${fullPrompt}
20212
+ if (options.resolvedInputContent) {
20213
+ fullPrompt = `${fullPrompt}
20141
20214
 
20142
20215
  ## Input Requirements
20143
20216
 
20144
- ${inputContent}`;
20145
- } catch (error48) {
20146
- throw new Error(`Failed to read input file ${options.inputFile}: ${error48.message}`);
20147
- }
20217
+ ${options.resolvedInputContent}`;
20148
20218
  }
20149
20219
  if (!options.interactive) {
20150
20220
  cmd.push("-p", fullPrompt);
@@ -20155,7 +20225,13 @@ ${inputContent}`;
20155
20225
  }
20156
20226
  async function runPlan(binary, options, pidRegistry) {
20157
20227
  const { resolveBalancedModelDef: resolveBalancedModelDef2 } = await Promise.resolve().then(() => (init_model_resolution(), exports_model_resolution));
20158
- const cmd = buildPlanCommand(binary, options);
20228
+ let resolvedOptions = options;
20229
+ if (options.inputFile) {
20230
+ const inputPath = validateFilePath(resolve3(options.workdir, options.inputFile), options.workdir);
20231
+ const resolvedInputContent = await Bun.file(inputPath).text();
20232
+ resolvedOptions = { ...options, resolvedInputContent };
20233
+ }
20234
+ const cmd = buildPlanCommand(binary, resolvedOptions);
20159
20235
  let modelDef = options.modelDef;
20160
20236
  if (!modelDef) {
20161
20237
  if (!options.config) {
@@ -20224,6 +20300,7 @@ async function runPlan(binary, options, pidRegistry) {
20224
20300
  }
20225
20301
  }
20226
20302
  var init_plan = __esm(() => {
20303
+ init_path_security();
20227
20304
  init_timeout_handler();
20228
20305
  init_logger2();
20229
20306
  init_env();
@@ -20376,8 +20453,8 @@ class ClaudeCodeAdapter {
20376
20453
  let stdoutTimeoutId;
20377
20454
  const stdout = await Promise.race([
20378
20455
  new Response(proc.stdout).text(),
20379
- new Promise((resolve2) => {
20380
- stdoutTimeoutId = setTimeout(() => resolve2(""), 5000);
20456
+ new Promise((resolve4) => {
20457
+ stdoutTimeoutId = setTimeout(() => resolve4(""), 5000);
20381
20458
  })
20382
20459
  ]);
20383
20460
  clearTimeout(stdoutTimeoutId);
@@ -20725,7 +20802,12 @@ function createAgentRegistry(config2) {
20725
20802
  installed: await agent.isInstalled()
20726
20803
  })));
20727
20804
  }
20728
- return { getAgent: getAgent2, getInstalledAgents: getInstalledAgents2, checkAgentHealth: checkAgentHealth2, protocol };
20805
+ function resetStoryState() {
20806
+ for (const adapter of acpCache.values()) {
20807
+ adapter.clearUnavailableAgents();
20808
+ }
20809
+ }
20810
+ return { getAgent: getAgent2, getInstalledAgents: getInstalledAgents2, checkAgentHealth: checkAgentHealth2, protocol, resetStoryState };
20729
20811
  }
20730
20812
  var ALL_AGENTS;
20731
20813
  var init_registry = __esm(() => {
@@ -20745,75 +20827,6 @@ var init_registry = __esm(() => {
20745
20827
  ];
20746
20828
  });
20747
20829
 
20748
- // src/config/path-security.ts
20749
- import { existsSync as existsSync3, lstatSync, realpathSync } from "fs";
20750
- import { basename, isAbsolute as isAbsolute3, normalize, resolve as resolve2 } from "path";
20751
- function validateDirectory(dirPath, baseDir) {
20752
- const resolved = resolve2(dirPath);
20753
- if (!existsSync3(resolved)) {
20754
- throw new Error(`Directory does not exist: ${dirPath}`);
20755
- }
20756
- let realPath;
20757
- try {
20758
- realPath = realpathSync(resolved);
20759
- } catch (error48) {
20760
- throw new Error(`Failed to resolve path: ${dirPath} (${error48.message})`);
20761
- }
20762
- try {
20763
- const stats = lstatSync(realPath);
20764
- if (!stats.isDirectory()) {
20765
- throw new Error(`Not a directory: ${dirPath}`);
20766
- }
20767
- } catch (error48) {
20768
- throw new Error(`Failed to stat path: ${dirPath} (${error48.message})`);
20769
- }
20770
- if (baseDir) {
20771
- const resolvedBase = resolve2(baseDir);
20772
- const realBase = existsSync3(resolvedBase) ? realpathSync(resolvedBase) : resolvedBase;
20773
- if (!isWithinDirectory(realPath, realBase)) {
20774
- throw new Error(`Path is outside allowed directory: ${dirPath} (resolved to ${realPath}, base: ${realBase})`);
20775
- }
20776
- }
20777
- return realPath;
20778
- }
20779
- function isWithinDirectory(targetPath, basePath) {
20780
- const normalizedTarget = normalize(targetPath);
20781
- const normalizedBase = normalize(basePath);
20782
- if (!isAbsolute3(normalizedTarget) || !isAbsolute3(normalizedBase)) {
20783
- return false;
20784
- }
20785
- const baseWithSlash = normalizedBase.endsWith("/") ? normalizedBase : `${normalizedBase}/`;
20786
- const targetWithSlash = normalizedTarget.endsWith("/") ? normalizedTarget : `${normalizedTarget}/`;
20787
- return targetWithSlash.startsWith(baseWithSlash) || normalizedTarget === normalizedBase;
20788
- }
20789
- function validateFilePath(filePath, baseDir) {
20790
- const resolved = resolve2(filePath);
20791
- let realPath;
20792
- try {
20793
- if (!existsSync3(resolved)) {
20794
- const parent = resolve2(resolved, "..");
20795
- if (existsSync3(parent)) {
20796
- const realParent = realpathSync(parent);
20797
- realPath = resolve2(realParent, basename(resolved));
20798
- } else {
20799
- realPath = resolved;
20800
- }
20801
- } else {
20802
- realPath = realpathSync(resolved);
20803
- }
20804
- } catch (error48) {
20805
- throw new Error(`Failed to resolve path: ${filePath} (${error48.message})`);
20806
- }
20807
- const resolvedBase = resolve2(baseDir);
20808
- const realBase = existsSync3(resolvedBase) ? realpathSync(resolvedBase) : resolvedBase;
20809
- if (!isWithinDirectory(realPath, realBase)) {
20810
- throw new Error(`Path is outside allowed directory: ${filePath} (resolved to ${realPath}, base: ${realBase})`);
20811
- }
20812
- return realPath;
20813
- }
20814
- var MAX_DIRECTORY_DEPTH = 10;
20815
- var init_path_security = () => {};
20816
-
20817
20830
  // src/utils/json-file.ts
20818
20831
  import { existsSync as existsSync6 } from "fs";
20819
20832
  async function loadJsonFile(path, context = "json-file") {
@@ -21008,7 +21021,7 @@ function isPlainObject2(value) {
21008
21021
 
21009
21022
  // src/config/paths.ts
21010
21023
  import { homedir as homedir2 } from "os";
21011
- import { join as join7, resolve as resolve3 } from "path";
21024
+ import { join as join7, resolve as resolve4 } from "path";
21012
21025
  function globalConfigDir() {
21013
21026
  const override = process.env[GLOBAL_CONFIG_DIR_ENV];
21014
21027
  if (override)
@@ -21016,7 +21029,7 @@ function globalConfigDir() {
21016
21029
  return join7(homedir2(), ".nax");
21017
21030
  }
21018
21031
  function projectConfigDir(projectRoot) {
21019
- return join7(resolve3(projectRoot), PROJECT_NAX_DIR);
21032
+ return join7(resolve4(projectRoot), PROJECT_NAX_DIR);
21020
21033
  }
21021
21034
  var GLOBAL_CONFIG_DIR_ENV = "NAX_GLOBAL_CONFIG_DIR", PROJECT_NAX_DIR = ".nax";
21022
21035
  var init_paths = () => {};
@@ -21170,12 +21183,12 @@ var init_profile = __esm(() => {
21170
21183
 
21171
21184
  // src/config/loader.ts
21172
21185
  import { existsSync as existsSync7 } from "fs";
21173
- import { basename as basename2, dirname as dirname3, join as join9, resolve as resolve4 } from "path";
21186
+ import { basename as basename2, dirname as dirname2, join as join9, resolve as resolve5 } from "path";
21174
21187
  function globalConfigPath() {
21175
21188
  return join9(globalConfigDir(), "config.json");
21176
21189
  }
21177
21190
  function findProjectDir(startDir = process.cwd()) {
21178
- let dir = resolve4(startDir);
21191
+ let dir = resolve5(startDir);
21179
21192
  let depth = 0;
21180
21193
  while (depth < MAX_DIRECTORY_DEPTH) {
21181
21194
  const candidate = join9(dir, PROJECT_NAX_DIR);
@@ -21226,7 +21239,7 @@ function applyBatchModeCompat(conf) {
21226
21239
  async function loadConfig(startDir, cliOverrides) {
21227
21240
  let rawConfig = structuredClone(DEFAULT_CONFIG);
21228
21241
  const projDir = startDir ? basename2(startDir) === PROJECT_NAX_DIR ? startDir : findProjectDir(startDir) : findProjectDir();
21229
- const projectRoot = startDir ? basename2(startDir) === PROJECT_NAX_DIR ? dirname3(startDir) : startDir : process.cwd();
21242
+ const projectRoot = startDir ? basename2(startDir) === PROJECT_NAX_DIR ? dirname2(startDir) : startDir : process.cwd();
21230
21243
  const profileName = await resolveProfileName(cliOverrides ?? {}, process.env, projectRoot);
21231
21244
  const globalConfRaw = await loadJsonFile(globalConfigPath(), "config");
21232
21245
  if (globalConfRaw) {
@@ -21269,8 +21282,8 @@ ${errors3.join(`
21269
21282
  }
21270
21283
  async function loadConfigForWorkdir(rootConfigPath, packageDir) {
21271
21284
  const logger = getLogger();
21272
- const resolvedRootConfigPath = resolve4(rootConfigPath);
21273
- const rootNaxDir = dirname3(resolvedRootConfigPath);
21285
+ const resolvedRootConfigPath = resolve5(rootConfigPath);
21286
+ const rootNaxDir = dirname2(resolvedRootConfigPath);
21274
21287
  let rootConfigPromise = _rootConfigCache.get(resolvedRootConfigPath);
21275
21288
  if (!rootConfigPromise) {
21276
21289
  rootConfigPromise = loadConfig(rootNaxDir);
@@ -21281,7 +21294,7 @@ async function loadConfigForWorkdir(rootConfigPath, packageDir) {
21281
21294
  logger.debug("config", "No packageDir \u2014 using root config");
21282
21295
  return rootConfig;
21283
21296
  }
21284
- const repoRoot = dirname3(rootNaxDir);
21297
+ const repoRoot = dirname2(rootNaxDir);
21285
21298
  const packageConfigPath = join9(repoRoot, PROJECT_NAX_DIR, "mono", packageDir, "config.json");
21286
21299
  const packageOverride = await loadJsonFile(packageConfigPath, "config");
21287
21300
  if (!packageOverride) {
@@ -22178,7 +22191,8 @@ Do NOT output the JSON to the conversation. Write the file, then reply with a br
22178
22191
  };
22179
22192
  }
22180
22193
  const proposalOutputs = successful.map((p) => p.output);
22181
- const outcome = await resolveOutcome(proposalOutputs, [], ctx.stageConfig, ctx.config, ctx.storyId, 0);
22194
+ const resolverTimeoutMs = (ctx.stageConfig.timeoutSeconds ?? 600) * 1000;
22195
+ const outcome = await resolveOutcome(proposalOutputs, [], ctx.stageConfig, ctx.config, ctx.storyId, resolverTimeoutMs);
22182
22196
  const winningOutput = successful[0].output;
22183
22197
  const proposals = successful.map((p) => ({ debater: p.debater, output: p.output }));
22184
22198
  logger?.info("debate", "debate:result", {
@@ -22644,9 +22658,9 @@ ${request.summary}
22644
22658
  if (!this.rl) {
22645
22659
  throw new Error("CLI plugin not initialized");
22646
22660
  }
22647
- const timeoutPromise = new Promise((resolve5) => {
22661
+ const timeoutPromise = new Promise((resolve6) => {
22648
22662
  setTimeout(() => {
22649
- resolve5({
22663
+ resolve6({
22650
22664
  requestId: request.id,
22651
22665
  action: "skip",
22652
22666
  respondedBy: "timeout",
@@ -22798,9 +22812,9 @@ ${request.summary}
22798
22812
  if (!this.rl) {
22799
22813
  throw new Error("CLI plugin not initialized");
22800
22814
  }
22801
- return new Promise((resolve5) => {
22815
+ return new Promise((resolve6) => {
22802
22816
  this.rl?.question(prompt, (answer) => {
22803
- resolve5(answer);
22817
+ resolve6(answer);
22804
22818
  });
22805
22819
  });
22806
22820
  }
@@ -23232,7 +23246,7 @@ class WebhookInteractionPlugin {
23232
23246
  this.pendingResponses.delete(requestId);
23233
23247
  return early;
23234
23248
  }
23235
- return new Promise((resolve5) => {
23249
+ return new Promise((resolve6) => {
23236
23250
  const existingCallback = this.receiveCallbacks.get(requestId);
23237
23251
  if (existingCallback) {
23238
23252
  this.clearReceiveTimer(requestId);
@@ -23246,7 +23260,7 @@ class WebhookInteractionPlugin {
23246
23260
  const timer = setTimeout(() => {
23247
23261
  this.clearReceiveTimer(requestId);
23248
23262
  this.receiveCallbacks.delete(requestId);
23249
- resolve5({
23263
+ resolve6({
23250
23264
  requestId,
23251
23265
  action: "skip",
23252
23266
  respondedBy: "timeout",
@@ -23257,7 +23271,7 @@ class WebhookInteractionPlugin {
23257
23271
  this.receiveCallbacks.set(requestId, (response) => {
23258
23272
  this.clearReceiveTimer(requestId);
23259
23273
  this.receiveCallbacks.delete(requestId);
23260
- resolve5(response);
23274
+ resolve6(response);
23261
23275
  });
23262
23276
  });
23263
23277
  }
@@ -25202,6 +25216,7 @@ async function runPipeline(stages, context, eventEmitter) {
25202
25216
  const logger = getLogger();
25203
25217
  const retryCountMap = new Map;
25204
25218
  let i = 0;
25219
+ let stageCostAccum = 0;
25205
25220
  while (i < stages.length) {
25206
25221
  const stage = stages[i];
25207
25222
  if (!stage.enabled(context)) {
@@ -25220,27 +25235,58 @@ async function runPipeline(stages, context, eventEmitter) {
25220
25235
  reason: `Stage "${stage.name}" threw error: ${errorMessage(error48)}`
25221
25236
  };
25222
25237
  eventEmitter?.emit("stage:exit", stage.name, failResult);
25223
- return { success: false, finalAction: "fail", reason: failResult.reason, stoppedAtStage: stage.name, context };
25238
+ return {
25239
+ success: false,
25240
+ finalAction: "fail",
25241
+ reason: failResult.reason,
25242
+ stoppedAtStage: stage.name,
25243
+ context,
25244
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25245
+ };
25224
25246
  }
25247
+ if (result.cost)
25248
+ stageCostAccum += result.cost;
25225
25249
  eventEmitter?.emit("stage:exit", stage.name, result);
25226
25250
  switch (result.action) {
25227
25251
  case "continue":
25228
25252
  i++;
25229
25253
  continue;
25230
25254
  case "skip":
25231
- return { success: false, finalAction: "skip", reason: result.reason, stoppedAtStage: stage.name, context };
25255
+ return {
25256
+ success: false,
25257
+ finalAction: "skip",
25258
+ reason: result.reason,
25259
+ stoppedAtStage: stage.name,
25260
+ context,
25261
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25262
+ };
25232
25263
  case "fail":
25233
- return { success: false, finalAction: "fail", reason: result.reason, stoppedAtStage: stage.name, context };
25264
+ return {
25265
+ success: false,
25266
+ finalAction: "fail",
25267
+ reason: result.reason,
25268
+ stoppedAtStage: stage.name,
25269
+ context,
25270
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25271
+ };
25234
25272
  case "escalate":
25235
25273
  return {
25236
25274
  success: false,
25237
25275
  finalAction: "escalate",
25238
25276
  reason: result.reason ?? "Stage requested escalation to higher tier",
25239
25277
  stoppedAtStage: stage.name,
25240
- context
25278
+ context,
25279
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25241
25280
  };
25242
25281
  case "pause":
25243
- return { success: false, finalAction: "pause", reason: result.reason, stoppedAtStage: stage.name, context };
25282
+ return {
25283
+ success: false,
25284
+ finalAction: "pause",
25285
+ reason: result.reason,
25286
+ stoppedAtStage: stage.name,
25287
+ context,
25288
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25289
+ };
25244
25290
  case "retry": {
25245
25291
  const retries = (retryCountMap.get(result.fromStage) ?? 0) + 1;
25246
25292
  if (retries > MAX_STAGE_RETRIES) {
@@ -25250,7 +25296,8 @@ async function runPipeline(stages, context, eventEmitter) {
25250
25296
  finalAction: "fail",
25251
25297
  reason: `Stage "${stage.name}" exceeded max retries (${MAX_STAGE_RETRIES}) for "${result.fromStage}"`,
25252
25298
  stoppedAtStage: stage.name,
25253
- context
25299
+ context,
25300
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25254
25301
  };
25255
25302
  }
25256
25303
  retryCountMap.set(result.fromStage, retries);
@@ -25262,7 +25309,8 @@ async function runPipeline(stages, context, eventEmitter) {
25262
25309
  finalAction: "escalate",
25263
25310
  reason: `Retry target stage "${result.fromStage}" not found`,
25264
25311
  stoppedAtStage: stage.name,
25265
- context
25312
+ context,
25313
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25266
25314
  };
25267
25315
  }
25268
25316
  logger.debug("pipeline", `Retrying from stage "${result.fromStage}" (attempt ${retries}/${MAX_STAGE_RETRIES})`);
@@ -25275,7 +25323,12 @@ async function runPipeline(stages, context, eventEmitter) {
25275
25323
  }
25276
25324
  }
25277
25325
  }
25278
- return { success: true, finalAction: "complete", context };
25326
+ return {
25327
+ success: true,
25328
+ finalAction: "complete",
25329
+ context,
25330
+ stageCost: stageCostAccum > 0 ? stageCostAccum : undefined
25331
+ };
25279
25332
  }
25280
25333
  var MAX_STAGE_RETRIES = 5;
25281
25334
  var init_runner = __esm(() => {
@@ -25813,8 +25866,7 @@ var init_acceptance = __esm(() => {
25813
25866
  acceptanceStage = {
25814
25867
  name: "acceptance",
25815
25868
  enabled(ctx) {
25816
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
25817
- if (!effectiveConfig.acceptance.enabled) {
25869
+ if (!ctx.config.acceptance.enabled) {
25818
25870
  return false;
25819
25871
  }
25820
25872
  if (!areAllStoriesComplete(ctx)) {
@@ -25824,7 +25876,6 @@ var init_acceptance = __esm(() => {
25824
25876
  },
25825
25877
  async execute(ctx) {
25826
25878
  const logger = getLogger();
25827
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
25828
25879
  logger.info("acceptance", "Running acceptance tests", { storyId: ctx.story.id });
25829
25880
  if (!ctx.featureDir) {
25830
25881
  logger.warn("acceptance", "No feature directory \u2014 skipping acceptance tests", { storyId: ctx.story.id });
@@ -25832,7 +25883,7 @@ var init_acceptance = __esm(() => {
25832
25883
  }
25833
25884
  const testGroups = ctx.acceptanceTestPaths ?? [
25834
25885
  {
25835
- testPath: resolveAcceptanceFeatureTestPath(ctx.featureDir, effectiveConfig.acceptance.testPath, effectiveConfig.project?.language),
25886
+ testPath: resolveAcceptanceFeatureTestPath(ctx.featureDir, ctx.config.acceptance.testPath, ctx.config.project?.language),
25836
25887
  packageDir: ctx.workdir
25837
25888
  }
25838
25889
  ];
@@ -25847,7 +25898,7 @@ var init_acceptance = __esm(() => {
25847
25898
  logger.warn("acceptance", "Acceptance test file not found \u2014 skipping", { storyId: ctx.story.id, testPath });
25848
25899
  continue;
25849
25900
  }
25850
- const testCmdParts = buildAcceptanceRunCommand(testPath, effectiveConfig.project?.testFramework, effectiveConfig.acceptance.command);
25901
+ const testCmdParts = buildAcceptanceRunCommand(testPath, ctx.config.project?.testFramework, ctx.config.acceptance.command);
25851
25902
  logger.info("acceptance", "Running acceptance command", {
25852
25903
  storyId: ctx.story.id,
25853
25904
  cmd: testCmdParts.join(" "),
@@ -25942,12 +25993,12 @@ function buildRefinementPrompt(criteria, codebaseContext, options) {
25942
25993
  `);
25943
25994
  const strategySection = buildStrategySection(options);
25944
25995
  const refinedExample = buildRefinedExample(options?.testStrategy);
25996
+ const codebaseSection = codebaseContext ? `CODEBASE CONTEXT:
25997
+ ${codebaseContext}
25998
+ ` : "";
25945
25999
  const core2 = `You are an acceptance criteria refinement assistant. Your task is to convert raw acceptance criteria into concrete, machine-verifiable assertions.
25946
26000
 
25947
- CODEBASE CONTEXT:
25948
- ${codebaseContext}
25949
- ${strategySection}
25950
- ACCEPTANCE CRITERIA TO REFINE:
26001
+ ${codebaseSection}${strategySection}ACCEPTANCE CRITERIA TO REFINE:
25951
26002
  ${criteriaList}
25952
26003
 
25953
26004
  For each criterion, produce a refined version that is concrete and automatically testable where possible.
@@ -26200,12 +26251,11 @@ ${stderr}` };
26200
26251
  if (!ctx.featureDir) {
26201
26252
  return { action: "fail", reason: "[acceptance-setup] featureDir is not set" };
26202
26253
  }
26203
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
26204
- const language = effectiveConfig.project?.language;
26205
- const testPathConfig = (ctx.effectiveConfig ?? ctx.config).acceptance.testPath;
26254
+ const language = ctx.config.project?.language;
26255
+ const testPathConfig = ctx.config.acceptance.testPath;
26206
26256
  const metaPath = path5.join(ctx.featureDir, "acceptance-meta.json");
26207
- const allCriteria = ctx.prd.userStories.filter((s) => !s.id.startsWith("US-FIX-")).flatMap((s) => s.acceptanceCriteria);
26208
- const nonFixStories = ctx.prd.userStories.filter((s) => !s.id.startsWith("US-FIX-"));
26257
+ const allCriteria = ctx.prd.userStories.filter((s) => !s.id.startsWith("US-FIX-") && s.status !== "decomposed").flatMap((s) => s.acceptanceCriteria);
26258
+ const nonFixStories = ctx.prd.userStories.filter((s) => !s.id.startsWith("US-FIX-") && s.status !== "decomposed");
26209
26259
  const workdirGroups = new Map;
26210
26260
  for (const story of nonFixStories) {
26211
26261
  const wd = story.workdir ?? "";
@@ -26261,7 +26311,7 @@ ${stderr}` };
26261
26311
  }
26262
26312
  if (shouldGenerate) {
26263
26313
  totalCriteria = allCriteria.length;
26264
- const agent = (ctx.agentGetFn ?? _acceptanceSetupDeps.getAgent)(ctx.config.autoMode.defaultAgent);
26314
+ const agent = (ctx.agentGetFn ?? _acceptanceSetupDeps.getAgent)(ctx.rootConfig.autoMode.defaultAgent);
26265
26315
  let allRefinedCriteria;
26266
26316
  if (ctx.config.acceptance.refinement) {
26267
26317
  const maxConcurrency = ctx.config.acceptance.refinementConcurrency ?? 3;
@@ -26305,7 +26355,7 @@ ${stderr}` };
26305
26355
  const groupRefined = allRefinedCriteria.filter((r) => groupStoryIds.has(r.storyId));
26306
26356
  let modelDef;
26307
26357
  try {
26308
- modelDef = resolveModelForAgent(ctx.config.models, ctx.routing.agent ?? ctx.config.autoMode.defaultAgent, ctx.config.acceptance.model ?? "fast", ctx.config.autoMode.defaultAgent);
26358
+ modelDef = resolveModelForAgent(ctx.rootConfig.models, ctx.routing.agent ?? ctx.rootConfig.autoMode.defaultAgent, ctx.config.acceptance.model ?? "fast", ctx.rootConfig.autoMode.defaultAgent);
26309
26359
  } catch {
26310
26360
  const tier = ctx.config.acceptance.model ?? "fast";
26311
26361
  modelDef = { provider: "anthropic", model: tier };
@@ -26341,7 +26391,7 @@ ${stderr}` };
26341
26391
  }
26342
26392
  let redFailCount = 0;
26343
26393
  for (const { testPath, packageDir } of testPaths) {
26344
- const runCmd = buildAcceptanceRunCommand(testPath, effectiveConfig.project?.testFramework, effectiveConfig.acceptance.command);
26394
+ const runCmd = buildAcceptanceRunCommand(testPath, ctx.config.project?.testFramework, ctx.config.acceptance.command);
26345
26395
  getSafeLogger()?.info("acceptance-setup", "Running acceptance RED gate command", {
26346
26396
  cmd: runCmd.join(" "),
26347
26397
  packageDir
@@ -26882,10 +26932,11 @@ ${prompt}`,
26882
26932
  history.push({ role: "implementer", content: prompt });
26883
26933
  history.push({ role: "reviewer", content: result.output });
26884
26934
  const parsed = parseReviewResponse(result.output);
26885
- lastCheckResult = parsed;
26935
+ const reviewResult = { ...parsed, cost: result.estimatedCost ?? 0 };
26936
+ lastCheckResult = reviewResult;
26886
26937
  lastStory = story;
26887
26938
  lastSemanticConfig = semanticConfig;
26888
- return parsed;
26939
+ return reviewResult;
26889
26940
  },
26890
26941
  async reReview(updatedDiff) {
26891
26942
  if (!active) {
@@ -26919,7 +26970,7 @@ ${prompt}`,
26919
26970
  history.push({ role: "reviewer", content: result.output });
26920
26971
  const parsed = parseReviewResponse(result.output);
26921
26972
  const deltaSummary = extractDeltaSummary(result.output, previousFindings, parsed.checkResult.findings);
26922
- const dialogueResult = { ...parsed, deltaSummary };
26973
+ const dialogueResult = { ...parsed, deltaSummary, cost: result.estimatedCost ?? 0 };
26923
26974
  lastCheckResult = dialogueResult;
26924
26975
  const maxMessages = _config.review?.dialogue?.maxDialogueMessages ?? 20;
26925
26976
  if (history.length > maxMessages) {
@@ -27432,6 +27483,7 @@ async function runSemanticReview(workdir, storyGitRef, story, semanticConfig, mo
27432
27483
  timeoutSeconds: naxConfig?.execution?.sessionTimeoutSeconds
27433
27484
  });
27434
27485
  const debateResult = await debateSession.run(prompt);
27486
+ const debateCost = debateResult.totalCostUsd ?? 0;
27435
27487
  let passCount = 0;
27436
27488
  let failCount = 0;
27437
27489
  const allFindings = [];
@@ -27474,7 +27526,8 @@ async function runSemanticReview(workdir, storyGitRef, story, semanticConfig, mo
27474
27526
 
27475
27527
  ${formatFindings(debateBlocking)}`,
27476
27528
  durationMs: durationMs2,
27477
- findings: toReviewFindings(debateBlocking)
27529
+ findings: toReviewFindings(debateBlocking),
27530
+ cost: debateCost
27478
27531
  };
27479
27532
  }
27480
27533
  logger?.info("review", "Semantic review passed (debate, all findings non-blocking)", {
@@ -27487,7 +27540,8 @@ ${formatFindings(debateBlocking)}`,
27487
27540
  command: "",
27488
27541
  exitCode: 0,
27489
27542
  output: "Semantic review passed (debate, all findings were unverifiable or informational)",
27490
- durationMs: durationMs2
27543
+ durationMs: durationMs2,
27544
+ cost: debateCost
27491
27545
  };
27492
27546
  }
27493
27547
  logger?.info("review", "Semantic review passed (debate)", { storyId: story.id, durationMs: durationMs2 });
@@ -27497,7 +27551,8 @@ ${formatFindings(debateBlocking)}`,
27497
27551
  command: "",
27498
27552
  exitCode: 0,
27499
27553
  output: "Semantic review passed",
27500
- durationMs: durationMs2
27554
+ durationMs: durationMs2,
27555
+ cost: debateCost
27501
27556
  };
27502
27557
  }
27503
27558
  const implementerSidecarKey = `${story.id}:implementer`;
@@ -27516,6 +27571,7 @@ ${formatFindings(debateBlocking)}`,
27516
27571
  }
27517
27572
  } catch {}
27518
27573
  let rawResponse;
27574
+ let llmCost = 0;
27519
27575
  try {
27520
27576
  let runErr;
27521
27577
  let runSucceeded = false;
@@ -27532,6 +27588,7 @@ ${formatFindings(debateBlocking)}`,
27532
27588
  config: naxConfig ?? DEFAULT_CONFIG
27533
27589
  });
27534
27590
  runOutput = runResult.output;
27591
+ llmCost = runResult.estimatedCost ?? 0;
27535
27592
  runSucceeded = true;
27536
27593
  } catch (err) {
27537
27594
  runErr = err;
@@ -27547,6 +27604,7 @@ ${formatFindings(debateBlocking)}`,
27547
27604
  config: naxConfig ?? DEFAULT_CONFIG
27548
27605
  });
27549
27606
  rawResponse = typeof completeResult === "string" ? completeResult : completeResult.output;
27607
+ llmCost = typeof completeResult === "string" ? 0 : completeResult.costUsd ?? 0;
27550
27608
  }
27551
27609
  } catch (err) {
27552
27610
  logger?.warn("semantic", "LLM call failed \u2014 fail-open", { cause: String(err) });
@@ -27572,7 +27630,8 @@ ${formatFindings(debateBlocking)}`,
27572
27630
  command: "",
27573
27631
  exitCode: 1,
27574
27632
  output: "semantic review: LLM response truncated but indicated failure (passed:false found in partial response)",
27575
- durationMs: Date.now() - startTime
27633
+ durationMs: Date.now() - startTime,
27634
+ cost: llmCost
27576
27635
  };
27577
27636
  }
27578
27637
  logger?.warn("semantic", "LLM returned invalid JSON \u2014 fail-open", { rawResponse: rawResponse.slice(0, 200) });
@@ -27582,7 +27641,8 @@ ${formatFindings(debateBlocking)}`,
27582
27641
  command: "",
27583
27642
  exitCode: 0,
27584
27643
  output: "semantic review: could not parse LLM response (fail-open)",
27585
- durationMs: Date.now() - startTime
27644
+ durationMs: Date.now() - startTime,
27645
+ cost: llmCost
27586
27646
  };
27587
27647
  }
27588
27648
  const blockingFindings = parsed.findings.filter((f) => isBlockingSeverity(f.severity));
@@ -27619,7 +27679,8 @@ ${formatFindings(blockingFindings)}`;
27619
27679
  exitCode: 1,
27620
27680
  output,
27621
27681
  durationMs: durationMs2,
27622
- findings: toReviewFindings(blockingFindings)
27682
+ findings: toReviewFindings(blockingFindings),
27683
+ cost: llmCost
27623
27684
  };
27624
27685
  }
27625
27686
  if (!parsed.passed && blockingFindings.length === 0) {
@@ -27631,7 +27692,8 @@ ${formatFindings(blockingFindings)}`;
27631
27692
  command: "",
27632
27693
  exitCode: 0,
27633
27694
  output: "Semantic review passed (all findings were unverifiable or informational)",
27634
- durationMs: durationMs2
27695
+ durationMs: durationMs2,
27696
+ cost: llmCost
27635
27697
  };
27636
27698
  }
27637
27699
  const durationMs = Date.now() - startTime;
@@ -27644,7 +27706,8 @@ ${formatFindings(blockingFindings)}`;
27644
27706
  command: "",
27645
27707
  exitCode: parsed.passed ? 0 : 1,
27646
27708
  output: parsed.passed ? "Semantic review passed" : "Semantic review failed (no findings)",
27647
- durationMs
27709
+ durationMs,
27710
+ cost: llmCost
27648
27711
  };
27649
27712
  }
27650
27713
  var _semanticDeps, DIFF_CAP_BYTES = 51200;
@@ -27950,6 +28013,19 @@ class ReviewOrchestrator {
27950
28013
  }
27951
28014
  return { builtIn, success: true, pluginFailed: false };
27952
28015
  }
28016
+ reviewFromContext(ctx) {
28017
+ const retrySkipChecks = ctx.retrySkipChecks;
28018
+ ctx.retrySkipChecks = undefined;
28019
+ const agentResolver = ctx.agentGetFn ?? undefined;
28020
+ const agentName = ctx.rootConfig.autoMode?.defaultAgent;
28021
+ const modelResolver = agentName ? (_tier) => agentResolver ? agentResolver(agentName) ?? null : null : undefined;
28022
+ return this.review(ctx.config.review, ctx.workdir, ctx.config.execution, ctx.plugins, ctx.storyGitRef, ctx.story.workdir, ctx.config.quality?.commands, ctx.story.id, {
28023
+ id: ctx.story.id,
28024
+ title: ctx.story.title,
28025
+ description: ctx.story.description,
28026
+ acceptanceCriteria: ctx.story.acceptanceCriteria
28027
+ }, modelResolver, ctx.config, retrySkipChecks, ctx.prd.feature);
28028
+ }
27953
28029
  }
27954
28030
  var _orchestratorDeps, reviewOrchestrator;
27955
28031
  var init_orchestrator = __esm(() => {
@@ -27965,7 +28041,6 @@ __export(exports_review, {
27965
28041
  reviewStage: () => reviewStage,
27966
28042
  _reviewDeps: () => _reviewDeps
27967
28043
  });
27968
- import { join as join17 } from "path";
27969
28044
  var reviewStage, _reviewDeps;
27970
28045
  var init_review = __esm(() => {
27971
28046
  init_agents();
@@ -27975,18 +28050,13 @@ var init_review = __esm(() => {
27975
28050
  init_orchestrator();
27976
28051
  reviewStage = {
27977
28052
  name: "review",
27978
- enabled: (ctx) => (ctx.effectiveConfig ?? ctx.config).review.enabled,
28053
+ enabled: (ctx) => ctx.config.review.enabled,
27979
28054
  async execute(ctx) {
27980
28055
  const logger = getLogger();
27981
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
27982
- const dialogueEnabled = effectiveConfig.review?.dialogue?.enabled ?? false;
28056
+ const dialogueEnabled = ctx.config.review?.dialogue?.enabled ?? false;
27983
28057
  logger.info("review", "Running review phase", { storyId: ctx.story.id });
27984
- const effectiveWorkdir = ctx.story.workdir ? join17(ctx.workdir, ctx.story.workdir) : ctx.workdir;
27985
28058
  const agentResolver = ctx.agentGetFn ?? getAgent;
27986
- const agentName = effectiveConfig.autoMode?.defaultAgent;
27987
- const modelResolver = (_tier) => agentName ? agentResolver(agentName) ?? null : null;
27988
- const retrySkipChecks = ctx.retrySkipChecks;
27989
- ctx.retrySkipChecks = undefined;
28059
+ const agentName = ctx.rootConfig.autoMode?.defaultAgent;
27990
28060
  if (dialogueEnabled && ctx.reviewerSession) {
27991
28061
  try {
27992
28062
  const diff = ctx.storyGitRef ?? "";
@@ -28024,8 +28094,8 @@ var init_review = __esm(() => {
28024
28094
  }
28025
28095
  if (dialogueEnabled && !ctx.reviewerSession) {
28026
28096
  const agent = agentName ? agentResolver(agentName) ?? null : null;
28027
- ctx.reviewerSession = _reviewDeps.createReviewerSession(agent ?? null, ctx.story.id, effectiveWorkdir, ctx.prd.feature ?? "", ctx.config);
28028
- const semanticConfig = effectiveConfig.review?.semantic;
28097
+ ctx.reviewerSession = _reviewDeps.createReviewerSession(agent ?? null, ctx.story.id, ctx.workdir, ctx.prd.feature ?? "", ctx.config);
28098
+ const semanticConfig = ctx.config.review?.semantic;
28029
28099
  if (semanticConfig && agent) {
28030
28100
  try {
28031
28101
  const diff = ctx.storyGitRef ?? "";
@@ -28053,6 +28123,7 @@ var init_review = __esm(() => {
28053
28123
  ],
28054
28124
  totalDurationMs: 0
28055
28125
  };
28126
+ const dialogueCost = sessionResult.cost ?? 0;
28056
28127
  if (passed) {
28057
28128
  logger.info("review", "Review passed (dialogue session)", { storyId: ctx.story.id });
28058
28129
  } else {
@@ -28060,7 +28131,7 @@ var init_review = __esm(() => {
28060
28131
  storyId: ctx.story.id
28061
28132
  });
28062
28133
  }
28063
- return { action: "continue" };
28134
+ return { action: "continue", cost: dialogueCost || undefined };
28064
28135
  } catch (err) {
28065
28136
  logger.warn("review", "ReviewerSession.review() failed \u2014 falling back to one-shot review", {
28066
28137
  storyId: ctx.story.id
@@ -28068,13 +28139,9 @@ var init_review = __esm(() => {
28068
28139
  }
28069
28140
  }
28070
28141
  }
28071
- const result = await reviewOrchestrator.review(effectiveConfig.review, effectiveWorkdir, effectiveConfig.execution, ctx.plugins, ctx.storyGitRef, ctx.story.workdir, effectiveConfig.quality?.commands, ctx.story.id, {
28072
- id: ctx.story.id,
28073
- title: ctx.story.title,
28074
- description: ctx.story.description,
28075
- acceptanceCriteria: ctx.story.acceptanceCriteria
28076
- }, modelResolver, ctx.config, retrySkipChecks, ctx.prd.feature);
28142
+ const result = await reviewOrchestrator.reviewFromContext(ctx);
28077
28143
  ctx.reviewResult = result.builtIn;
28144
+ const reviewCost = (result.builtIn.checks ?? []).reduce((sum, c) => sum + (c.cost ?? 0), 0) || undefined;
28078
28145
  if (!result.success) {
28079
28146
  const pluginFindings = result.builtIn.pluginReviewers?.flatMap((pr) => pr.findings ?? []) ?? [];
28080
28147
  const semanticFindings = (result.builtIn.checks ?? []).filter((c) => c.check === "semantic" && !c.success && c.findings?.length).flatMap((c) => c.findings ?? []);
@@ -28083,29 +28150,29 @@ var init_review = __esm(() => {
28083
28150
  ctx.reviewFindings = allFindings;
28084
28151
  }
28085
28152
  if (result.pluginFailed) {
28086
- if (ctx.interaction && isTriggerEnabled("security-review", effectiveConfig)) {
28087
- const shouldContinue = await _reviewDeps.checkSecurityReview({ featureName: ctx.prd.feature, storyId: ctx.story.id }, effectiveConfig, ctx.interaction);
28153
+ if (ctx.interaction && isTriggerEnabled("security-review", ctx.config)) {
28154
+ const shouldContinue = await _reviewDeps.checkSecurityReview({ featureName: ctx.prd.feature, storyId: ctx.story.id }, ctx.config, ctx.interaction);
28088
28155
  if (!shouldContinue) {
28089
28156
  logger.error("review", `Plugin reviewer failed: ${result.failureReason}`, { storyId: ctx.story.id });
28090
- return { action: "fail", reason: `Review failed: ${result.failureReason}` };
28157
+ return { action: "fail", reason: `Review failed: ${result.failureReason}`, cost: reviewCost };
28091
28158
  }
28092
28159
  logger.warn("review", "Security-review trigger escalated \u2014 retrying story", { storyId: ctx.story.id });
28093
- return { action: "escalate", reason: `Review failed: ${result.failureReason}` };
28160
+ return { action: "escalate", reason: `Review failed: ${result.failureReason}`, cost: reviewCost };
28094
28161
  }
28095
28162
  logger.error("review", `Plugin reviewer failed: ${result.failureReason}`, { storyId: ctx.story.id });
28096
- return { action: "fail", reason: `Review failed: ${result.failureReason}` };
28163
+ return { action: "fail", reason: `Review failed: ${result.failureReason}`, cost: reviewCost };
28097
28164
  }
28098
28165
  logger.warn("review", "Review failed (built-in checks) \u2014 handing off to autofix", {
28099
28166
  reason: result.failureReason,
28100
28167
  storyId: ctx.story.id
28101
28168
  });
28102
- return { action: "continue" };
28169
+ return { action: "continue", cost: reviewCost };
28103
28170
  }
28104
28171
  logger.info("review", "Review passed", {
28105
28172
  durationMs: result.builtIn.totalDurationMs,
28106
28173
  storyId: ctx.story.id
28107
28174
  });
28108
- return { action: "continue" };
28175
+ return { action: "continue", cost: reviewCost };
28109
28176
  }
28110
28177
  };
28111
28178
  _reviewDeps = {
@@ -28115,7 +28182,6 @@ var init_review = __esm(() => {
28115
28182
  });
28116
28183
 
28117
28184
  // src/pipeline/stages/autofix.ts
28118
- import { join as join18 } from "path";
28119
28185
  async function recheckReview(ctx) {
28120
28186
  const { reviewStage: reviewStage2 } = await Promise.resolve().then(() => (init_review(), exports_review));
28121
28187
  if (!reviewStage2.enabled(ctx))
@@ -28153,16 +28219,15 @@ Your previous fix attempt (attempt ${attempt}) did not resolve the quality error
28153
28219
  }
28154
28220
  async function runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWorkdir) {
28155
28221
  const logger = getLogger();
28156
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
28157
- const maxPerCycle = effectiveConfig.quality.autofix?.maxAttempts ?? 2;
28158
- const maxTotal = effectiveConfig.quality.autofix?.maxTotalAttempts ?? 10;
28159
- const rethinkAtAttempt = effectiveConfig.quality.autofix?.rethinkAtAttempt ?? 2;
28160
- const urgencyAtAttempt = effectiveConfig.quality.autofix?.urgencyAtAttempt ?? 3;
28222
+ const maxPerCycle = ctx.config.quality.autofix?.maxAttempts ?? 2;
28223
+ const maxTotal = ctx.config.quality.autofix?.maxTotalAttempts ?? 10;
28224
+ const rethinkAtAttempt = ctx.config.quality.autofix?.rethinkAtAttempt ?? 2;
28225
+ const urgencyAtAttempt = ctx.config.quality.autofix?.urgencyAtAttempt ?? 3;
28161
28226
  const consumed = ctx.autofixAttempt ?? 0;
28162
28227
  const failedChecks = collectFailedChecks(ctx);
28163
28228
  if (failedChecks.length === 0) {
28164
28229
  logger.debug("autofix", "No failed checks found \u2014 skipping agent rectification", { storyId: ctx.story.id });
28165
- return false;
28230
+ return { succeeded: false, cost: 0 };
28166
28231
  }
28167
28232
  if (consumed >= maxTotal) {
28168
28233
  logger.warn("autofix", "Global autofix budget exhausted \u2014 escalating", {
@@ -28170,7 +28235,7 @@ async function runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWor
28170
28235
  totalAttempts: consumed,
28171
28236
  maxTotalAttempts: maxTotal
28172
28237
  });
28173
- return false;
28238
+ return { succeeded: false, cost: 0 };
28174
28239
  }
28175
28240
  const remainingBudget = maxTotal - consumed;
28176
28241
  const maxAttempts = Math.min(maxPerCycle, remainingBudget);
@@ -28179,7 +28244,8 @@ async function runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWor
28179
28244
  attempt: 0,
28180
28245
  failedChecks
28181
28246
  };
28182
- return runSharedRectificationLoop({
28247
+ let autofixCostAccum = 0;
28248
+ const succeeded = await runSharedRectificationLoop({
28183
28249
  stage: "autofix",
28184
28250
  storyId: ctx.story.id,
28185
28251
  maxAttempts,
@@ -28206,29 +28272,30 @@ async function runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWor
28206
28272
  },
28207
28273
  runAttempt: async (attempt, prompt) => {
28208
28274
  ctx.autofixAttempt = consumed + attempt;
28209
- const agent = agentGetFn(ctx.config.autoMode.defaultAgent);
28275
+ const agent = agentGetFn(ctx.rootConfig.autoMode.defaultAgent);
28210
28276
  if (!agent) {
28211
28277
  logger.error("autofix", "Agent not found \u2014 cannot run agent rectification", { storyId: ctx.story.id });
28212
28278
  throw new Error("AUTOFIX_AGENT_NOT_FOUND");
28213
28279
  }
28214
- const modelTier = ctx.story.routing?.modelTier ?? ctx.config.autoMode.escalation.tierOrder[0]?.tier ?? "balanced";
28215
- const modelDef = resolveModelForAgent(ctx.config.models, ctx.routing.agent ?? ctx.config.autoMode.defaultAgent, modelTier, ctx.config.autoMode.defaultAgent);
28216
- const rectificationWorkdir = ctx.story.workdir ? join18(ctx.workdir, ctx.story.workdir) : ctx.workdir;
28280
+ const modelTier = ctx.story.routing?.modelTier ?? ctx.rootConfig.autoMode.escalation.tierOrder[0]?.tier ?? "balanced";
28281
+ const modelDef = resolveModelForAgent(ctx.rootConfig.models, ctx.routing.agent ?? ctx.rootConfig.autoMode.defaultAgent, modelTier, ctx.rootConfig.autoMode.defaultAgent);
28217
28282
  const result = await agent.run({
28218
28283
  prompt,
28219
- workdir: rectificationWorkdir,
28284
+ workdir: ctx.workdir,
28220
28285
  modelTier,
28221
28286
  modelDef,
28222
28287
  timeoutSeconds: ctx.config.execution.sessionTimeoutSeconds,
28223
28288
  dangerouslySkipPermissions: resolvePermissions(ctx.config, "rectification").skipPermissions,
28224
28289
  pipelineStage: "rectification",
28225
28290
  config: ctx.config,
28291
+ projectDir: ctx.projectDir,
28226
28292
  maxInteractionTurns: ctx.config.agent?.maxInteractionTurns,
28227
28293
  storyId: ctx.story.id,
28228
28294
  sessionRole: "implementer"
28229
28295
  });
28296
+ autofixCostAccum += result.estimatedCost ?? 0;
28230
28297
  if (ctx.reviewerSession && result.output) {
28231
- const maxClarifications = effectiveConfig.review?.dialogue?.maxClarificationsPerAttempt ?? 3;
28298
+ const maxClarifications = ctx.config.review?.dialogue?.maxClarificationsPerAttempt ?? 3;
28232
28299
  let clarifyCount = 0;
28233
28300
  const clarifyRegex = new RegExp(CLARIFY_REGEX.source, `${CLARIFY_REGEX.flags}g`);
28234
28301
  let match;
@@ -28311,6 +28378,7 @@ async function runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWor
28311
28378
  }
28312
28379
  throw error48;
28313
28380
  });
28381
+ return { succeeded, cost: autofixCostAccum };
28314
28382
  }
28315
28383
  var CLARIFY_REGEX, autofixStage, _autofixDeps;
28316
28384
  var init_autofix = __esm(() => {
@@ -28328,7 +28396,7 @@ var init_autofix = __esm(() => {
28328
28396
  return false;
28329
28397
  if (ctx.reviewResult.success)
28330
28398
  return false;
28331
- const autofixEnabled = (ctx.effectiveConfig ?? ctx.config).quality.autofix?.enabled ?? true;
28399
+ const autofixEnabled = ctx.config.quality.autofix?.enabled ?? true;
28332
28400
  return autofixEnabled;
28333
28401
  },
28334
28402
  skipReason(ctx) {
@@ -28342,16 +28410,14 @@ var init_autofix = __esm(() => {
28342
28410
  if (!reviewResult || reviewResult.success) {
28343
28411
  return { action: "continue" };
28344
28412
  }
28345
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
28346
- const lintFixCmd = effectiveConfig.quality.commands.lintFix ?? effectiveConfig.review.commands.lintFix;
28347
- const formatFixCmd = effectiveConfig.quality.commands.formatFix ?? effectiveConfig.review.commands.formatFix;
28348
- const effectiveWorkdir = ctx.story.workdir ? join18(ctx.workdir, ctx.story.workdir) : ctx.workdir;
28413
+ const lintFixCmd = ctx.config.quality.commands.lintFix ?? ctx.config.review.commands.lintFix;
28414
+ const formatFixCmd = ctx.config.quality.commands.formatFix ?? ctx.config.review.commands.formatFix;
28349
28415
  const failedCheckNames = new Set((reviewResult.checks ?? []).filter((c) => !c.success).map((c) => c.check));
28350
28416
  const hasLintFailure = failedCheckNames.has("lint");
28351
28417
  logger.info("autofix", "Starting autofix", {
28352
28418
  storyId: ctx.story.id,
28353
28419
  failedChecks: [...failedCheckNames],
28354
- workdir: effectiveWorkdir
28420
+ workdir: ctx.workdir
28355
28421
  });
28356
28422
  if (hasLintFailure && (lintFixCmd || formatFixCmd)) {
28357
28423
  if (lintFixCmd) {
@@ -28359,7 +28425,7 @@ var init_autofix = __esm(() => {
28359
28425
  const lintResult = await _autofixDeps.runQualityCommand({
28360
28426
  commandName: "lintFix",
28361
28427
  command: lintFixCmd,
28362
- workdir: effectiveWorkdir,
28428
+ workdir: ctx.workdir,
28363
28429
  storyId: ctx.story.id
28364
28430
  });
28365
28431
  logger.debug("autofix", `lintFix exit=${lintResult.exitCode}`, { storyId: ctx.story.id, command: lintFixCmd });
@@ -28375,7 +28441,7 @@ var init_autofix = __esm(() => {
28375
28441
  const fmtResult = await _autofixDeps.runQualityCommand({
28376
28442
  commandName: "formatFix",
28377
28443
  command: formatFixCmd,
28378
- workdir: effectiveWorkdir,
28444
+ workdir: ctx.workdir,
28379
28445
  storyId: ctx.story.id
28380
28446
  });
28381
28447
  logger.debug("autofix", `formatFix exit=${fmtResult.exitCode}`, {
@@ -28399,7 +28465,7 @@ var init_autofix = __esm(() => {
28399
28465
  storyId: ctx.story.id
28400
28466
  });
28401
28467
  }
28402
- const agentFixed = await _autofixDeps.runAgentRectification(ctx, lintFixCmd, formatFixCmd, effectiveWorkdir);
28468
+ const { succeeded: agentFixed, cost: agentCost } = await _autofixDeps.runAgentRectification(ctx, lintFixCmd, formatFixCmd, ctx.workdir);
28403
28469
  if (agentFixed) {
28404
28470
  if (ctx.reviewResult)
28405
28471
  ctx.reviewResult = { ...ctx.reviewResult, success: true };
@@ -28412,10 +28478,14 @@ var init_autofix = __esm(() => {
28412
28478
  });
28413
28479
  }
28414
28480
  logger.info("autofix", "Agent rectification succeeded \u2014 retrying review", { storyId: ctx.story.id });
28415
- return { action: "retry", fromStage: "review" };
28481
+ return { action: "retry", fromStage: "review", cost: agentCost };
28416
28482
  }
28417
28483
  logger.warn("autofix", "Autofix exhausted \u2014 escalating", { storyId: ctx.story.id });
28418
- return { action: "escalate", reason: "Autofix exhausted: review still failing after fix attempts" };
28484
+ return {
28485
+ action: "escalate",
28486
+ reason: "Autofix exhausted: review still failing after fix attempts",
28487
+ cost: agentCost
28488
+ };
28419
28489
  }
28420
28490
  };
28421
28491
  _autofixDeps = {
@@ -28485,10 +28555,10 @@ var init_semantic_verdict = __esm(() => {
28485
28555
 
28486
28556
  // src/execution/progress.ts
28487
28557
  import { appendFile as appendFile2, mkdir } from "fs/promises";
28488
- import { join as join19 } from "path";
28558
+ import { join as join17 } from "path";
28489
28559
  async function appendProgress(featureDir, storyId, status, message) {
28490
28560
  await mkdir(featureDir, { recursive: true });
28491
- const progressPath = join19(featureDir, "progress.txt");
28561
+ const progressPath = join17(featureDir, "progress.txt");
28492
28562
  const timestamp = new Date().toISOString();
28493
28563
  const entry = `[${timestamp}] ${storyId} \u2014 ${status.toUpperCase()} \u2014 ${message}
28494
28564
  `;
@@ -28594,7 +28664,7 @@ function estimateTokens(text) {
28594
28664
 
28595
28665
  // src/constitution/loader.ts
28596
28666
  import { existsSync as existsSync17 } from "fs";
28597
- import { join as join20 } from "path";
28667
+ import { join as join18 } from "path";
28598
28668
  function truncateToTokens(text, maxTokens) {
28599
28669
  const maxChars = maxTokens * 3;
28600
28670
  if (text.length <= maxChars) {
@@ -28616,7 +28686,7 @@ async function loadConstitution(projectDir, config2) {
28616
28686
  }
28617
28687
  let combinedContent = "";
28618
28688
  if (!config2.skipGlobal) {
28619
- const globalPath = join20(globalConfigDir(), config2.path);
28689
+ const globalPath = join18(globalConfigDir(), config2.path);
28620
28690
  if (existsSync17(globalPath)) {
28621
28691
  const validatedPath = validateFilePath(globalPath, globalConfigDir());
28622
28692
  const globalFile = Bun.file(validatedPath);
@@ -28626,7 +28696,7 @@ async function loadConstitution(projectDir, config2) {
28626
28696
  }
28627
28697
  }
28628
28698
  }
28629
- const projectPath = join20(projectDir, config2.path);
28699
+ const projectPath = join18(projectDir, config2.path);
28630
28700
  if (existsSync17(projectPath)) {
28631
28701
  const validatedPath = validateFilePath(projectPath, projectDir);
28632
28702
  const projectFile = Bun.file(validatedPath);
@@ -28674,7 +28744,7 @@ var init_constitution = __esm(() => {
28674
28744
  });
28675
28745
 
28676
28746
  // src/pipeline/stages/constitution.ts
28677
- import { dirname as dirname4 } from "path";
28747
+ import { dirname as dirname3 } from "path";
28678
28748
  var constitutionStage;
28679
28749
  var init_constitution2 = __esm(() => {
28680
28750
  init_constitution();
@@ -28684,7 +28754,7 @@ var init_constitution2 = __esm(() => {
28684
28754
  enabled: (ctx) => ctx.config.constitution.enabled,
28685
28755
  async execute(ctx) {
28686
28756
  const logger = getLogger();
28687
- const ngentDir = ctx.featureDir ? dirname4(dirname4(ctx.featureDir)) : `${ctx.workdir}/nax`;
28757
+ const ngentDir = ctx.featureDir ? dirname3(dirname3(ctx.featureDir)) : `${ctx.workdir}/nax`;
28688
28758
  const result = await loadConstitution(ngentDir, ctx.config.constitution);
28689
28759
  if (result) {
28690
28760
  ctx.constitution = result;
@@ -28849,6 +28919,27 @@ function createStoryContext(story, priority) {
28849
28919
  return { type: "story", storyId: story.id, content, priority, tokens: estimateTokens(content) };
28850
28920
  }
28851
28921
  function createDependencyContext(story, priority) {
28922
+ const content = isCompletedDependency(story) ? formatCompletedDependency(story) : formatFullDependency(story);
28923
+ return { type: "dependency", storyId: story.id, content, priority, tokens: estimateTokens(content) };
28924
+ }
28925
+ function isCompletedDependency(story) {
28926
+ return story.status === "passed" || story.status === "decomposed" || story.status === "skipped";
28927
+ }
28928
+ function formatCompletedDependency(story) {
28929
+ const header = `## ${story.id} (${story.status}): ${story.title}`;
28930
+ if (story.diffSummary) {
28931
+ return `${header}
28932
+
28933
+ **Changes made:**
28934
+ \`\`\`
28935
+ ${story.diffSummary}
28936
+ \`\`\``;
28937
+ }
28938
+ return `${header}
28939
+
28940
+ Status: ${story.status} (no diff summary available)`;
28941
+ }
28942
+ function formatFullDependency(story) {
28852
28943
  let content = formatStoryAsText(story);
28853
28944
  if (story.diffSummary) {
28854
28945
  content += `
@@ -28858,7 +28949,7 @@ function createDependencyContext(story, priority) {
28858
28949
  ${story.diffSummary}
28859
28950
  \`\`\``;
28860
28951
  }
28861
- return { type: "dependency", storyId: story.id, content, priority, tokens: estimateTokens(content) };
28952
+ return content;
28862
28953
  }
28863
28954
  function createErrorContext(errorMessage2, priority) {
28864
28955
  return { type: "error", content: errorMessage2, priority, tokens: estimateTokens(errorMessage2) };
@@ -29534,7 +29625,8 @@ ${pkgContent.trim()}`;
29534
29625
  if (built.elements.length === 0 && !packageSection) {
29535
29626
  return;
29536
29627
  }
29537
- const baseMarkdown = built.elements.length > 0 ? formatContextAsMarkdown(built) : "";
29628
+ const elementsForMarkdown = built.elements.filter((e) => e.type !== "story");
29629
+ const baseMarkdown = elementsForMarkdown.length > 0 ? formatContextAsMarkdown({ ...built, elements: elementsForMarkdown }) : "";
29538
29630
  const markdown = packageSection ? `${baseMarkdown}${packageSection}` : baseMarkdown;
29539
29631
  return { markdown, builtContext: built };
29540
29632
  } catch (error48) {
@@ -29545,6 +29637,10 @@ ${pkgContent.trim()}`;
29545
29637
  return;
29546
29638
  }
29547
29639
  }
29640
+ function buildStoryContextFullFromCtx(ctx) {
29641
+ const packageWorkdir = ctx.story.workdir ? ctx.workdir : undefined;
29642
+ return buildStoryContextFull(ctx.prd, ctx.story, ctx.config, packageWorkdir);
29643
+ }
29548
29644
  function getAllReadyStories(prd) {
29549
29645
  const storyIds = new Set(prd.userStories.map((s) => s.id));
29550
29646
  const completedIds = new Set(prd.userStories.filter((s) => s.passes || s.status === "skipped").map((s) => s.id));
@@ -29655,7 +29751,6 @@ var init_helpers = __esm(() => {
29655
29751
  });
29656
29752
 
29657
29753
  // src/pipeline/stages/context.ts
29658
- import { join as join21 } from "path";
29659
29754
  var contextStage;
29660
29755
  var init_context2 = __esm(() => {
29661
29756
  init_helpers();
@@ -29665,8 +29760,7 @@ var init_context2 = __esm(() => {
29665
29760
  enabled: () => true,
29666
29761
  async execute(ctx) {
29667
29762
  const logger = getLogger();
29668
- const packageWorkdir = ctx.story.workdir ? join21(ctx.workdir, ctx.story.workdir) : undefined;
29669
- const result = await buildStoryContextFull(ctx.prd, ctx.story, ctx.config, packageWorkdir);
29763
+ const result = await buildStoryContextFullFromCtx(ctx);
29670
29764
  if (result) {
29671
29765
  ctx.contextMarkdown = result.markdown;
29672
29766
  ctx.builtContext = result.builtContext;
@@ -29799,14 +29893,14 @@ var init_isolation = __esm(() => {
29799
29893
 
29800
29894
  // src/context/greenfield.ts
29801
29895
  import { readdir } from "fs/promises";
29802
- import { join as join22 } from "path";
29896
+ import { join as join19 } from "path";
29803
29897
  async function scanForTestFiles(dir, testPattern, isRootCall = true) {
29804
29898
  const results = [];
29805
29899
  const ignoreDirs = new Set(["node_modules", "dist", "build", ".next", ".git"]);
29806
29900
  try {
29807
29901
  const entries = await readdir(dir, { withFileTypes: true });
29808
29902
  for (const entry of entries) {
29809
- const fullPath = join22(dir, entry.name);
29903
+ const fullPath = join19(dir, entry.name);
29810
29904
  if (entry.isDirectory()) {
29811
29905
  if (ignoreDirs.has(entry.name))
29812
29906
  continue;
@@ -29872,10 +29966,10 @@ async function executeWithTimeout(command, timeoutSeconds, env2, options) {
29872
29966
  const timeoutMs = timeoutSeconds * 1000;
29873
29967
  let timedOut = false;
29874
29968
  const timer = { id: undefined };
29875
- const timeoutPromise = new Promise((resolve7) => {
29969
+ const timeoutPromise = new Promise((resolve8) => {
29876
29970
  timer.id = setTimeout(() => {
29877
29971
  timedOut = true;
29878
- resolve7();
29972
+ resolve8();
29879
29973
  }, timeoutMs);
29880
29974
  });
29881
29975
  const processPromise = proc.exited;
@@ -29884,7 +29978,12 @@ async function executeWithTimeout(command, timeoutSeconds, env2, options) {
29884
29978
  if (timedOut) {
29885
29979
  const pid = proc.pid;
29886
29980
  killProcessGroup(pid, "SIGTERM");
29887
- await Bun.sleep(gracePeriodMs);
29981
+ await Promise.race([
29982
+ proc.exited,
29983
+ new Promise((resolve8) => {
29984
+ setTimeout(resolve8, gracePeriodMs);
29985
+ })
29986
+ ]);
29888
29987
  killProcessGroup(pid, "SIGKILL");
29889
29988
  const [out, err] = await Promise.all([
29890
29989
  raceWithDeadline(stdoutPromise, drainTimeoutMs),
@@ -30146,13 +30245,13 @@ function parseTestOutput(output, exitCode) {
30146
30245
 
30147
30246
  // src/verification/runners.ts
30148
30247
  import { existsSync as existsSync18 } from "fs";
30149
- import { join as join23 } from "path";
30248
+ import { join as join20 } from "path";
30150
30249
  async function verifyAssets(workingDirectory, expectedFiles) {
30151
30250
  if (!expectedFiles || expectedFiles.length === 0)
30152
30251
  return { success: true, missingFiles: [] };
30153
30252
  const missingFiles = [];
30154
30253
  for (const file3 of expectedFiles) {
30155
- if (!existsSync18(join23(workingDirectory, file3)))
30254
+ if (!existsSync18(join20(workingDirectory, file3)))
30156
30255
  missingFiles.push(file3);
30157
30256
  }
30158
30257
  if (missingFiles.length > 0) {
@@ -30477,10 +30576,10 @@ var init_prompts = __esm(() => {
30477
30576
  });
30478
30577
 
30479
30578
  // src/tdd/rectification-gate.ts
30480
- async function runFullSuiteGate(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, featureName) {
30579
+ async function runFullSuiteGate(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, featureName, projectDir) {
30481
30580
  const rectificationEnabled = config2.execution.rectification?.enabled ?? false;
30482
30581
  if (!rectificationEnabled)
30483
- return false;
30582
+ return { passed: false, cost: 0 };
30484
30583
  const rectificationConfig = config2.execution.rectification;
30485
30584
  const testCmd = config2.quality?.commands?.test ?? "bun test";
30486
30585
  const fullSuiteTimeout = rectificationConfig.fullSuiteTimeoutSeconds;
@@ -30495,7 +30594,7 @@ async function runFullSuiteGate(story, config2, workdir, agent, implementerTier,
30495
30594
  if (!fullSuitePassed && fullSuiteResult.output) {
30496
30595
  const testSummary = _rectificationGateDeps.parseBunTestOutput(fullSuiteResult.output);
30497
30596
  if (testSummary.failed > 0) {
30498
- return await runRectificationLoop(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, testSummary, rectificationConfig, testCmd, fullSuiteTimeout, featureName);
30597
+ return await runRectificationLoop(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, testSummary, rectificationConfig, testCmd, fullSuiteTimeout, featureName, projectDir);
30499
30598
  }
30500
30599
  if (testSummary.passed > 0) {
30501
30600
  logger.info("tdd", "Full suite gate passed (non-zero exit, 0 failures, tests detected)", {
@@ -30503,7 +30602,7 @@ async function runFullSuiteGate(story, config2, workdir, agent, implementerTier,
30503
30602
  exitCode: fullSuiteResult.exitCode,
30504
30603
  passedTests: testSummary.passed
30505
30604
  });
30506
- return true;
30605
+ return { passed: true, cost: 0 };
30507
30606
  }
30508
30607
  logger.warn("tdd", "Full suite gate inconclusive \u2014 no test results parsed from output (possible crash/OOM)", {
30509
30608
  storyId: story.id,
@@ -30511,19 +30610,19 @@ async function runFullSuiteGate(story, config2, workdir, agent, implementerTier,
30511
30610
  outputLength: fullSuiteResult.output.length,
30512
30611
  outputTail: fullSuiteResult.output.slice(-200)
30513
30612
  });
30514
- return false;
30613
+ return { passed: false, cost: 0 };
30515
30614
  }
30516
30615
  if (fullSuitePassed) {
30517
30616
  logger.info("tdd", "Full suite gate passed", { storyId: story.id });
30518
- return true;
30617
+ return { passed: true, cost: 0 };
30519
30618
  }
30520
30619
  logger.warn("tdd", "Full suite gate execution failed (no output)", {
30521
30620
  storyId: story.id,
30522
30621
  exitCode: fullSuiteResult.exitCode
30523
30622
  });
30524
- return false;
30623
+ return { passed: false, cost: 0 };
30525
30624
  }
30526
- async function runRectificationLoop(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, testSummary, rectificationConfig, testCmd, fullSuiteTimeout, featureName) {
30625
+ async function runRectificationLoop(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, testSummary, rectificationConfig, testCmd, fullSuiteTimeout, featureName, projectDir) {
30527
30626
  const rectificationState = {
30528
30627
  attempt: 0,
30529
30628
  initialFailures: testSummary.failed,
@@ -30543,6 +30642,7 @@ async function runRectificationLoop(story, config2, workdir, agent, implementerT
30543
30642
  ...rectificationState,
30544
30643
  isolationPassed: true
30545
30644
  };
30645
+ let gateCostAccum = 0;
30546
30646
  const fixed = await runSharedRectificationLoop({
30547
30647
  stage: "tdd",
30548
30648
  storyId: story.id,
@@ -30574,6 +30674,7 @@ async function runRectificationLoop(story, config2, workdir, agent, implementerT
30574
30674
  dangerouslySkipPermissions: resolvePermissions(config2, "rectification").skipPermissions,
30575
30675
  pipelineStage: "rectification",
30576
30676
  config: config2,
30677
+ projectDir,
30577
30678
  maxInteractionTurns: config2.agent?.maxInteractionTurns,
30578
30679
  featureName,
30579
30680
  storyId: story.id,
@@ -30584,6 +30685,7 @@ async function runRectificationLoop(story, config2, workdir, agent, implementerT
30584
30685
  if (!rectifyResult.success && rectifyResult.pid) {
30585
30686
  await cleanupProcessTree(rectifyResult.pid);
30586
30687
  }
30688
+ gateCostAccum += rectifyResult.estimatedCost ?? 0;
30587
30689
  if (rectifyResult.success) {
30588
30690
  logger.info("tdd", "Rectification agent session complete", {
30589
30691
  storyId: story.id,
@@ -30644,7 +30746,7 @@ async function runRectificationLoop(story, config2, workdir, agent, implementerT
30644
30746
  }
30645
30747
  });
30646
30748
  if (fixed) {
30647
- return true;
30749
+ return { passed: true, cost: gateCostAccum };
30648
30750
  }
30649
30751
  const finalFullSuite = await _rectificationGateDeps.executeWithTimeout(testCmd, fullSuiteTimeout, undefined, {
30650
30752
  cwd: workdir
@@ -30656,10 +30758,10 @@ async function runRectificationLoop(story, config2, workdir, agent, implementerT
30656
30758
  attempts: rectificationState.attempt,
30657
30759
  remainingFailures: rectificationState.currentFailures
30658
30760
  });
30659
- return false;
30761
+ return { passed: false, cost: gateCostAccum };
30660
30762
  }
30661
30763
  logger.info("tdd", "Full suite gate passed", { storyId: story.id });
30662
- return true;
30764
+ return { passed: true, cost: gateCostAccum };
30663
30765
  }
30664
30766
  var _rectificationGateDeps;
30665
30767
  var init_rectification_gate = __esm(() => {
@@ -31063,36 +31165,10 @@ Set \`approved: false\` when ANY of these conditions are true:
31063
31165
  - Critical acceptance criteria are not met
31064
31166
  - Code quality is poor (security issues, severe bugs, etc.)
31065
31167
 
31066
- **Full JSON schema example** (fill in all fields with real values):
31168
+ **JSON schema** (fill in all fields with real values):
31067
31169
 
31068
31170
  \`\`\`json
31069
- {
31070
- "version": 1,
31071
- "approved": true,
31072
- "tests": {
31073
- "allPassing": true,
31074
- "passCount": 42,
31075
- "failCount": 0
31076
- },
31077
- "testModifications": {
31078
- "detected": false,
31079
- "files": [],
31080
- "legitimate": true,
31081
- "reasoning": "No test files were modified by the implementer"
31082
- },
31083
- "acceptanceCriteria": {
31084
- "allMet": true,
31085
- "criteria": [
31086
- { "criterion": "Example criterion", "met": true }
31087
- ]
31088
- },
31089
- "quality": {
31090
- "rating": "good",
31091
- "issues": []
31092
- },
31093
- "fixes": [],
31094
- "reasoning": "All tests pass, implementation is clean, all acceptance criteria are met."
31095
- }
31171
+ {"version":1,"approved":true,"tests":{"allPassing":true,"passCount":42,"failCount":0},"testModifications":{"detected":false,"files":[],"legitimate":true,"reasoning":"..."},"acceptanceCriteria":{"allMet":true,"criteria":[{"criterion":"...","met":true}]},"quality":{"rating":"good","issues":[]},"fixes":[],"reasoning":"..."}
31096
31172
  \`\`\`
31097
31173
 
31098
31174
  **Field notes:**
@@ -31109,13 +31185,13 @@ var exports_loader = {};
31109
31185
  __export(exports_loader, {
31110
31186
  loadOverride: () => loadOverride
31111
31187
  });
31112
- import { join as join24 } from "path";
31188
+ import { join as join21 } from "path";
31113
31189
  async function loadOverride(role, workdir, config2) {
31114
31190
  const overridePath = config2.prompts?.overrides?.[role];
31115
31191
  if (!overridePath) {
31116
31192
  return null;
31117
31193
  }
31118
- const absolutePath = join24(workdir, overridePath);
31194
+ const absolutePath = join21(workdir, overridePath);
31119
31195
  const file3 = Bun.file(absolutePath);
31120
31196
  if (!await file3.exists()) {
31121
31197
  return null;
@@ -31325,7 +31401,7 @@ async function rollbackToRef(workdir, ref) {
31325
31401
  }
31326
31402
  logger.info("tdd", "Successfully rolled back git changes", { ref });
31327
31403
  }
31328
- async function runTddSession(role, agent, story, config2, workdir, modelTier, beforeRef, contextMarkdown, lite = false, skipIsolation = false, constitution, featureName, interactionBridge) {
31404
+ async function runTddSession(role, agent, story, config2, workdir, modelTier, beforeRef, contextMarkdown, lite = false, skipIsolation = false, constitution, featureName, interactionBridge, projectDir) {
31329
31405
  const startTime = Date.now();
31330
31406
  let prompt;
31331
31407
  if (_sessionRunnerDeps.buildPrompt) {
@@ -31355,6 +31431,7 @@ async function runTddSession(role, agent, story, config2, workdir, modelTier, be
31355
31431
  dangerouslySkipPermissions: resolvePermissions(config2, "run").skipPermissions,
31356
31432
  pipelineStage: "run",
31357
31433
  config: config2,
31434
+ projectDir,
31358
31435
  maxInteractionTurns: config2.agent?.maxInteractionTurns,
31359
31436
  featureName,
31360
31437
  storyId: story.id,
@@ -31726,7 +31803,8 @@ async function runThreeSessionTdd(options) {
31726
31803
  dryRun = false,
31727
31804
  lite = false,
31728
31805
  _recursionDepth = 0,
31729
- interactionChain
31806
+ interactionChain,
31807
+ projectDir
31730
31808
  } = options;
31731
31809
  const logger = getLogger();
31732
31810
  const MAX_RECURSION_DEPTH = 2;
@@ -31785,7 +31863,7 @@ async function runThreeSessionTdd(options) {
31785
31863
  let session1;
31786
31864
  if (!isRetry) {
31787
31865
  const testWriterTier = config2.tdd.sessionTiers?.testWriter ?? "balanced";
31788
- session1 = await runTddSession("test-writer", agent, story, config2, workdir, testWriterTier, session1Ref, contextMarkdown, lite, lite, constitution, featureName, buildInteractionBridge(interactionChain, { featureName, storyId: story.id, stage: "execution" }));
31866
+ session1 = await runTddSession("test-writer", agent, story, config2, workdir, testWriterTier, session1Ref, contextMarkdown, lite, lite, constitution, featureName, buildInteractionBridge(interactionChain, { featureName, storyId: story.id, stage: "execution" }), projectDir);
31789
31867
  sessions.push(session1);
31790
31868
  }
31791
31869
  if (session1 && !session1.success) {
@@ -31847,7 +31925,7 @@ async function runThreeSessionTdd(options) {
31847
31925
  });
31848
31926
  const session2Ref = await captureGitRef(workdir) ?? "HEAD";
31849
31927
  const implementerTier = config2.tdd.sessionTiers?.implementer ?? modelTier;
31850
- const session2 = await runTddSession("implementer", agent, story, config2, workdir, implementerTier, session2Ref, contextMarkdown, lite, lite, constitution, featureName, buildInteractionBridge(interactionChain, { featureName, storyId: story.id, stage: "execution" }));
31928
+ const session2 = await runTddSession("implementer", agent, story, config2, workdir, implementerTier, session2Ref, contextMarkdown, lite, lite, constitution, featureName, buildInteractionBridge(interactionChain, { featureName, storyId: story.id, stage: "execution" }), projectDir);
31851
31929
  sessions.push(session2);
31852
31930
  if (!session2.success) {
31853
31931
  needsHumanReview = true;
@@ -31863,10 +31941,10 @@ async function runThreeSessionTdd(options) {
31863
31941
  lite
31864
31942
  };
31865
31943
  }
31866
- const fullSuiteGatePassed = await runFullSuiteGate(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, featureName);
31944
+ const { passed: fullSuiteGatePassed, cost: fullSuiteGateCost } = await runFullSuiteGate(story, config2, workdir, agent, implementerTier, contextMarkdown, lite, logger, featureName, projectDir);
31867
31945
  const session3Ref = await captureGitRef(workdir) ?? "HEAD";
31868
31946
  const verifierTier = config2.tdd.sessionTiers?.verifier ?? "fast";
31869
- const session3 = await runTddSession("verifier", agent, story, config2, workdir, verifierTier, session3Ref, undefined, false, false, constitution, featureName);
31947
+ const session3 = await runTddSession("verifier", agent, story, config2, workdir, verifierTier, session3Ref, undefined, false, false, constitution, featureName, undefined, projectDir);
31870
31948
  sessions.push(session3);
31871
31949
  const verdict = await readVerdict(workdir);
31872
31950
  await cleanupVerdict(workdir);
@@ -31926,7 +32004,7 @@ async function runThreeSessionTdd(options) {
31926
32004
  needsHumanReview = false;
31927
32005
  }
31928
32006
  }
31929
- const totalCost = sessions.reduce((sum, s) => sum + s.estimatedCost, 0);
32007
+ const totalCost = sessions.reduce((sum, s) => sum + s.estimatedCost, 0) + fullSuiteGateCost;
31930
32008
  logger.info("tdd", allSuccessful ? "[OK] Three-session TDD complete" : "[WARN] Three-session TDD needs review", {
31931
32009
  storyId: story.id,
31932
32010
  success: allSuccessful,
@@ -31962,6 +32040,22 @@ async function runThreeSessionTdd(options) {
31962
32040
  fullSuiteGatePassed
31963
32041
  };
31964
32042
  }
32043
+ function runThreeSessionTddFromCtx(ctx, opts) {
32044
+ return runThreeSessionTdd({
32045
+ agent: opts.agent,
32046
+ story: ctx.story,
32047
+ config: ctx.config,
32048
+ workdir: ctx.workdir,
32049
+ modelTier: ctx.routing.modelTier,
32050
+ featureName: ctx.prd.feature,
32051
+ contextMarkdown: ctx.contextMarkdown,
32052
+ constitution: ctx.constitution?.content,
32053
+ dryRun: opts.dryRun ?? false,
32054
+ lite: opts.lite ?? false,
32055
+ interactionChain: ctx.interaction,
32056
+ projectDir: ctx.projectDir
32057
+ });
32058
+ }
31965
32059
  var init_orchestrator2 = __esm(() => {
31966
32060
  init_config();
31967
32061
  init_greenfield();
@@ -31984,17 +32078,6 @@ var init_tdd = __esm(() => {
31984
32078
  });
31985
32079
 
31986
32080
  // src/pipeline/stages/execution.ts
31987
- import { existsSync as existsSync19 } from "fs";
31988
- import { join as join25 } from "path";
31989
- function resolveStoryWorkdir(repoRoot, storyWorkdir) {
31990
- if (!storyWorkdir)
31991
- return repoRoot;
31992
- const resolved = join25(repoRoot, storyWorkdir);
31993
- if (!existsSync19(resolved)) {
31994
- throw new Error(`[execution] story.workdir "${storyWorkdir}" does not exist at "${resolved}"`);
31995
- }
31996
- return resolved;
31997
- }
31998
32081
  function isAmbiguousOutput(output) {
31999
32082
  if (!output)
32000
32083
  return false;
@@ -32041,11 +32124,11 @@ var init_execution2 = __esm(() => {
32041
32124
  enabled: () => true,
32042
32125
  async execute(ctx) {
32043
32126
  const logger = getLogger();
32044
- const agent = (ctx.agentGetFn ?? _executionDeps.getAgent)(ctx.config.autoMode.defaultAgent);
32127
+ const agent = (ctx.agentGetFn ?? _executionDeps.getAgent)(ctx.rootConfig.autoMode.defaultAgent);
32045
32128
  if (!agent) {
32046
32129
  return {
32047
32130
  action: "fail",
32048
- reason: `Agent "${ctx.config.autoMode.defaultAgent}" not found`
32131
+ reason: `Agent "${ctx.rootConfig.autoMode.defaultAgent}" not found`
32049
32132
  };
32050
32133
  }
32051
32134
  const isTddStrategy = ctx.routing.testStrategy === "three-session-tdd" || ctx.routing.testStrategy === "three-session-tdd-lite";
@@ -32055,20 +32138,7 @@ var init_execution2 = __esm(() => {
32055
32138
  storyId: ctx.story.id,
32056
32139
  lite: isLiteMode
32057
32140
  });
32058
- const effectiveWorkdir = _executionDeps.resolveStoryWorkdir(ctx.workdir, ctx.story.workdir);
32059
- const tddResult = await runThreeSessionTdd({
32060
- agent,
32061
- story: ctx.story,
32062
- config: ctx.config,
32063
- workdir: effectiveWorkdir,
32064
- modelTier: ctx.routing.modelTier,
32065
- featureName: ctx.prd.feature,
32066
- contextMarkdown: ctx.contextMarkdown,
32067
- constitution: ctx.constitution?.content,
32068
- dryRun: false,
32069
- lite: isLiteMode,
32070
- interactionChain: ctx.interaction
32071
- });
32141
+ const tddResult = await runThreeSessionTddFromCtx(ctx, { agent, dryRun: false, lite: isLiteMode });
32072
32142
  ctx.agentResult = {
32073
32143
  success: tddResult.success,
32074
32144
  estimatedCost: tddResult.totalCost,
@@ -32131,17 +32201,17 @@ Category: ${tddResult.failureCategory ?? "unknown"}`,
32131
32201
  supportedTiers: agent.capabilities.supportedTiers
32132
32202
  });
32133
32203
  }
32134
- const storyWorkdir = _executionDeps.resolveStoryWorkdir(ctx.workdir, ctx.story.workdir);
32135
32204
  const keepSessionOpen = !!(ctx.config.review?.enabled === true || ctx.config.execution.rectification?.enabled === true);
32136
32205
  const result = await agent.run({
32137
32206
  prompt: ctx.prompt,
32138
- workdir: storyWorkdir,
32207
+ workdir: ctx.workdir,
32139
32208
  modelTier: ctx.routing.modelTier,
32140
- modelDef: resolveModelForAgent(ctx.config.models, ctx.routing.agent ?? ctx.config.autoMode.defaultAgent, ctx.routing.modelTier, ctx.config.autoMode.defaultAgent),
32209
+ modelDef: resolveModelForAgent(ctx.rootConfig.models, ctx.routing.agent ?? ctx.rootConfig.autoMode.defaultAgent, ctx.routing.modelTier, ctx.rootConfig.autoMode.defaultAgent),
32141
32210
  timeoutSeconds: ctx.config.execution.sessionTimeoutSeconds,
32142
32211
  dangerouslySkipPermissions: resolvePermissions(ctx.config, "run").skipPermissions,
32143
32212
  pipelineStage: "run",
32144
32213
  config: ctx.config,
32214
+ projectDir: ctx.projectDir,
32145
32215
  maxInteractionTurns: ctx.config.agent?.maxInteractionTurns,
32146
32216
  pidRegistry: ctx.pidRegistry,
32147
32217
  featureName: ctx.prd.feature,
@@ -32155,7 +32225,7 @@ Category: ${tddResult.failureCategory ?? "unknown"}`,
32155
32225
  })
32156
32226
  });
32157
32227
  ctx.agentResult = result;
32158
- await autoCommitIfDirty(storyWorkdir, "execution", "single-session", ctx.story.id);
32228
+ await autoCommitIfDirty(ctx.workdir, "execution", "single-session", ctx.story.id);
32159
32229
  const combinedOutput = (result.output ?? "") + (result.stderr ?? "");
32160
32230
  if (_executionDeps.detectMergeConflict(combinedOutput) && ctx.interaction && isTriggerEnabled("merge-conflict", ctx.config)) {
32161
32231
  const shouldProceed = await _executionDeps.checkMergeConflict({ featureName: ctx.prd.feature, storyId: ctx.story.id }, ctx.config, ctx.interaction);
@@ -32196,8 +32266,7 @@ Category: ${tddResult.failureCategory ?? "unknown"}`,
32196
32266
  detectMergeConflict,
32197
32267
  checkMergeConflict,
32198
32268
  isAmbiguousOutput,
32199
- checkStoryAmbiguity,
32200
- resolveStoryWorkdir
32269
+ checkStoryAmbiguity
32201
32270
  };
32202
32271
  });
32203
32272
 
@@ -32474,17 +32543,16 @@ var init_prompt = __esm(() => {
32474
32543
  async execute(ctx) {
32475
32544
  const logger = getLogger();
32476
32545
  const isBatch = ctx.stories.length > 1;
32477
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
32478
32546
  const acceptanceEntries = await _loadAcceptanceEntries(ctx, logger);
32479
32547
  let prompt;
32480
32548
  if (isBatch) {
32481
- const builder = PromptBuilder.for("batch").withLoader(ctx.workdir, ctx.config).stories(ctx.stories).context(ctx.contextMarkdown).constitution(ctx.constitution?.content).testCommand(effectiveConfig.quality?.commands?.test).hermeticConfig(effectiveConfig.quality?.testing);
32549
+ const builder = PromptBuilder.for("batch").withLoader(ctx.workdir, ctx.config).stories(ctx.stories).context(ctx.contextMarkdown).constitution(ctx.constitution?.content).testCommand(ctx.config.quality?.commands?.test).hermeticConfig(ctx.config.quality?.testing);
32482
32550
  if (acceptanceEntries.length > 0)
32483
32551
  builder.acceptanceContext(acceptanceEntries);
32484
32552
  prompt = await builder.build();
32485
32553
  } else {
32486
32554
  const role = ctx.routing.testStrategy === "no-test" ? "no-test" : "tdd-simple";
32487
- const builder = PromptBuilder.for(role).withLoader(ctx.workdir, ctx.config).story(ctx.story).context(ctx.contextMarkdown).constitution(ctx.constitution?.content).testCommand(effectiveConfig.quality?.commands?.test).hermeticConfig(effectiveConfig.quality?.testing).noTestJustification(ctx.story.routing?.noTestJustification);
32555
+ const builder = PromptBuilder.for(role).withLoader(ctx.workdir, ctx.config).story(ctx.story).context(ctx.contextMarkdown).constitution(ctx.constitution?.content).testCommand(ctx.config.quality?.commands?.test).hermeticConfig(ctx.config.quality?.testing).noTestJustification(ctx.story.routing?.noTestJustification);
32488
32556
  if (acceptanceEntries.length > 0)
32489
32557
  builder.acceptanceContext(acceptanceEntries);
32490
32558
  prompt = await builder.build();
@@ -32717,7 +32785,18 @@ async function _defaultRunDebate(storyId, stageConfig, prompt, config2) {
32717
32785
  return { output, totalCostUsd };
32718
32786
  }
32719
32787
  async function runRectificationLoop2(opts) {
32720
- const { config: config2, workdir, story, testCommand, timeoutSeconds, testOutput, promptPrefix, featureName, agentGetFn } = opts;
32788
+ const {
32789
+ config: config2,
32790
+ workdir,
32791
+ story,
32792
+ testCommand,
32793
+ timeoutSeconds,
32794
+ testOutput,
32795
+ promptPrefix,
32796
+ featureName,
32797
+ agentGetFn,
32798
+ projectDir
32799
+ } = opts;
32721
32800
  const logger = getSafeLogger();
32722
32801
  const rectificationConfig = config2.execution.rectification;
32723
32802
  const testSummary = parseBunTestOutput(testOutput);
@@ -32728,7 +32807,8 @@ async function runRectificationLoop2(opts) {
32728
32807
  currentFailures: testSummary.failed,
32729
32808
  lastExitCode: 1
32730
32809
  };
32731
- return runSharedRectificationLoop({
32810
+ let costAccum = 0;
32811
+ const succeeded = await runSharedRectificationLoop({
32732
32812
  stage: "rectification",
32733
32813
  storyId: story.id,
32734
32814
  maxAttempts: rectificationConfig.maxRetries,
@@ -32809,11 +32889,13 @@ ${rectificationPrompt}`;
32809
32889
  dangerouslySkipPermissions: resolvePermissions(config2, "rectification").skipPermissions,
32810
32890
  pipelineStage: "rectification",
32811
32891
  config: config2,
32892
+ projectDir,
32812
32893
  maxInteractionTurns: config2.agent?.maxInteractionTurns,
32813
32894
  featureName,
32814
32895
  storyId: story.id,
32815
32896
  sessionRole: "implementer"
32816
32897
  });
32898
+ costAccum += agentResult.estimatedCost ?? 0;
32817
32899
  if (agentResult.success) {
32818
32900
  logger?.info("rectification", `Agent ${label} session complete`, {
32819
32901
  storyId: story.id,
@@ -32933,11 +33015,13 @@ ${escalationPrompt}`;
32933
33015
  dangerouslySkipPermissions: resolvePermissions(config2, "rectification").skipPermissions,
32934
33016
  pipelineStage: "rectification",
32935
33017
  config: config2,
33018
+ projectDir,
32936
33019
  maxInteractionTurns: config2.agent?.maxInteractionTurns,
32937
33020
  featureName,
32938
33021
  storyId: story.id,
32939
33022
  sessionRole: "implementer"
32940
33023
  });
33024
+ costAccum += escalationRunResult.estimatedCost ?? 0;
32941
33025
  logger?.info("rectification", "escalated rectification attempt cost", {
32942
33026
  storyId: story.id,
32943
33027
  escalatedTier,
@@ -32974,6 +33058,21 @@ ${escalationPrompt}`;
32974
33058
  }
32975
33059
  throw error48;
32976
33060
  });
33061
+ return { succeeded, cost: costAccum };
33062
+ }
33063
+ function runRectificationLoopFromCtx(ctx, opts) {
33064
+ return runRectificationLoop2({
33065
+ config: ctx.config,
33066
+ workdir: ctx.workdir,
33067
+ story: ctx.story,
33068
+ testCommand: opts.testCommand,
33069
+ timeoutSeconds: ctx.config.execution.verificationTimeoutSeconds,
33070
+ testOutput: opts.testOutput,
33071
+ promptPrefix: opts.promptPrefix,
33072
+ featureName: ctx.prd.feature,
33073
+ agentGetFn: ctx.agentGetFn,
33074
+ projectDir: ctx.projectDir
33075
+ });
32977
33076
  }
32978
33077
  var _rectificationDeps;
32979
33078
  var init_rectification_loop = __esm(() => {
@@ -33035,36 +33134,28 @@ var init_rectify = __esm(() => {
33035
33134
  attempt: rectifyAttempt,
33036
33135
  testOutput
33037
33136
  });
33038
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
33039
- const testCommand = effectiveConfig.review?.commands?.test ?? effectiveConfig.quality.commands.test ?? "bun test";
33040
- const fixed = await _rectifyDeps.runRectificationLoop({
33041
- config: ctx.config,
33042
- workdir: ctx.workdir,
33043
- story: ctx.story,
33044
- testCommand,
33045
- timeoutSeconds: effectiveConfig.execution.verificationTimeoutSeconds,
33046
- testOutput,
33047
- agentGetFn: ctx.agentGetFn
33048
- });
33137
+ const testCommand = ctx.config.review?.commands?.test ?? ctx.config.quality.commands.test ?? "bun test";
33138
+ const { succeeded, cost } = await _rectifyDeps.runRectificationLoop(ctx, { testCommand, testOutput });
33049
33139
  pipelineEventBus.emit({
33050
33140
  type: "rectify:completed",
33051
33141
  storyId: ctx.story.id,
33052
33142
  attempt: rectifyAttempt,
33053
- fixed
33143
+ fixed: succeeded
33054
33144
  });
33055
- if (fixed) {
33145
+ if (succeeded) {
33056
33146
  logger.info("rectify", "Rectification succeeded \u2014 retrying verify", { storyId: ctx.story.id });
33057
33147
  ctx.verifyResult = undefined;
33058
- return { action: "retry", fromStage: "verify" };
33148
+ return { action: "retry", fromStage: "verify", cost };
33059
33149
  }
33060
33150
  logger.warn("rectify", "Rectification exhausted \u2014 escalating", { storyId: ctx.story.id });
33061
33151
  return {
33062
33152
  action: "escalate",
33063
- reason: `Rectification exhausted after ${maxRetries} attempts (${verifyResult.failCount} test failures)`
33153
+ reason: `Rectification exhausted after ${maxRetries} attempts (${verifyResult.failCount} test failures)`,
33154
+ cost
33064
33155
  };
33065
33156
  }
33066
33157
  };
33067
- _rectifyDeps = { runRectificationLoop: runRectificationLoop2 };
33158
+ _rectifyDeps = { runRectificationLoop: runRectificationLoopFromCtx };
33068
33159
  });
33069
33160
 
33070
33161
  // src/verification/orchestrator-types.ts
@@ -33171,16 +33262,16 @@ class AcceptanceStrategy {
33171
33262
  }, timeoutMs);
33172
33263
  const exitCode = await Promise.race([
33173
33264
  proc.exited,
33174
- new Promise((resolve7) => setTimeout(() => resolve7(124), timeoutMs + 6000))
33265
+ new Promise((resolve8) => setTimeout(() => resolve8(124), timeoutMs + 6000))
33175
33266
  ]);
33176
33267
  clearTimeout(timeoutId);
33177
33268
  const stdout = await Promise.race([
33178
33269
  new Response(proc.stdout).text(),
33179
- new Promise((resolve7) => setTimeout(() => resolve7(""), 3000))
33270
+ new Promise((resolve8) => setTimeout(() => resolve8(""), 3000))
33180
33271
  ]);
33181
33272
  const stderr = await Promise.race([
33182
33273
  new Response(proc.stderr).text(),
33183
- new Promise((resolve7) => setTimeout(() => resolve7(""), 3000))
33274
+ new Promise((resolve8) => setTimeout(() => resolve8(""), 3000))
33184
33275
  ]);
33185
33276
  const durationMs = Date.now() - start;
33186
33277
  if (timedOut || exitCode === 124) {
@@ -33590,34 +33681,31 @@ var init_regression2 = __esm(() => {
33590
33681
  regressionStage = {
33591
33682
  name: "regression",
33592
33683
  enabled(ctx) {
33593
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
33594
- const mode = effectiveConfig.execution.regressionGate?.mode ?? "deferred";
33684
+ const mode = ctx.config.execution.regressionGate?.mode ?? "deferred";
33595
33685
  if (mode !== "per-story")
33596
33686
  return false;
33597
33687
  if (ctx.verifyResult && !ctx.verifyResult.success)
33598
33688
  return false;
33599
- const gateEnabled = effectiveConfig.execution.regressionGate?.enabled ?? true;
33689
+ const gateEnabled = ctx.config.execution.regressionGate?.enabled ?? true;
33600
33690
  return gateEnabled;
33601
33691
  },
33602
33692
  skipReason(ctx) {
33603
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
33604
- const mode = effectiveConfig.execution.regressionGate?.mode ?? "deferred";
33693
+ const mode = ctx.config.execution.regressionGate?.mode ?? "deferred";
33605
33694
  if (mode !== "per-story")
33606
33695
  return `not needed (regression mode is '${mode}', not 'per-story')`;
33607
33696
  return "disabled (regression gate not enabled in config)";
33608
33697
  },
33609
33698
  async execute(ctx) {
33610
33699
  const logger = getLogger();
33611
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
33612
- const testCommand = effectiveConfig.review?.commands?.test ?? effectiveConfig.quality.commands.test ?? "bun test";
33613
- const timeoutSeconds = effectiveConfig.execution.regressionGate?.timeoutSeconds ?? 120;
33700
+ const testCommand = ctx.config.review?.commands?.test ?? ctx.config.quality.commands.test ?? "bun test";
33701
+ const timeoutSeconds = ctx.config.execution.regressionGate?.timeoutSeconds ?? 120;
33614
33702
  logger.info("regression", "Running full-suite regression gate", { storyId: ctx.story.id });
33615
33703
  const verifyCtx = {
33616
33704
  workdir: ctx.workdir,
33617
33705
  testCommand,
33618
33706
  timeoutSeconds,
33619
33707
  storyId: ctx.story.id,
33620
- acceptOnTimeout: effectiveConfig.execution.regressionGate?.acceptOnTimeout ?? true,
33708
+ acceptOnTimeout: ctx.config.execution.regressionGate?.acceptOnTimeout ?? true,
33621
33709
  config: ctx.config
33622
33710
  };
33623
33711
  const result = await _regressionStageDeps.verifyRegression(verifyCtx);
@@ -34224,7 +34312,6 @@ var init_routing = __esm(() => {
34224
34312
  });
34225
34313
 
34226
34314
  // src/pipeline/stages/routing.ts
34227
- import { join as join26 } from "path";
34228
34315
  var routingStage, _routingDeps;
34229
34316
  var init_routing2 = __esm(() => {
34230
34317
  init_greenfield();
@@ -34237,13 +34324,12 @@ var init_routing2 = __esm(() => {
34237
34324
  enabled: () => true,
34238
34325
  async execute(ctx) {
34239
34326
  const logger = getLogger();
34240
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
34241
- const agentName = effectiveConfig.execution?.agent ?? "claude";
34327
+ const agentName = ctx.config.execution?.agent ?? "claude";
34242
34328
  const adapter = ctx.agentGetFn ? ctx.agentGetFn(agentName) : undefined;
34243
34329
  if (ctx.story.id === ctx.stories[0]?.id) {
34244
34330
  _routingDeps.clearCache();
34245
34331
  }
34246
- const decision = await _routingDeps.resolveRouting(ctx.story, effectiveConfig, ctx.plugins, adapter);
34332
+ const decision = await _routingDeps.resolveRouting(ctx.story, ctx.config, ctx.plugins, adapter);
34247
34333
  const TIER_RANK = { fast: 0, balanced: 1, powerful: 2 };
34248
34334
  const derivedTier = decision.modelTier;
34249
34335
  const previousTier = ctx.story.routing?.modelTier;
@@ -34261,9 +34347,9 @@ var init_routing2 = __esm(() => {
34261
34347
  if (ctx.prdPath) {
34262
34348
  await _routingDeps.savePRD(ctx.prd, ctx.prdPath);
34263
34349
  }
34264
- const greenfieldDetectionEnabled = effectiveConfig.tdd.greenfieldDetection ?? true;
34350
+ const greenfieldDetectionEnabled = ctx.config.tdd.greenfieldDetection ?? true;
34265
34351
  if (greenfieldDetectionEnabled && routing.testStrategy.startsWith("three-session-tdd")) {
34266
- const greenfieldScanDir = ctx.story.workdir ? join26(ctx.workdir, ctx.story.workdir) : ctx.workdir;
34352
+ const greenfieldScanDir = ctx.workdir;
34267
34353
  const isGreenfield = await _routingDeps.isGreenfieldStory(ctx.story, greenfieldScanDir);
34268
34354
  if (isGreenfield) {
34269
34355
  logger.info("routing", "Greenfield detected \u2014 forcing test-after strategy", {
@@ -34314,7 +34400,7 @@ var init_crash_detector = __esm(() => {
34314
34400
  });
34315
34401
 
34316
34402
  // src/pipeline/stages/verify.ts
34317
- import { join as join27 } from "path";
34403
+ import { join as join22 } from "path";
34318
34404
  function coerceSmartTestRunner(val) {
34319
34405
  if (val === undefined || val === true)
34320
34406
  return DEFAULT_SMART_RUNNER_CONFIG2;
@@ -34330,7 +34416,7 @@ function buildScopedCommand2(testFiles, baseCommand, testScopedTemplate) {
34330
34416
  }
34331
34417
  async function readPackageName(dir) {
34332
34418
  try {
34333
- const content = await Bun.file(join27(dir, "package.json")).json();
34419
+ const content = await Bun.file(join22(dir, "package.json")).json();
34334
34420
  return typeof content.name === "string" ? content.name : null;
34335
34421
  } catch {
34336
34422
  return null;
@@ -34363,26 +34449,24 @@ var init_verify = __esm(() => {
34363
34449
  skipReason: () => "not needed (full-suite gate already passed)",
34364
34450
  async execute(ctx) {
34365
34451
  const logger = getLogger();
34366
- const effectiveConfig = ctx.effectiveConfig ?? ctx.config;
34367
- if (!effectiveConfig.quality.requireTests) {
34452
+ if (!ctx.config.quality.requireTests) {
34368
34453
  logger.debug("verify", "Skipping verification (quality.requireTests = false)", { storyId: ctx.story.id });
34369
34454
  return { action: "continue" };
34370
34455
  }
34371
- const testCommand = effectiveConfig.review?.commands?.test ?? effectiveConfig.quality.commands.test;
34372
- const testScopedTemplate = effectiveConfig.quality.commands.testScoped;
34456
+ const testCommand = ctx.config.review?.commands?.test ?? ctx.config.quality.commands.test;
34457
+ const testScopedTemplate = ctx.config.quality.commands.testScoped;
34373
34458
  if (!testCommand) {
34374
34459
  logger.debug("verify", "Skipping verification (no test command configured)", { storyId: ctx.story.id });
34375
34460
  return { action: "continue" };
34376
34461
  }
34377
34462
  logger.info("verify", "Running verification", { storyId: ctx.story.id });
34378
- const effectiveWorkdir = ctx.story.workdir ? join27(ctx.workdir, ctx.story.workdir) : ctx.workdir;
34379
34463
  let effectiveCommand = testCommand;
34380
34464
  let isFullSuite = true;
34381
- const smartRunnerConfig = coerceSmartTestRunner(effectiveConfig.execution.smartTestRunner);
34382
- const regressionMode = effectiveConfig.execution.regressionGate?.mode ?? "deferred";
34465
+ const smartRunnerConfig = coerceSmartTestRunner(ctx.config.execution.smartTestRunner);
34466
+ const regressionMode = ctx.config.execution.regressionGate?.mode ?? "deferred";
34383
34467
  let resolvedTestScopedTemplate = testScopedTemplate;
34384
34468
  if (testScopedTemplate && ctx.story.workdir) {
34385
- const resolved = await resolvePackageTemplate(testScopedTemplate, effectiveWorkdir);
34469
+ const resolved = await resolvePackageTemplate(testScopedTemplate, ctx.workdir);
34386
34470
  resolvedTestScopedTemplate = resolved ?? undefined;
34387
34471
  }
34388
34472
  const isMonorepoOrchestrator = isMonorepoOrchestratorCommand(testCommand);
@@ -34401,8 +34485,8 @@ var init_verify = __esm(() => {
34401
34485
  });
34402
34486
  }
34403
34487
  } else if (smartRunnerConfig.enabled) {
34404
- const sourceFiles = await _smartRunnerDeps.getChangedSourceFiles(effectiveWorkdir, ctx.storyGitRef, ctx.story.workdir);
34405
- const pass1Files = await _smartRunnerDeps.mapSourceToTests(sourceFiles, effectiveWorkdir);
34488
+ const sourceFiles = await _smartRunnerDeps.getChangedSourceFiles(ctx.workdir, ctx.storyGitRef, ctx.story.workdir);
34489
+ const pass1Files = await _smartRunnerDeps.mapSourceToTests(sourceFiles, ctx.workdir);
34406
34490
  if (pass1Files.length > 0) {
34407
34491
  logger.info("verify", `[smart-runner] Pass 1: path convention matched ${pass1Files.length} test files`, {
34408
34492
  storyId: ctx.story.id
@@ -34410,7 +34494,7 @@ var init_verify = __esm(() => {
34410
34494
  effectiveCommand = buildScopedCommand2(pass1Files, testCommand, resolvedTestScopedTemplate);
34411
34495
  isFullSuite = false;
34412
34496
  } else if (smartRunnerConfig.fallback === "import-grep") {
34413
- const pass2Files = await _smartRunnerDeps.importGrepFallback(sourceFiles, effectiveWorkdir, smartRunnerConfig.testFilePatterns);
34497
+ const pass2Files = await _smartRunnerDeps.importGrepFallback(sourceFiles, ctx.workdir, smartRunnerConfig.testFilePatterns);
34414
34498
  if (pass2Files.length > 0) {
34415
34499
  logger.info("verify", `[smart-runner] Pass 2: import-grep matched ${pass2Files.length} test files`, {
34416
34500
  storyId: ctx.story.id
@@ -34436,10 +34520,10 @@ var init_verify = __esm(() => {
34436
34520
  command: effectiveCommand
34437
34521
  });
34438
34522
  const result = await _verifyDeps.regression({
34439
- workdir: effectiveWorkdir,
34523
+ workdir: ctx.workdir,
34440
34524
  command: effectiveCommand,
34441
- timeoutSeconds: effectiveConfig.execution.verificationTimeoutSeconds,
34442
- acceptOnTimeout: effectiveConfig.execution.regressionGate?.acceptOnTimeout ?? true
34525
+ timeoutSeconds: ctx.config.execution.verificationTimeoutSeconds,
34526
+ acceptOnTimeout: ctx.config.execution.regressionGate?.acceptOnTimeout ?? true
34443
34527
  });
34444
34528
  ctx.verifyResult = {
34445
34529
  success: result.success,
@@ -34456,7 +34540,7 @@ var init_verify = __esm(() => {
34456
34540
  };
34457
34541
  if (!result.success) {
34458
34542
  if (result.status === "TIMEOUT") {
34459
- const timeout = effectiveConfig.execution.verificationTimeoutSeconds;
34543
+ const timeout = ctx.config.execution.verificationTimeoutSeconds;
34460
34544
  logger.error("verify", `Test suite exceeded timeout (${timeout}s). This is NOT a test failure \u2014 consider increasing execution.verificationTimeoutSeconds or scoping tests.`, {
34461
34545
  exitCode: result.status,
34462
34546
  storyId: ctx.story.id,
@@ -34474,7 +34558,7 @@ var init_verify = __esm(() => {
34474
34558
  if (result.status === "TIMEOUT" || detectRuntimeCrash(result.output)) {
34475
34559
  return {
34476
34560
  action: "escalate",
34477
- reason: result.status === "TIMEOUT" ? `Test suite TIMEOUT after ${effectiveConfig.execution.verificationTimeoutSeconds}s (not a code failure)` : `Tests failed with runtime crash (exit code ${result.status ?? "non-zero"})`
34561
+ reason: result.status === "TIMEOUT" ? `Test suite TIMEOUT after ${ctx.config.execution.verificationTimeoutSeconds}s (not a code failure)` : `Tests failed with runtime crash (exit code ${result.status ?? "non-zero"})`
34478
34562
  };
34479
34563
  }
34480
34564
  return { action: "continue" };
@@ -34592,9 +34676,9 @@ __export(exports_init_context, {
34592
34676
  generateContextTemplate: () => generateContextTemplate,
34593
34677
  _initContextDeps: () => _initContextDeps
34594
34678
  });
34595
- import { existsSync as existsSync22 } from "fs";
34679
+ import { existsSync as existsSync21 } from "fs";
34596
34680
  import { mkdir as mkdir2 } from "fs/promises";
34597
- import { basename as basename4, join as join31 } from "path";
34681
+ import { basename as basename3, join as join26 } from "path";
34598
34682
  async function findFiles(dir, maxFiles = 200) {
34599
34683
  try {
34600
34684
  const proc = Bun.spawnSync([
@@ -34622,8 +34706,8 @@ async function findFiles(dir, maxFiles = 200) {
34622
34706
  return [];
34623
34707
  }
34624
34708
  async function readPackageManifest(projectRoot) {
34625
- const packageJsonPath = join31(projectRoot, "package.json");
34626
- if (!existsSync22(packageJsonPath)) {
34709
+ const packageJsonPath = join26(projectRoot, "package.json");
34710
+ if (!existsSync21(packageJsonPath)) {
34627
34711
  return null;
34628
34712
  }
34629
34713
  try {
@@ -34640,8 +34724,8 @@ async function readPackageManifest(projectRoot) {
34640
34724
  }
34641
34725
  }
34642
34726
  async function readReadmeSnippet(projectRoot) {
34643
- const readmePath = join31(projectRoot, "README.md");
34644
- if (!existsSync22(readmePath)) {
34727
+ const readmePath = join26(projectRoot, "README.md");
34728
+ if (!existsSync21(readmePath)) {
34645
34729
  return null;
34646
34730
  }
34647
34731
  try {
@@ -34658,8 +34742,8 @@ async function detectEntryPoints(projectRoot) {
34658
34742
  const candidates = ["src/index.ts", "src/main.ts", "main.go", "src/lib.rs"];
34659
34743
  const found = [];
34660
34744
  for (const candidate of candidates) {
34661
- const path13 = join31(projectRoot, candidate);
34662
- if (existsSync22(path13)) {
34745
+ const path13 = join26(projectRoot, candidate);
34746
+ if (existsSync21(path13)) {
34663
34747
  found.push(candidate);
34664
34748
  }
34665
34749
  }
@@ -34669,8 +34753,8 @@ async function detectConfigFiles(projectRoot) {
34669
34753
  const candidates = ["tsconfig.json", "biome.json", "turbo.json", ".env.example"];
34670
34754
  const found = [];
34671
34755
  for (const candidate of candidates) {
34672
- const path13 = join31(projectRoot, candidate);
34673
- if (existsSync22(path13)) {
34756
+ const path13 = join26(projectRoot, candidate);
34757
+ if (existsSync21(path13)) {
34674
34758
  found.push(candidate);
34675
34759
  }
34676
34760
  }
@@ -34682,7 +34766,7 @@ async function scanProject(projectRoot) {
34682
34766
  const readmeSnippet = await readReadmeSnippet(projectRoot);
34683
34767
  const entryPoints = await detectEntryPoints(projectRoot);
34684
34768
  const configFiles = await detectConfigFiles(projectRoot);
34685
- const projectName = packageManifest?.name || basename4(projectRoot);
34769
+ const projectName = packageManifest?.name || basename3(projectRoot);
34686
34770
  return {
34687
34771
  projectName,
34688
34772
  fileTree,
@@ -34830,13 +34914,13 @@ function generatePackageContextTemplate(packagePath) {
34830
34914
  }
34831
34915
  async function initPackage(repoRoot, packagePath, force = false) {
34832
34916
  const logger = getLogger();
34833
- const naxDir = join31(repoRoot, ".nax", "mono", packagePath);
34834
- const contextPath = join31(naxDir, "context.md");
34835
- if (existsSync22(contextPath) && !force) {
34917
+ const naxDir = join26(repoRoot, ".nax", "mono", packagePath);
34918
+ const contextPath = join26(naxDir, "context.md");
34919
+ if (existsSync21(contextPath) && !force) {
34836
34920
  logger.info("init", "Package context.md already exists (use --force to overwrite)", { path: contextPath });
34837
34921
  return;
34838
34922
  }
34839
- if (!existsSync22(naxDir)) {
34923
+ if (!existsSync21(naxDir)) {
34840
34924
  await mkdir2(naxDir, { recursive: true });
34841
34925
  }
34842
34926
  const content = generatePackageContextTemplate(packagePath);
@@ -34845,13 +34929,13 @@ async function initPackage(repoRoot, packagePath, force = false) {
34845
34929
  }
34846
34930
  async function initContext(projectRoot, options = {}) {
34847
34931
  const logger = getLogger();
34848
- const naxDir = join31(projectRoot, ".nax");
34849
- const contextPath = join31(naxDir, "context.md");
34850
- if (existsSync22(contextPath) && !options.force) {
34932
+ const naxDir = join26(projectRoot, ".nax");
34933
+ const contextPath = join26(naxDir, "context.md");
34934
+ if (existsSync21(contextPath) && !options.force) {
34851
34935
  logger.info("init", "context.md already exists, skipping (use --force to overwrite)", { path: contextPath });
34852
34936
  return;
34853
34937
  }
34854
- if (!existsSync22(naxDir)) {
34938
+ if (!existsSync21(naxDir)) {
34855
34939
  await mkdir2(naxDir, { recursive: true });
34856
34940
  }
34857
34941
  const scan = await scanProject(projectRoot);
@@ -34876,23 +34960,23 @@ var init_init_context = __esm(() => {
34876
34960
 
34877
34961
  // src/utils/path-security.ts
34878
34962
  import { realpathSync as realpathSync3 } from "fs";
34879
- import { dirname as dirname5, isAbsolute as isAbsolute5, join as join32, normalize as normalize2, resolve as resolve7 } from "path";
34963
+ import { dirname as dirname4, isAbsolute as isAbsolute5, join as join27, normalize as normalize2, resolve as resolve8 } from "path";
34880
34964
  function safeRealpathForComparison(p) {
34881
34965
  try {
34882
34966
  return realpathSync3(p);
34883
34967
  } catch {
34884
- const parent = dirname5(p);
34968
+ const parent = dirname4(p);
34885
34969
  if (parent === p)
34886
34970
  return normalize2(p);
34887
34971
  const resolvedParent = safeRealpathForComparison(parent);
34888
- return join32(resolvedParent, p.split("/").pop() ?? "");
34972
+ return join27(resolvedParent, p.split("/").pop() ?? "");
34889
34973
  }
34890
34974
  }
34891
34975
  function validateModulePath(modulePath, allowedRoots) {
34892
34976
  if (!modulePath) {
34893
34977
  return { valid: false, error: "Module path is empty" };
34894
34978
  }
34895
- const resolvedRoots = allowedRoots.map((r) => safeRealpathForComparison(resolve7(r)));
34979
+ const resolvedRoots = allowedRoots.map((r) => safeRealpathForComparison(resolve8(r)));
34896
34980
  if (isAbsolute5(modulePath)) {
34897
34981
  const normalized = normalize2(modulePath);
34898
34982
  const resolved = safeRealpathForComparison(normalized);
@@ -34902,8 +34986,8 @@ function validateModulePath(modulePath, allowedRoots) {
34902
34986
  }
34903
34987
  } else {
34904
34988
  for (let i = 0;i < allowedRoots.length; i++) {
34905
- const originalRoot = resolve7(allowedRoots[i]);
34906
- const absoluteInput = resolve7(join32(originalRoot, modulePath));
34989
+ const originalRoot = resolve8(allowedRoots[i]);
34990
+ const absoluteInput = resolve8(join27(originalRoot, modulePath));
34907
34991
  const resolved = safeRealpathForComparison(absoluteInput);
34908
34992
  const resolvedRoot = resolvedRoots[i];
34909
34993
  if (resolved.startsWith(`${resolvedRoot}/`) || resolved === resolvedRoot) {
@@ -35264,11 +35348,11 @@ function getSafeLogger6() {
35264
35348
  return getSafeLogger();
35265
35349
  }
35266
35350
  function extractPluginName(pluginPath) {
35267
- const basename6 = path13.basename(pluginPath);
35268
- if (basename6 === "index.ts" || basename6 === "index.js" || basename6 === "index.mjs") {
35351
+ const basename5 = path13.basename(pluginPath);
35352
+ if (basename5 === "index.ts" || basename5 === "index.js" || basename5 === "index.mjs") {
35269
35353
  return path13.basename(path13.dirname(pluginPath));
35270
35354
  }
35271
- return basename6.replace(/\.(ts|js|mjs)$/, "");
35355
+ return basename5.replace(/\.(ts|js|mjs)$/, "");
35272
35356
  }
35273
35357
  async function loadPlugins(globalDir, projectDir, configPlugins, projectRoot, disabledPlugins) {
35274
35358
  const loadedPlugins = [];
@@ -35429,7 +35513,7 @@ async function loadAndValidatePlugin(initialModulePath, config2, allowedRoots =
35429
35513
  return null;
35430
35514
  }
35431
35515
  }
35432
- var _pluginErrorSink = (...args) => console.error(...args);
35516
+ var _pluginErrorSink = () => {};
35433
35517
  var init_loader4 = __esm(() => {
35434
35518
  init_logger2();
35435
35519
  init_path_security2();
@@ -35439,19 +35523,19 @@ var init_loader4 = __esm(() => {
35439
35523
  });
35440
35524
 
35441
35525
  // src/hooks/runner.ts
35442
- import { join as join47 } from "path";
35526
+ import { join as join42 } from "path";
35443
35527
  async function loadHooksConfig(projectDir, globalDir) {
35444
35528
  let globalHooks = { hooks: {} };
35445
35529
  let projectHooks = { hooks: {} };
35446
35530
  let skipGlobal = false;
35447
- const projectPath = join47(projectDir, "hooks.json");
35531
+ const projectPath = join42(projectDir, "hooks.json");
35448
35532
  const projectData = await loadJsonFile(projectPath, "hooks");
35449
35533
  if (projectData) {
35450
35534
  projectHooks = projectData;
35451
35535
  skipGlobal = projectData.skipGlobal ?? false;
35452
35536
  }
35453
35537
  if (!skipGlobal && globalDir) {
35454
- const globalPath = join47(globalDir, "hooks.json");
35538
+ const globalPath = join42(globalDir, "hooks.json");
35455
35539
  const globalData = await loadJsonFile(globalPath, "hooks");
35456
35540
  if (globalData) {
35457
35541
  globalHooks = globalData;
@@ -35651,7 +35735,7 @@ var package_default;
35651
35735
  var init_package = __esm(() => {
35652
35736
  package_default = {
35653
35737
  name: "@nathapp/nax",
35654
- version: "0.58.4",
35738
+ version: "0.59.0",
35655
35739
  description: "AI Coding Agent Orchestrator \u2014 loops until done",
35656
35740
  type: "module",
35657
35741
  bin: {
@@ -35731,8 +35815,8 @@ var init_version = __esm(() => {
35731
35815
  NAX_VERSION = package_default.version;
35732
35816
  NAX_COMMIT = (() => {
35733
35817
  try {
35734
- if (/^[0-9a-f]{6,10}$/.test("05836706"))
35735
- return "05836706";
35818
+ if (/^[0-9a-f]{6,10}$/.test("13990b5b"))
35819
+ return "13990b5b";
35736
35820
  } catch {}
35737
35821
  try {
35738
35822
  const result = Bun.spawnSync(["git", "rev-parse", "--short", "HEAD"], {
@@ -36028,18 +36112,18 @@ var init_crash_signals = __esm(() => {
36028
36112
 
36029
36113
  // src/execution/crash-recovery.ts
36030
36114
  function installCrashHandlers(ctx) {
36031
- if (handlersInstalled) {
36032
- return () => {};
36115
+ if (activeCleanup) {
36116
+ activeCleanup();
36033
36117
  }
36034
36118
  const cleanup = installSignalHandlers(ctx);
36035
- handlersInstalled = true;
36036
- return () => {
36119
+ activeCleanup = () => {
36037
36120
  cleanup();
36038
36121
  stopHeartbeat();
36039
- handlersInstalled = false;
36122
+ activeCleanup = null;
36040
36123
  };
36124
+ return activeCleanup;
36041
36125
  }
36042
- var handlersInstalled = false;
36126
+ var activeCleanup = null;
36043
36127
  var init_crash_recovery = __esm(() => {
36044
36128
  init_crash_heartbeat();
36045
36129
  init_crash_signals();
@@ -36360,12 +36444,13 @@ async function diagnoseAcceptanceFailure(agent, options) {
36360
36444
  semanticVerdicts: options.semanticVerdicts
36361
36445
  });
36362
36446
  try {
36447
+ const timeoutSeconds = (config2.acceptance?.timeoutMs ?? 120000) / 1000;
36363
36448
  const result = await agent.run({
36364
36449
  prompt,
36365
36450
  workdir,
36366
36451
  modelTier: undefined,
36367
36452
  modelDef,
36368
- timeoutSeconds: 300,
36453
+ timeoutSeconds,
36369
36454
  sessionRole: "diagnose",
36370
36455
  acpSessionName: sessionName,
36371
36456
  featureName,
@@ -36374,12 +36459,13 @@ async function diagnoseAcceptanceFailure(agent, options) {
36374
36459
  });
36375
36460
  const diagnosis = parseDiagnosisResult(result.output);
36376
36461
  if (diagnosis) {
36377
- return diagnosis;
36462
+ return { ...diagnosis, cost: result.estimatedCost ?? 0 };
36378
36463
  }
36379
36464
  return {
36380
36465
  verdict: "source_bug",
36381
36466
  reasoning: "diagnosis failed \u2014 falling back to source fix",
36382
- confidence: 0
36467
+ confidence: 0,
36468
+ cost: result.estimatedCost ?? 0
36383
36469
  };
36384
36470
  } catch {
36385
36471
  return {
@@ -36491,7 +36577,7 @@ __export(exports_acceptance_loop, {
36491
36577
  _regenerateDeps: () => _regenerateDeps,
36492
36578
  _acceptanceLoopDeps: () => _acceptanceLoopDeps
36493
36579
  });
36494
- import path15, { join as join48 } from "path";
36580
+ import path15, { join as join43 } from "path";
36495
36581
  function isStubTestFile(content) {
36496
36582
  return /expect\s*\(\s*true\s*\)\s*\.\s*toBe\s*\(\s*(?:false|true)\s*\)/.test(content);
36497
36583
  }
@@ -36580,15 +36666,16 @@ async function executeFixStory(ctx, story, prd, iterations) {
36580
36666
  agent: ctx.config.autoMode.defaultAgent,
36581
36667
  iteration: iterations
36582
36668
  }), ctx.workdir);
36583
- const fixEffectiveConfig = story.workdir ? await loadConfigForWorkdir(join48(ctx.workdir, ".nax", "config.json"), story.workdir) : ctx.config;
36669
+ const fixEffectiveConfig = story.workdir ? await loadConfigForWorkdir(join43(ctx.workdir, ".nax", "config.json"), story.workdir) : ctx.config;
36584
36670
  const fixContext = {
36585
- config: ctx.config,
36586
- effectiveConfig: fixEffectiveConfig,
36671
+ config: fixEffectiveConfig,
36672
+ rootConfig: ctx.config,
36587
36673
  prd,
36588
36674
  story,
36589
36675
  stories: [story],
36590
36676
  routing,
36591
- workdir: ctx.workdir,
36677
+ projectDir: ctx.workdir,
36678
+ workdir: story.workdir ? join43(ctx.workdir, story.workdir) : ctx.workdir,
36592
36679
  featureDir: ctx.featureDir,
36593
36680
  hooks: ctx.hooks,
36594
36681
  plugins: ctx.pluginRegistry,
@@ -36767,6 +36854,7 @@ async function runFixRouting(options) {
36767
36854
  storyId,
36768
36855
  semanticVerdicts: options.semanticVerdicts
36769
36856
  });
36857
+ const diagnosisCost = diagnosis.cost ?? 0;
36770
36858
  logger?.info("acceptance.diagnosis", "Diagnosis complete", {
36771
36859
  verdict: diagnosis.verdict,
36772
36860
  confidence: diagnosis.confidence,
@@ -36776,7 +36864,7 @@ async function runFixRouting(options) {
36776
36864
  logger?.info("acceptance", "Diagnosis: source_bug \u2014 executing source fix");
36777
36865
  if (!agent) {
36778
36866
  logger?.error("acceptance", "Agent not found for source fix execution");
36779
- return { fixed: false, cost: 0, prdDirty: false };
36867
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36780
36868
  }
36781
36869
  let fixAttempts = 0;
36782
36870
  while (fixAttempts < fixMaxRetries) {
@@ -36798,7 +36886,7 @@ async function runFixRouting(options) {
36798
36886
  attempt: fixAttempts
36799
36887
  });
36800
36888
  if (fixResult.success) {
36801
- return { fixed: true, cost: fixResult.cost, prdDirty: false };
36889
+ return { fixed: true, cost: fixResult.cost + diagnosisCost, prdDirty: false };
36802
36890
  }
36803
36891
  logger?.warn("acceptance.source-fix", "Source fix attempt failed", {
36804
36892
  attempt: fixAttempts,
@@ -36811,13 +36899,13 @@ async function runFixRouting(options) {
36811
36899
  break;
36812
36900
  }
36813
36901
  }
36814
- return { fixed: false, cost: 0, prdDirty: false };
36902
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36815
36903
  }
36816
36904
  if (diagnosis.verdict === "test_bug") {
36817
36905
  logger?.info("acceptance", "Diagnosis: test_bug \u2014 regenerating acceptance test");
36818
36906
  if (!ctx.featureDir) {
36819
36907
  logger?.error("acceptance", "Cannot regenerate test without featureDir");
36820
- return { fixed: false, cost: 0, prdDirty: false };
36908
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36821
36909
  }
36822
36910
  const testPath = await findExistingAcceptanceTestPath({
36823
36911
  acceptanceTestPaths: ctx.acceptanceTestPaths,
@@ -36834,29 +36922,29 @@ async function runFixRouting(options) {
36834
36922
  language: ctx.config.project?.language
36835
36923
  })
36836
36924
  });
36837
- return { fixed: false, cost: 0, prdDirty: false };
36925
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36838
36926
  }
36839
36927
  const regenerated = await regenerateAcceptanceTest(testPath, acceptanceContext);
36840
36928
  logger?.info("acceptance.test-regen", "Test regeneration completed", {
36841
36929
  outcome: regenerated ? "success" : "failure"
36842
36930
  });
36843
36931
  if (!regenerated) {
36844
- return { fixed: false, cost: 0, prdDirty: false };
36932
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36845
36933
  }
36846
36934
  const { acceptanceStage: acceptanceStage2 } = await Promise.resolve().then(() => (init_acceptance(), exports_acceptance));
36847
36935
  const acceptanceResult = await acceptanceStage2.execute(acceptanceContext);
36848
36936
  if (acceptanceResult.action === "continue") {
36849
36937
  logger?.info("acceptance", "Acceptance passed after test regeneration");
36850
- return { fixed: true, cost: 0, prdDirty: true };
36938
+ return { fixed: true, cost: diagnosisCost, prdDirty: true };
36851
36939
  }
36852
36940
  logger?.warn("acceptance", "Acceptance still failing after test regeneration");
36853
- return { fixed: false, cost: 0, prdDirty: true };
36941
+ return { fixed: false, cost: diagnosisCost, prdDirty: true };
36854
36942
  }
36855
36943
  if (diagnosis.verdict === "both") {
36856
36944
  logger?.info("acceptance", "Diagnosis: both \u2014 executing source fix then regenerating test if needed");
36857
36945
  if (!agent) {
36858
36946
  logger?.error("acceptance", "Agent not found for source fix execution");
36859
- return { fixed: false, cost: 0, prdDirty: false };
36947
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36860
36948
  }
36861
36949
  let sourceFixSuccess = false;
36862
36950
  let sourceFixCost = 0;
@@ -36896,19 +36984,19 @@ async function runFixRouting(options) {
36896
36984
  }
36897
36985
  }
36898
36986
  if (!sourceFixSuccess) {
36899
- return { fixed: false, cost: sourceFixCost, prdDirty: false };
36987
+ return { fixed: false, cost: sourceFixCost + diagnosisCost, prdDirty: false };
36900
36988
  }
36901
36989
  logger?.info("acceptance", "Source fix succeeded \u2014 re-running acceptance to verify");
36902
36990
  const { acceptanceStage: acceptanceStage2 } = await Promise.resolve().then(() => (init_acceptance(), exports_acceptance));
36903
36991
  const acceptanceResult = await acceptanceStage2.execute(acceptanceContext);
36904
36992
  if (acceptanceResult.action === "continue") {
36905
36993
  logger?.info("acceptance", "Acceptance passed after source fix");
36906
- return { fixed: true, cost: sourceFixCost, prdDirty: false };
36994
+ return { fixed: true, cost: sourceFixCost + diagnosisCost, prdDirty: false };
36907
36995
  }
36908
36996
  logger?.info("acceptance", "Acceptance still failing after source fix \u2014 regenerating test");
36909
36997
  if (!ctx.featureDir) {
36910
36998
  logger?.error("acceptance", "Cannot regenerate test without featureDir");
36911
- return { fixed: false, cost: sourceFixCost, prdDirty: false };
36999
+ return { fixed: false, cost: sourceFixCost + diagnosisCost, prdDirty: false };
36912
37000
  }
36913
37001
  const testPath = await findExistingAcceptanceTestPath({
36914
37002
  acceptanceTestPaths: ctx.acceptanceTestPaths,
@@ -36925,15 +37013,15 @@ async function runFixRouting(options) {
36925
37013
  language: ctx.config.project?.language
36926
37014
  })
36927
37015
  });
36928
- return { fixed: false, cost: sourceFixCost, prdDirty: false };
37016
+ return { fixed: false, cost: sourceFixCost + diagnosisCost, prdDirty: false };
36929
37017
  }
36930
37018
  const regenerated = await regenerateAcceptanceTest(testPath, acceptanceContext);
36931
37019
  logger?.info("acceptance.test-regen", "Test regeneration completed", {
36932
37020
  outcome: regenerated ? "success" : "failure"
36933
37021
  });
36934
- return { fixed: regenerated, cost: sourceFixCost, prdDirty: regenerated };
37022
+ return { fixed: regenerated, cost: sourceFixCost + diagnosisCost, prdDirty: regenerated };
36935
37023
  }
36936
- return { fixed: false, cost: 0, prdDirty: false };
37024
+ return { fixed: false, cost: diagnosisCost, prdDirty: false };
36937
37025
  }
36938
37026
  async function runAcceptanceLoop(ctx) {
36939
37027
  const logger = getSafeLogger();
@@ -36949,7 +37037,7 @@ async function runAcceptanceLoop(ctx) {
36949
37037
  const firstStory = prd.userStories[0];
36950
37038
  const acceptanceContext = {
36951
37039
  config: ctx.config,
36952
- effectiveConfig: ctx.config,
37040
+ rootConfig: ctx.config,
36953
37041
  prd,
36954
37042
  story: firstStory,
36955
37043
  stories: [firstStory],
@@ -36959,6 +37047,7 @@ async function runAcceptanceLoop(ctx) {
36959
37047
  testStrategy: "test-after",
36960
37048
  reasoning: "Acceptance validation"
36961
37049
  },
37050
+ projectDir: ctx.workdir,
36962
37051
  workdir: ctx.workdir,
36963
37052
  featureDir: ctx.featureDir,
36964
37053
  hooks: ctx.hooks,
@@ -37184,6 +37273,20 @@ async function runDeferredRegression(options) {
37184
37273
  const testCommand = config2.quality.commands.test ?? "bun test";
37185
37274
  const timeoutSeconds = config2.execution.regressionGate?.timeoutSeconds ?? 120;
37186
37275
  const maxRectificationAttempts = config2.execution.regressionGate?.maxRectificationAttempts ?? 2;
37276
+ const acceptOnTimeout = config2.execution.regressionGate?.acceptOnTimeout ?? true;
37277
+ const verifyOpts = {
37278
+ workdir,
37279
+ command: testCommand,
37280
+ timeoutSeconds,
37281
+ forceExit: config2.quality.forceExit,
37282
+ detectOpenHandles: config2.quality.detectOpenHandles,
37283
+ detectOpenHandlesRetries: config2.quality.detectOpenHandlesRetries,
37284
+ timeoutRetryCount: 0,
37285
+ gracePeriodMs: config2.quality.gracePeriodMs,
37286
+ drainTimeoutMs: config2.quality.drainTimeoutMs,
37287
+ shell: config2.quality.shell,
37288
+ stripEnvVars: config2.quality.stripEnvVars
37289
+ };
37187
37290
  const counts = countStories(prd);
37188
37291
  const passedStories = prd.userStories.filter((s) => s.status === "passed");
37189
37292
  if (passedStories.length === 0) {
@@ -37201,19 +37304,7 @@ async function runDeferredRegression(options) {
37201
37304
  totalStories: counts.total,
37202
37305
  passedStories: passedStories.length
37203
37306
  });
37204
- const fullSuiteResult = await _regressionDeps.runVerification({
37205
- workdir,
37206
- command: testCommand,
37207
- timeoutSeconds,
37208
- forceExit: config2.quality.forceExit,
37209
- detectOpenHandles: config2.quality.detectOpenHandles,
37210
- detectOpenHandlesRetries: config2.quality.detectOpenHandlesRetries,
37211
- timeoutRetryCount: 0,
37212
- gracePeriodMs: config2.quality.gracePeriodMs,
37213
- drainTimeoutMs: config2.quality.drainTimeoutMs,
37214
- shell: config2.quality.shell,
37215
- stripEnvVars: config2.quality.stripEnvVars
37216
- });
37307
+ const fullSuiteResult = await _regressionDeps.runVerification(verifyOpts);
37217
37308
  if (fullSuiteResult.success) {
37218
37309
  logger?.info("regression", "Full suite passed");
37219
37310
  return {
@@ -37225,7 +37316,6 @@ async function runDeferredRegression(options) {
37225
37316
  affectedStories: []
37226
37317
  };
37227
37318
  }
37228
- const acceptOnTimeout = config2.execution.regressionGate?.acceptOnTimeout ?? true;
37229
37319
  if (fullSuiteResult.status === "TIMEOUT" && acceptOnTimeout) {
37230
37320
  logger?.warn("regression", "Full-suite regression gate timed out (accepted as pass)");
37231
37321
  return {
@@ -37307,6 +37397,8 @@ async function runDeferredRegression(options) {
37307
37397
  };
37308
37398
  }
37309
37399
  let rectificationAttempts = 0;
37400
+ let storiesRectified = 0;
37401
+ let currentTestOutput = fullSuiteResult.output;
37310
37402
  const affectedStoriesList = Array.from(affectedStoriesObjs.values());
37311
37403
  for (const story of affectedStoriesList) {
37312
37404
  for (let attempt = 0;attempt < maxRectificationAttempts; attempt++) {
@@ -37318,32 +37410,51 @@ async function runDeferredRegression(options) {
37318
37410
  story,
37319
37411
  testCommand,
37320
37412
  timeoutSeconds,
37321
- testOutput: fullSuiteResult.output,
37413
+ testOutput: currentTestOutput,
37322
37414
  promptPrefix: `# DEFERRED REGRESSION: Full-Suite Failures
37323
37415
 
37324
37416
  Your story ${story.id} broke tests in the full suite. Fix these regressions.`,
37325
37417
  agentGetFn
37326
37418
  });
37327
37419
  if (fixed) {
37420
+ storiesRectified++;
37328
37421
  logger?.info("regression", `Story ${story.id} rectified successfully`);
37422
+ logger?.info("regression", "Re-running full suite after story rectification", {
37423
+ storyId: story.id,
37424
+ storiesRectified,
37425
+ storiesRemaining: affectedStoriesList.length - storiesRectified
37426
+ });
37427
+ const midResult = await _regressionDeps.runVerification(verifyOpts);
37428
+ const midSuccess = midResult.success || midResult.status === "TIMEOUT" && acceptOnTimeout;
37429
+ if (midSuccess) {
37430
+ logger?.info("regression", "Full suite passed after story rectification \u2014 early exit", {
37431
+ storyId: story.id,
37432
+ storiesRectified,
37433
+ storiesSkipped: affectedStoriesList.length - storiesRectified,
37434
+ passCount: midResult.passCount ?? 0
37435
+ });
37436
+ return {
37437
+ success: true,
37438
+ failedTests: testFilesInFailures.size,
37439
+ failedTestFiles: Array.from(testFilesInFailures),
37440
+ passedTests: midResult.passCount ?? 0,
37441
+ rectificationAttempts,
37442
+ affectedStories: Array.from(affectedStories)
37443
+ };
37444
+ }
37445
+ logger?.warn("regression", "Full suite still failing after story rectification \u2014 continuing", {
37446
+ storyId: story.id,
37447
+ failCount: midResult.failCount ?? 0,
37448
+ passCount: midResult.passCount ?? 0
37449
+ });
37450
+ if (midResult.output)
37451
+ currentTestOutput = midResult.output;
37329
37452
  break;
37330
37453
  }
37331
37454
  }
37332
37455
  }
37333
37456
  logger?.info("regression", "Re-running full suite after rectification");
37334
- const retryResult = await _regressionDeps.runVerification({
37335
- workdir,
37336
- command: testCommand,
37337
- timeoutSeconds,
37338
- forceExit: config2.quality.forceExit,
37339
- detectOpenHandles: config2.quality.detectOpenHandles,
37340
- detectOpenHandlesRetries: config2.quality.detectOpenHandlesRetries,
37341
- timeoutRetryCount: 0,
37342
- gracePeriodMs: config2.quality.gracePeriodMs,
37343
- drainTimeoutMs: config2.quality.drainTimeoutMs,
37344
- shell: config2.quality.shell,
37345
- stripEnvVars: config2.quality.stripEnvVars
37346
- });
37457
+ const retryResult = await _regressionDeps.runVerification(verifyOpts);
37347
37458
  const success2 = retryResult.success || retryResult.status === "TIMEOUT" && acceptOnTimeout;
37348
37459
  if (success2) {
37349
37460
  logger?.info("regression", "Deferred regression gate passed after rectification");
@@ -37600,12 +37711,12 @@ var init_headless_formatter = __esm(() => {
37600
37711
  // src/pipeline/subscribers/events-writer.ts
37601
37712
  import { appendFile as appendFile3, mkdir as mkdir3 } from "fs/promises";
37602
37713
  import { homedir as homedir5 } from "os";
37603
- import { basename as basename7, join as join49 } from "path";
37714
+ import { basename as basename6, join as join44 } from "path";
37604
37715
  function wireEventsWriter(bus, feature, runId, workdir) {
37605
37716
  const logger = getSafeLogger();
37606
- const project = basename7(workdir);
37607
- const eventsDir = join49(homedir5(), ".nax", "events", project);
37608
- const eventsFile = join49(eventsDir, "events.jsonl");
37717
+ const project = basename6(workdir);
37718
+ const eventsDir = join44(homedir5(), ".nax", "events", project);
37719
+ const eventsFile = join44(eventsDir, "events.jsonl");
37609
37720
  let dirReady = false;
37610
37721
  const write = (line) => {
37611
37722
  return (async () => {
@@ -37786,12 +37897,12 @@ var init_interaction2 = __esm(() => {
37786
37897
  // src/pipeline/subscribers/registry.ts
37787
37898
  import { mkdir as mkdir4, writeFile } from "fs/promises";
37788
37899
  import { homedir as homedir6 } from "os";
37789
- import { basename as basename8, join as join50 } from "path";
37900
+ import { basename as basename7, join as join45 } from "path";
37790
37901
  function wireRegistry(bus, feature, runId, workdir) {
37791
37902
  const logger = getSafeLogger();
37792
- const project = basename8(workdir);
37793
- const runDir = join50(homedir6(), ".nax", "runs", `${project}-${feature}-${runId}`);
37794
- const metaFile = join50(runDir, "meta.json");
37903
+ const project = basename7(workdir);
37904
+ const runDir = join45(homedir6(), ".nax", "runs", `${project}-${feature}-${runId}`);
37905
+ const metaFile = join45(runDir, "meta.json");
37795
37906
  const unsub = bus.on("run:started", (_ev) => {
37796
37907
  return (async () => {
37797
37908
  try {
@@ -37801,8 +37912,8 @@ function wireRegistry(bus, feature, runId, workdir) {
37801
37912
  project,
37802
37913
  feature,
37803
37914
  workdir,
37804
- statusPath: join50(workdir, ".nax", "features", feature, "status.json"),
37805
- eventsDir: join50(workdir, ".nax", "features", feature, "runs"),
37915
+ statusPath: join45(workdir, ".nax", "features", feature, "status.json"),
37916
+ eventsDir: join45(workdir, ".nax", "features", feature, "runs"),
37806
37917
  registeredAt: new Date().toISOString()
37807
37918
  };
37808
37919
  await writeFile(metaFile, JSON.stringify(meta3, null, 2));
@@ -38327,7 +38438,7 @@ function filterOutputFiles(files) {
38327
38438
  }
38328
38439
  async function handlePipelineSuccess(ctx, pipelineResult) {
38329
38440
  const logger = getSafeLogger();
38330
- const costDelta = pipelineResult.context.agentResult?.estimatedCost || 0;
38441
+ const costDelta = (pipelineResult.context.agentResult?.estimatedCost ?? 0) + (pipelineResult.stageCost ?? 0);
38331
38442
  const prd = ctx.prd;
38332
38443
  if (pipelineResult.context.storyMetrics) {
38333
38444
  ctx.allStoryMetrics.push(...pipelineResult.context.storyMetrics);
@@ -38375,7 +38486,7 @@ async function handlePipelineFailure(ctx, pipelineResult) {
38375
38486
  const logger = getSafeLogger();
38376
38487
  let prd = ctx.prd;
38377
38488
  let prdDirty = false;
38378
- const costDelta = pipelineResult.context.agentResult?.estimatedCost || 0;
38489
+ const costDelta = (pipelineResult.context.agentResult?.estimatedCost ?? 0) + (pipelineResult.stageCost ?? 0);
38379
38490
  switch (pipelineResult.finalAction) {
38380
38491
  case "pause":
38381
38492
  markStoryPaused(prd, ctx.story.id);
@@ -38454,7 +38565,7 @@ var init_pipeline_result_handler = __esm(() => {
38454
38565
  });
38455
38566
 
38456
38567
  // src/execution/iteration-runner.ts
38457
- import { join as join51 } from "path";
38568
+ import { join as join46 } from "path";
38458
38569
  async function runIteration(ctx, prd, selection, iterations, totalCost, allStoryMetrics) {
38459
38570
  const logger = getSafeLogger();
38460
38571
  const { story, storiesToExecute, routing, isBatchExecution } = selection;
@@ -38489,15 +38600,16 @@ async function runIteration(ctx, prd, selection, iterations, totalCost, allStory
38489
38600
  }
38490
38601
  }
38491
38602
  const accumulatedAttemptCost = (story.priorFailures || []).reduce((sum, f) => sum + (f.cost || 0), 0);
38492
- const effectiveConfig = story.workdir ? await _iterationRunnerDeps.loadConfigForWorkdir(join51(ctx.workdir, ".nax", "config.json"), story.workdir) : ctx.config;
38603
+ const effectiveConfig = story.workdir ? await _iterationRunnerDeps.loadConfigForWorkdir(join46(ctx.workdir, ".nax", "config.json"), story.workdir) : ctx.config;
38493
38604
  const pipelineContext = {
38494
- config: ctx.config,
38495
- effectiveConfig,
38605
+ config: effectiveConfig,
38606
+ rootConfig: ctx.config,
38496
38607
  prd,
38497
38608
  story,
38498
38609
  stories: storiesToExecute,
38499
38610
  routing,
38500
- workdir: ctx.workdir,
38611
+ projectDir: ctx.workdir,
38612
+ workdir: story.workdir ? join46(ctx.workdir, story.workdir) : ctx.workdir,
38501
38613
  prdPath: ctx.prdPath,
38502
38614
  featureDir: ctx.featureDir,
38503
38615
  hooks: ctx.hooks,
@@ -38522,13 +38634,6 @@ async function runIteration(ctx, prd, selection, iterations, totalCost, allStory
38522
38634
  await ctx.statusWriter.update(totalCost, iterations);
38523
38635
  const pipelineResult = await runPipeline(defaultPipeline, pipelineContext, ctx.eventEmitter);
38524
38636
  const currentPrd = pipelineResult.context.prd;
38525
- pipelineContext.agentResult = undefined;
38526
- pipelineContext.prompt = undefined;
38527
- pipelineContext.contextMarkdown = undefined;
38528
- pipelineContext.builtContext = undefined;
38529
- pipelineContext.verifyResult = undefined;
38530
- pipelineContext.reviewResult = undefined;
38531
- pipelineContext.constitution = undefined;
38532
38637
  const handlerCtx = {
38533
38638
  config: ctx.config,
38534
38639
  prd: currentPrd,
@@ -38551,26 +38656,36 @@ async function runIteration(ctx, prd, selection, iterations, totalCost, allStory
38551
38656
  storyStartTime,
38552
38657
  statusWriter: ctx.statusWriter
38553
38658
  };
38659
+ let iterResult;
38554
38660
  if (pipelineResult.success) {
38555
- const r2 = await handlePipelineSuccess(handlerCtx, pipelineResult);
38556
- return {
38557
- prd: r2.prd,
38558
- storiesCompletedDelta: r2.storiesCompletedDelta,
38559
- costDelta: r2.costDelta,
38560
- prdDirty: r2.prdDirty,
38661
+ const r = await handlePipelineSuccess(handlerCtx, pipelineResult);
38662
+ iterResult = {
38663
+ prd: r.prd,
38664
+ storiesCompletedDelta: r.storiesCompletedDelta,
38665
+ costDelta: r.costDelta,
38666
+ prdDirty: r.prdDirty,
38561
38667
  finalAction: pipelineResult.finalAction
38562
38668
  };
38669
+ } else {
38670
+ const r = await handlePipelineFailure(handlerCtx, pipelineResult);
38671
+ iterResult = {
38672
+ prd: r.prd,
38673
+ storiesCompletedDelta: 0,
38674
+ costDelta: r.costDelta,
38675
+ prdDirty: r.prdDirty,
38676
+ finalAction: pipelineResult.finalAction,
38677
+ reason: pipelineResult.reason,
38678
+ subStoryCount: pipelineResult.subStoryCount
38679
+ };
38563
38680
  }
38564
- const r = await handlePipelineFailure(handlerCtx, pipelineResult);
38565
- return {
38566
- prd: r.prd,
38567
- storiesCompletedDelta: 0,
38568
- costDelta: r.costDelta,
38569
- prdDirty: r.prdDirty,
38570
- finalAction: pipelineResult.finalAction,
38571
- reason: pipelineResult.reason,
38572
- subStoryCount: pipelineResult.subStoryCount
38573
- };
38681
+ pipelineContext.agentResult = undefined;
38682
+ pipelineContext.prompt = undefined;
38683
+ pipelineContext.contextMarkdown = undefined;
38684
+ pipelineContext.builtContext = undefined;
38685
+ pipelineContext.verifyResult = undefined;
38686
+ pipelineContext.reviewResult = undefined;
38687
+ pipelineContext.constitution = undefined;
38688
+ return iterResult;
38574
38689
  }
38575
38690
  var _iterationRunnerDeps;
38576
38691
  var init_iteration_runner = __esm(() => {
@@ -38650,6 +38765,7 @@ __export(exports_parallel_worker, {
38650
38765
  executeStoryInWorktree: () => executeStoryInWorktree,
38651
38766
  executeParallelBatch: () => executeParallelBatch
38652
38767
  });
38768
+ import { join as join47 } from "path";
38653
38769
  async function executeStoryInWorktree(story, worktreePath, context, routing, eventEmitter) {
38654
38770
  const logger = getSafeLogger();
38655
38771
  try {
@@ -38664,10 +38780,12 @@ async function executeStoryInWorktree(story, worktreePath, context, routing, eve
38664
38780
  }
38665
38781
  const pipelineContext = {
38666
38782
  ...context,
38667
- effectiveConfig: context.effectiveConfig ?? context.config,
38783
+ config: context.config,
38784
+ rootConfig: context.rootConfig,
38668
38785
  story,
38669
38786
  stories: [story],
38670
- workdir: worktreePath,
38787
+ projectDir: context.projectDir,
38788
+ workdir: story.workdir ? join47(worktreePath, story.workdir) : worktreePath,
38671
38789
  routing,
38672
38790
  storyGitRef: storyGitRef ?? undefined
38673
38791
  };
@@ -38712,7 +38830,7 @@ async function executeParallelBatch(stories, projectRoot, config2, context, work
38712
38830
  }
38713
38831
  const routing = routeTask(story.title, story.description, story.acceptanceCriteria, story.tags, config2);
38714
38832
  const storyConfig = storyEffectiveConfigs?.get(story.id);
38715
- const storyContext = storyConfig ? { ...context, effectiveConfig: storyConfig } : context;
38833
+ const storyContext = storyConfig ? { ...context, config: storyConfig } : context;
38716
38834
  const executePromise = executeStoryInWorktree(story, worktreePath, storyContext, routing, eventEmitter).then((result) => {
38717
38835
  results.totalCost += result.cost;
38718
38836
  results.storyCosts.set(story.id, result.cost);
@@ -38754,19 +38872,19 @@ __export(exports_manager, {
38754
38872
  _managerDeps: () => _managerDeps,
38755
38873
  WorktreeManager: () => WorktreeManager
38756
38874
  });
38757
- import { existsSync as existsSync30, symlinkSync } from "fs";
38875
+ import { existsSync as existsSync29, symlinkSync } from "fs";
38758
38876
  import { mkdir as mkdir5 } from "fs/promises";
38759
- import { join as join52 } from "path";
38877
+ import { join as join48 } from "path";
38760
38878
 
38761
38879
  class WorktreeManager {
38762
38880
  async ensureGitExcludes(projectRoot) {
38763
38881
  const logger = getSafeLogger();
38764
- const infoDir = join52(projectRoot, ".git", "info");
38765
- const excludePath = join52(infoDir, "exclude");
38882
+ const infoDir = join48(projectRoot, ".git", "info");
38883
+ const excludePath = join48(infoDir, "exclude");
38766
38884
  try {
38767
38885
  await mkdir5(infoDir, { recursive: true });
38768
38886
  let existing = "";
38769
- if (existsSync30(excludePath)) {
38887
+ if (existsSync29(excludePath)) {
38770
38888
  existing = await Bun.file(excludePath).text();
38771
38889
  }
38772
38890
  const missing = NAX_GITIGNORE_ENTRIES.filter((entry) => !existing.includes(entry));
@@ -38789,7 +38907,7 @@ ${missing.join(`
38789
38907
  }
38790
38908
  async create(projectRoot, storyId) {
38791
38909
  validateStoryId(storyId);
38792
- const worktreePath = join52(projectRoot, ".nax-wt", storyId);
38910
+ const worktreePath = join48(projectRoot, ".nax-wt", storyId);
38793
38911
  const branchName = `nax/${storyId}`;
38794
38912
  try {
38795
38913
  const pruneProc = _managerDeps.spawn(["git", "worktree", "prune"], {
@@ -38830,9 +38948,9 @@ ${missing.join(`
38830
38948
  }
38831
38949
  throw new Error(`Failed to create worktree: ${String(error48)}`);
38832
38950
  }
38833
- const nodeModulesSource = join52(projectRoot, "node_modules");
38834
- if (existsSync30(nodeModulesSource)) {
38835
- const nodeModulesTarget = join52(worktreePath, "node_modules");
38951
+ const nodeModulesSource = join48(projectRoot, "node_modules");
38952
+ if (existsSync29(nodeModulesSource)) {
38953
+ const nodeModulesTarget = join48(worktreePath, "node_modules");
38836
38954
  try {
38837
38955
  symlinkSync(nodeModulesSource, nodeModulesTarget, "dir");
38838
38956
  } catch (error48) {
@@ -38840,9 +38958,9 @@ ${missing.join(`
38840
38958
  throw new Error(`Failed to symlink node_modules: ${errorMessage(error48)}`);
38841
38959
  }
38842
38960
  }
38843
- const envSource = join52(projectRoot, ".env");
38844
- if (existsSync30(envSource)) {
38845
- const envTarget = join52(worktreePath, ".env");
38961
+ const envSource = join48(projectRoot, ".env");
38962
+ if (existsSync29(envSource)) {
38963
+ const envTarget = join48(worktreePath, ".env");
38846
38964
  try {
38847
38965
  symlinkSync(envSource, envTarget, "file");
38848
38966
  } catch (error48) {
@@ -38853,7 +38971,7 @@ ${missing.join(`
38853
38971
  }
38854
38972
  async remove(projectRoot, storyId) {
38855
38973
  validateStoryId(storyId);
38856
- const worktreePath = join52(projectRoot, ".nax-wt", storyId);
38974
+ const worktreePath = join48(projectRoot, ".nax-wt", storyId);
38857
38975
  const branchName = `nax/${storyId}`;
38858
38976
  try {
38859
38977
  const proc = _managerDeps.spawn(["git", "worktree", "remove", worktreePath, "--force"], {
@@ -39213,10 +39331,11 @@ async function rectifyConflictedStory(options) {
39213
39331
  const routing = routeTask2(story.title, story.description, story.acceptanceCriteria, story.tags, config2);
39214
39332
  const pipelineContext = {
39215
39333
  config: config2,
39216
- effectiveConfig: config2,
39334
+ rootConfig: config2,
39217
39335
  prd,
39218
39336
  story,
39219
39337
  stories: [story],
39338
+ projectDir: workdir,
39220
39339
  workdir: worktreePath,
39221
39340
  featureDir: undefined,
39222
39341
  hooks,
@@ -39453,8 +39572,9 @@ async function executeUnified(ctx, initialPrd) {
39453
39572
  logger?.info("execution", "Running pre-run pipeline (acceptance test setup)");
39454
39573
  preRunCtx = {
39455
39574
  config: ctx.config,
39456
- effectiveConfig: ctx.config,
39575
+ rootConfig: ctx.config,
39457
39576
  prd,
39577
+ projectDir: ctx.workdir,
39458
39578
  workdir: ctx.workdir,
39459
39579
  featureDir: ctx.featureDir,
39460
39580
  story: prd.userStories[0],
@@ -39499,6 +39619,7 @@ async function executeUnified(ctx, initialPrd) {
39499
39619
  const readyStories = getAllReadyStories(prd);
39500
39620
  const batch = _unifiedExecutorDeps.selectIndependentBatch(readyStories, ctx.parallelCount);
39501
39621
  if (batch.length > 1) {
39622
+ ctx.onBeforeStory?.();
39502
39623
  for (const story of batch) {
39503
39624
  pipelineEventBus.emit({
39504
39625
  type: "story:started",
@@ -39524,8 +39645,9 @@ async function executeUnified(ctx, initialPrd) {
39524
39645
  maxConcurrency: ctx.parallelCount,
39525
39646
  pipelineContext: {
39526
39647
  config: ctx.config,
39527
- effectiveConfig: ctx.config,
39648
+ rootConfig: ctx.config,
39528
39649
  prd,
39650
+ projectDir: ctx.workdir,
39529
39651
  hooks: ctx.hooks,
39530
39652
  featureDir: ctx.featureDir,
39531
39653
  agentGetFn: ctx.agentGetFn,
@@ -39639,6 +39761,7 @@ async function executeUnified(ctx, initialPrd) {
39639
39761
  }
39640
39762
  pipelineEventBus.emit({ type: "run:resumed", feature: ctx.feature });
39641
39763
  }
39764
+ ctx.onBeforeStory?.();
39642
39765
  pipelineEventBus.emit({
39643
39766
  type: "story:started",
39644
39767
  storyId: singleStory.id,
@@ -39701,6 +39824,7 @@ async function executeUnified(ctx, initialPrd) {
39701
39824
  }
39702
39825
  pipelineEventBus.emit({ type: "run:resumed", feature: ctx.feature });
39703
39826
  }
39827
+ ctx.onBeforeStory?.();
39704
39828
  pipelineEventBus.emit({
39705
39829
  type: "story:started",
39706
39830
  storyId: selection.story.id,
@@ -39789,16 +39913,16 @@ var init_unified_executor = __esm(() => {
39789
39913
  });
39790
39914
 
39791
39915
  // src/project/detector.ts
39792
- import { join as join53 } from "path";
39916
+ import { join as join49 } from "path";
39793
39917
  async function detectLanguage(workdir, pkg) {
39794
39918
  const deps = _detectorDeps;
39795
- if (await deps.fileExists(join53(workdir, "go.mod")))
39919
+ if (await deps.fileExists(join49(workdir, "go.mod")))
39796
39920
  return "go";
39797
- if (await deps.fileExists(join53(workdir, "Cargo.toml")))
39921
+ if (await deps.fileExists(join49(workdir, "Cargo.toml")))
39798
39922
  return "rust";
39799
- if (await deps.fileExists(join53(workdir, "pyproject.toml")))
39923
+ if (await deps.fileExists(join49(workdir, "pyproject.toml")))
39800
39924
  return "python";
39801
- if (await deps.fileExists(join53(workdir, "requirements.txt")))
39925
+ if (await deps.fileExists(join49(workdir, "requirements.txt")))
39802
39926
  return "python";
39803
39927
  if (pkg != null) {
39804
39928
  const allDeps = {
@@ -39858,18 +39982,18 @@ async function detectLintTool(workdir, language) {
39858
39982
  if (language === "python")
39859
39983
  return "ruff";
39860
39984
  const deps = _detectorDeps;
39861
- if (await deps.fileExists(join53(workdir, "biome.json")))
39985
+ if (await deps.fileExists(join49(workdir, "biome.json")))
39862
39986
  return "biome";
39863
- if (await deps.fileExists(join53(workdir, ".eslintrc")))
39987
+ if (await deps.fileExists(join49(workdir, ".eslintrc")))
39864
39988
  return "eslint";
39865
- if (await deps.fileExists(join53(workdir, ".eslintrc.js")))
39989
+ if (await deps.fileExists(join49(workdir, ".eslintrc.js")))
39866
39990
  return "eslint";
39867
- if (await deps.fileExists(join53(workdir, ".eslintrc.json")))
39991
+ if (await deps.fileExists(join49(workdir, ".eslintrc.json")))
39868
39992
  return "eslint";
39869
39993
  return;
39870
39994
  }
39871
39995
  async function detectProjectProfile(workdir, existing) {
39872
- const pkg = await _detectorDeps.readJson(join53(workdir, "package.json"));
39996
+ const pkg = await _detectorDeps.readJson(join49(workdir, "package.json"));
39873
39997
  const language = existing.language !== undefined ? existing.language : await detectLanguage(workdir, pkg);
39874
39998
  const type = existing.type !== undefined ? existing.type : detectType(pkg);
39875
39999
  const testFramework = existing.testFramework !== undefined ? existing.testFramework : await detectTestFramework(workdir, language, pkg);
@@ -39906,7 +40030,7 @@ var init_project = __esm(() => {
39906
40030
 
39907
40031
  // src/execution/status-file.ts
39908
40032
  import { rename, unlink as unlink3 } from "fs/promises";
39909
- import { resolve as resolve9 } from "path";
40033
+ import { resolve as resolve10 } from "path";
39910
40034
  function countProgress(prd) {
39911
40035
  const stories = prd.userStories;
39912
40036
  const passed = stories.filter((s) => s.status === "passed").length;
@@ -39951,7 +40075,7 @@ function buildStatusSnapshot(state) {
39951
40075
  return snapshot;
39952
40076
  }
39953
40077
  async function writeStatusFile(filePath, status) {
39954
- const resolvedPath = resolve9(filePath);
40078
+ const resolvedPath = resolve10(filePath);
39955
40079
  if (filePath.includes("../") || filePath.includes("..\\")) {
39956
40080
  throw new Error("Invalid status file path: path traversal detected");
39957
40081
  }
@@ -39965,7 +40089,7 @@ async function writeStatusFile(filePath, status) {
39965
40089
  var init_status_file = () => {};
39966
40090
 
39967
40091
  // src/execution/status-writer.ts
39968
- import { join as join54 } from "path";
40092
+ import { join as join50 } from "path";
39969
40093
 
39970
40094
  class StatusWriter {
39971
40095
  statusFile;
@@ -40079,7 +40203,7 @@ class StatusWriter {
40079
40203
  if (!this._prd)
40080
40204
  return;
40081
40205
  const safeLogger = getSafeLogger();
40082
- const featureStatusPath = join54(featureDir, "status.json");
40206
+ const featureStatusPath = join50(featureDir, "status.json");
40083
40207
  const write = async () => {
40084
40208
  try {
40085
40209
  const base = this.getSnapshot(totalCost, iterations);
@@ -40290,7 +40414,7 @@ __export(exports_run_initialization, {
40290
40414
  initializeRun: () => initializeRun,
40291
40415
  _reconcileDeps: () => _reconcileDeps
40292
40416
  });
40293
- import { join as join55 } from "path";
40417
+ import { join as join51 } from "path";
40294
40418
  async function reconcileState(prd, prdPath, workdir, config2) {
40295
40419
  const logger = getSafeLogger();
40296
40420
  let reconciledCount = 0;
@@ -40308,7 +40432,7 @@ async function reconcileState(prd, prdPath, workdir, config2) {
40308
40432
  });
40309
40433
  continue;
40310
40434
  }
40311
- const effectiveWorkdir = story.workdir ? join55(workdir, story.workdir) : workdir;
40435
+ const effectiveWorkdir = story.workdir ? join51(workdir, story.workdir) : workdir;
40312
40436
  try {
40313
40437
  const reviewResult = await _reconcileDeps.runReview(config2.review, effectiveWorkdir, config2.execution);
40314
40438
  if (!reviewResult.success) {
@@ -41034,14 +41158,14 @@ See https://react.dev/link/invalid-hook-call for tips about how to debug and fix
41034
41158
  prevActScopeDepth !== actScopeDepth - 1 && console.error("You seem to have overlapping act() calls, this is not supported. Be sure to await previous act() calls before making a new one. ");
41035
41159
  actScopeDepth = prevActScopeDepth;
41036
41160
  }
41037
- function recursivelyFlushAsyncActWork(returnValue, resolve10, reject) {
41161
+ function recursivelyFlushAsyncActWork(returnValue, resolve11, reject) {
41038
41162
  var queue = ReactSharedInternals.actQueue;
41039
41163
  if (queue !== null)
41040
41164
  if (queue.length !== 0)
41041
41165
  try {
41042
41166
  flushActQueue(queue);
41043
41167
  enqueueTask(function() {
41044
- return recursivelyFlushAsyncActWork(returnValue, resolve10, reject);
41168
+ return recursivelyFlushAsyncActWork(returnValue, resolve11, reject);
41045
41169
  });
41046
41170
  return;
41047
41171
  } catch (error48) {
@@ -41049,7 +41173,7 @@ See https://react.dev/link/invalid-hook-call for tips about how to debug and fix
41049
41173
  }
41050
41174
  else
41051
41175
  ReactSharedInternals.actQueue = null;
41052
- 0 < ReactSharedInternals.thrownErrors.length ? (queue = aggregateErrors(ReactSharedInternals.thrownErrors), ReactSharedInternals.thrownErrors.length = 0, reject(queue)) : resolve10(returnValue);
41176
+ 0 < ReactSharedInternals.thrownErrors.length ? (queue = aggregateErrors(ReactSharedInternals.thrownErrors), ReactSharedInternals.thrownErrors.length = 0, reject(queue)) : resolve11(returnValue);
41053
41177
  }
41054
41178
  function flushActQueue(queue) {
41055
41179
  if (!isFlushing) {
@@ -41225,14 +41349,14 @@ See https://react.dev/link/invalid-hook-call for tips about how to debug and fix
41225
41349
  didAwaitActCall || didWarnNoAwaitAct || (didWarnNoAwaitAct = true, console.error("You called act(async () => ...) without await. This could lead to unexpected testing behaviour, interleaving multiple act calls and mixing their scopes. You should - await act(async () => ...);"));
41226
41350
  });
41227
41351
  return {
41228
- then: function(resolve10, reject) {
41352
+ then: function(resolve11, reject) {
41229
41353
  didAwaitActCall = true;
41230
41354
  thenable.then(function(returnValue) {
41231
41355
  popActScope(prevActQueue, prevActScopeDepth);
41232
41356
  if (prevActScopeDepth === 0) {
41233
41357
  try {
41234
41358
  flushActQueue(queue), enqueueTask(function() {
41235
- return recursivelyFlushAsyncActWork(returnValue, resolve10, reject);
41359
+ return recursivelyFlushAsyncActWork(returnValue, resolve11, reject);
41236
41360
  });
41237
41361
  } catch (error$0) {
41238
41362
  ReactSharedInternals.thrownErrors.push(error$0);
@@ -41243,7 +41367,7 @@ See https://react.dev/link/invalid-hook-call for tips about how to debug and fix
41243
41367
  reject(_thrownError);
41244
41368
  }
41245
41369
  } else
41246
- resolve10(returnValue);
41370
+ resolve11(returnValue);
41247
41371
  }, function(error48) {
41248
41372
  popActScope(prevActQueue, prevActScopeDepth);
41249
41373
  0 < ReactSharedInternals.thrownErrors.length ? (error48 = aggregateErrors(ReactSharedInternals.thrownErrors), ReactSharedInternals.thrownErrors.length = 0, reject(error48)) : reject(error48);
@@ -41259,11 +41383,11 @@ See https://react.dev/link/invalid-hook-call for tips about how to debug and fix
41259
41383
  if (0 < ReactSharedInternals.thrownErrors.length)
41260
41384
  throw callback = aggregateErrors(ReactSharedInternals.thrownErrors), ReactSharedInternals.thrownErrors.length = 0, callback;
41261
41385
  return {
41262
- then: function(resolve10, reject) {
41386
+ then: function(resolve11, reject) {
41263
41387
  didAwaitActCall = true;
41264
41388
  prevActScopeDepth === 0 ? (ReactSharedInternals.actQueue = queue, enqueueTask(function() {
41265
- return recursivelyFlushAsyncActWork(returnValue$jscomp$0, resolve10, reject);
41266
- })) : resolve10(returnValue$jscomp$0);
41389
+ return recursivelyFlushAsyncActWork(returnValue$jscomp$0, resolve11, reject);
41390
+ })) : resolve11(returnValue$jscomp$0);
41267
41391
  }
41268
41392
  };
41269
41393
  };
@@ -44105,8 +44229,8 @@ It can also happen if the client has a browser extension installed which messes
44105
44229
  currentEntangledActionThenable = {
44106
44230
  status: "pending",
44107
44231
  value: undefined,
44108
- then: function(resolve10) {
44109
- entangledListeners.push(resolve10);
44232
+ then: function(resolve11) {
44233
+ entangledListeners.push(resolve11);
44110
44234
  }
44111
44235
  };
44112
44236
  }
@@ -44130,8 +44254,8 @@ It can also happen if the client has a browser extension installed which messes
44130
44254
  status: "pending",
44131
44255
  value: null,
44132
44256
  reason: null,
44133
- then: function(resolve10) {
44134
- listeners.push(resolve10);
44257
+ then: function(resolve11) {
44258
+ listeners.push(resolve11);
44135
44259
  }
44136
44260
  };
44137
44261
  thenable.then(function() {
@@ -71521,9 +71645,9 @@ var require_jsx_dev_runtime = __commonJS((exports, module) => {
71521
71645
 
71522
71646
  // bin/nax.ts
71523
71647
  init_source();
71524
- import { existsSync as existsSync32, mkdirSync as mkdirSync7 } from "fs";
71648
+ import { existsSync as existsSync31, mkdirSync as mkdirSync7 } from "fs";
71525
71649
  import { homedir as homedir8 } from "os";
71526
- import { join as join57 } from "path";
71650
+ import { join as join53 } from "path";
71527
71651
 
71528
71652
  // node_modules/commander/esm.mjs
71529
71653
  var import__ = __toESM(require_commander(), 1);
@@ -71549,15 +71673,15 @@ import { join as join11 } from "path";
71549
71673
  import { createInterface as createInterface2 } from "readline";
71550
71674
 
71551
71675
  // src/analyze/scanner.ts
71552
- import { existsSync as existsSync2, readdirSync } from "fs";
71676
+ import { existsSync as existsSync3, readdirSync } from "fs";
71553
71677
  import { join as join4 } from "path";
71554
71678
  async function scanCodebase(workdir) {
71555
71679
  const srcPath = join4(workdir, "src");
71556
71680
  const packageJsonPath = join4(workdir, "package.json");
71557
- const fileTree = existsSync2(srcPath) ? await generateFileTree(srcPath, 3) : "No src/ directory";
71681
+ const fileTree = existsSync3(srcPath) ? await generateFileTree(srcPath, 3) : "No src/ directory";
71558
71682
  let dependencies = {};
71559
71683
  let devDependencies = {};
71560
- if (existsSync2(packageJsonPath)) {
71684
+ if (existsSync3(packageJsonPath)) {
71561
71685
  try {
71562
71686
  const pkg = await Bun.file(packageJsonPath).json();
71563
71687
  dependencies = pkg.dependencies || {};
@@ -71618,16 +71742,16 @@ function detectTestPatterns(workdir, dependencies, devDependencies) {
71618
71742
  } else {
71619
71743
  patterns.push("Test framework: likely bun:test (no framework dependency)");
71620
71744
  }
71621
- if (existsSync2(join4(workdir, "test"))) {
71745
+ if (existsSync3(join4(workdir, "test"))) {
71622
71746
  patterns.push("Test directory: test/");
71623
71747
  }
71624
- if (existsSync2(join4(workdir, "__tests__"))) {
71748
+ if (existsSync3(join4(workdir, "__tests__"))) {
71625
71749
  patterns.push("Test directory: __tests__/");
71626
71750
  }
71627
- if (existsSync2(join4(workdir, "tests"))) {
71751
+ if (existsSync3(join4(workdir, "tests"))) {
71628
71752
  patterns.push("Test directory: tests/");
71629
71753
  }
71630
- const hasTestFiles = existsSync2(join4(workdir, "test")) || existsSync2(join4(workdir, "src"));
71754
+ const hasTestFiles = existsSync3(join4(workdir, "test")) || existsSync3(join4(workdir, "src"));
71631
71755
  if (hasTestFiles) {
71632
71756
  patterns.push("Test files: *.test.ts, *.spec.ts");
71633
71757
  }
@@ -71639,7 +71763,7 @@ init_test_strategy();
71639
71763
 
71640
71764
  // src/context/generator.ts
71641
71765
  init_path_security();
71642
- import { existsSync as existsSync5, readFileSync } from "fs";
71766
+ import { existsSync as existsSync5 } from "fs";
71643
71767
  import { join as join6, relative } from "path";
71644
71768
 
71645
71769
  // src/context/injector.ts
@@ -71982,7 +72106,6 @@ var windsurfGenerator = {
71982
72106
  // src/context/generator.ts
71983
72107
  var _generatorDeps = {
71984
72108
  existsSync: (p) => existsSync5(p),
71985
- readFileSync: (p, enc) => readFileSync(p, enc),
71986
72109
  readTextFile: (p) => Bun.file(p).text(),
71987
72110
  writeFile: (p, content) => Bun.write(p, content),
71988
72111
  buildProjectMetadata
@@ -72084,47 +72207,41 @@ async function discoverWorkspacePackages(repoRoot) {
72084
72207
  }
72085
72208
  }
72086
72209
  const turboPath = join6(repoRoot, "turbo.json");
72087
- if (_generatorDeps.existsSync(turboPath)) {
72088
- try {
72089
- const turbo = JSON.parse(_generatorDeps.readFileSync(turboPath, "utf-8"));
72090
- if (Array.isArray(turbo.packages)) {
72091
- await resolveGlobs(turbo.packages);
72092
- }
72093
- } catch {}
72094
- }
72210
+ try {
72211
+ const turbo = JSON.parse(await _generatorDeps.readTextFile(turboPath));
72212
+ if (Array.isArray(turbo.packages)) {
72213
+ await resolveGlobs(turbo.packages);
72214
+ }
72215
+ } catch {}
72095
72216
  const pkgPath = join6(repoRoot, "package.json");
72096
- if (_generatorDeps.existsSync(pkgPath)) {
72097
- try {
72098
- const pkg = JSON.parse(_generatorDeps.readFileSync(pkgPath, "utf-8"));
72099
- const ws = pkg.workspaces;
72100
- const patterns = Array.isArray(ws) ? ws : Array.isArray(ws?.packages) ? ws.packages : [];
72101
- if (patterns.length > 0)
72102
- await resolveGlobs(patterns);
72103
- } catch {}
72104
- }
72217
+ try {
72218
+ const pkg = JSON.parse(await _generatorDeps.readTextFile(pkgPath));
72219
+ const ws = pkg.workspaces;
72220
+ const patterns = Array.isArray(ws) ? ws : Array.isArray(ws?.packages) ? ws.packages : [];
72221
+ if (patterns.length > 0)
72222
+ await resolveGlobs(patterns);
72223
+ } catch {}
72105
72224
  const pnpmPath = join6(repoRoot, "pnpm-workspace.yaml");
72106
- if (_generatorDeps.existsSync(pnpmPath)) {
72107
- try {
72108
- const raw = _generatorDeps.readFileSync(pnpmPath, "utf-8");
72109
- const lines = raw.split(`
72225
+ try {
72226
+ const raw = await _generatorDeps.readTextFile(pnpmPath);
72227
+ const lines = raw.split(`
72110
72228
  `);
72111
- let inPackages = false;
72112
- const patterns = [];
72113
- for (const line of lines) {
72114
- if (/^packages\s*:/.test(line)) {
72115
- inPackages = true;
72116
- continue;
72117
- }
72118
- if (inPackages && /^\s+-\s+/.test(line)) {
72119
- patterns.push(line.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, ""));
72120
- } else if (inPackages && !/^\s/.test(line)) {
72121
- break;
72122
- }
72229
+ let inPackages = false;
72230
+ const patterns = [];
72231
+ for (const line of lines) {
72232
+ if (/^packages\s*:/.test(line)) {
72233
+ inPackages = true;
72234
+ continue;
72123
72235
  }
72124
- if (patterns.length > 0)
72125
- await resolveGlobs(patterns);
72126
- } catch {}
72127
- }
72236
+ if (inPackages && /^\s+-\s+/.test(line)) {
72237
+ patterns.push(line.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, ""));
72238
+ } else if (inPackages && !/^\s/.test(line)) {
72239
+ break;
72240
+ }
72241
+ }
72242
+ if (patterns.length > 0)
72243
+ await resolveGlobs(patterns);
72244
+ } catch {}
72128
72245
  return results.sort();
72129
72246
  }
72130
72247
  async function generateForPackage(packageDir, config2, dryRun = false, repoRoot) {
@@ -72656,13 +72773,13 @@ function createCliInteractionBridge() {
72656
72773
  process.stdout.write(`
72657
72774
  \uD83E\uDD16 Agent: ${text}
72658
72775
  You: `);
72659
- return new Promise((resolve5) => {
72776
+ return new Promise((resolve6) => {
72660
72777
  const rl = createInterface2({ input: process.stdin, terminal: false });
72661
72778
  rl.once("line", (line) => {
72662
72779
  rl.close();
72663
- resolve5(line.trim());
72780
+ resolve6(line.trim());
72664
72781
  });
72665
- rl.once("close", () => resolve5(""));
72782
+ rl.once("close", () => resolve6(""));
72666
72783
  });
72667
72784
  }
72668
72785
  };
@@ -73167,20 +73284,20 @@ async function displayModelEfficiency(workdir) {
73167
73284
  // src/cli/status-features.ts
73168
73285
  init_source();
73169
73286
  import { existsSync as existsSync15, readdirSync as readdirSync4 } from "fs";
73170
- import { join as join14, resolve as resolve6 } from "path";
73287
+ import { join as join14, resolve as resolve7 } from "path";
73171
73288
 
73172
73289
  // src/commands/common.ts
73173
73290
  init_path_security();
73174
73291
  init_errors();
73175
73292
  import { existsSync as existsSync14, readdirSync as readdirSync3, realpathSync as realpathSync2 } from "fs";
73176
- import { join as join12, resolve as resolve5 } from "path";
73293
+ import { join as join12, resolve as resolve6 } from "path";
73177
73294
  function resolveProject(options = {}) {
73178
73295
  const { dir, feature } = options;
73179
73296
  let projectRoot;
73180
73297
  let naxDir;
73181
73298
  let configPath;
73182
73299
  if (dir) {
73183
- projectRoot = realpathSync2(resolve5(dir));
73300
+ projectRoot = realpathSync2(resolve6(dir));
73184
73301
  naxDir = join12(projectRoot, ".nax");
73185
73302
  if (!existsSync14(naxDir)) {
73186
73303
  throw new NaxError(`Directory does not contain a nax project: ${projectRoot}
@@ -73233,7 +73350,7 @@ No features found in this project.`;
73233
73350
  };
73234
73351
  }
73235
73352
  function findProjectRoot(startDir) {
73236
- let current = resolve5(startDir);
73353
+ let current = resolve6(startDir);
73237
73354
  let depth = 0;
73238
73355
  while (depth < MAX_DIRECTORY_DEPTH) {
73239
73356
  const naxDir = join12(current, ".nax");
@@ -73550,7 +73667,7 @@ async function displayFeatureStatus(options = {}) {
73550
73667
  if (options.feature) {
73551
73668
  let featureDir;
73552
73669
  if (options.dir) {
73553
- featureDir = join14(resolve6(options.dir), ".nax", "features", options.feature);
73670
+ featureDir = join14(resolve7(options.dir), ".nax", "features", options.feature);
73554
73671
  } else {
73555
73672
  const resolved = resolveProject({ feature: options.feature });
73556
73673
  if (!resolved.featureDir) {
@@ -73660,8 +73777,8 @@ async function runsShowCommand(options) {
73660
73777
  }
73661
73778
  // src/cli/prompts-main.ts
73662
73779
  init_logger2();
73663
- import { existsSync as existsSync20, mkdirSync as mkdirSync3 } from "fs";
73664
- import { join as join29 } from "path";
73780
+ import { existsSync as existsSync19, mkdirSync as mkdirSync3 } from "fs";
73781
+ import { join as join24 } from "path";
73665
73782
 
73666
73783
  // src/pipeline/index.ts
73667
73784
  init_runner();
@@ -73736,7 +73853,7 @@ function buildFrontmatter(story, ctx, role) {
73736
73853
 
73737
73854
  // src/cli/prompts-tdd.ts
73738
73855
  init_prompts2();
73739
- import { join as join28 } from "path";
73856
+ import { join as join23 } from "path";
73740
73857
  async function handleThreeSessionTddPrompts(story, ctx, outputDir, logger) {
73741
73858
  const [testWriterPrompt, implementerPrompt, verifierPrompt] = await Promise.all([
73742
73859
  PromptBuilder.for("test-writer", { isolation: "strict" }).withLoader(ctx.workdir, ctx.config).story(story).context(ctx.contextMarkdown).constitution(ctx.constitution?.content).testCommand(ctx.config.quality?.commands?.test).build(),
@@ -73755,7 +73872,7 @@ ${frontmatter}---
73755
73872
 
73756
73873
  ${session.prompt}`;
73757
73874
  if (outputDir) {
73758
- const promptFile = join28(outputDir, `${story.id}.${session.role}.md`);
73875
+ const promptFile = join23(outputDir, `${story.id}.${session.role}.md`);
73759
73876
  await Bun.write(promptFile, fullOutput);
73760
73877
  logger.info("cli", "Written TDD prompt file", {
73761
73878
  storyId: story.id,
@@ -73771,7 +73888,7 @@ ${"=".repeat(80)}`);
73771
73888
  }
73772
73889
  }
73773
73890
  if (outputDir && ctx.contextMarkdown) {
73774
- const contextFile = join28(outputDir, `${story.id}.context.md`);
73891
+ const contextFile = join23(outputDir, `${story.id}.context.md`);
73775
73892
  const frontmatter = buildFrontmatter(story, ctx);
73776
73893
  const contextOutput = `---
73777
73894
  ${frontmatter}---
@@ -73785,13 +73902,13 @@ ${ctx.contextMarkdown}`;
73785
73902
  async function promptsCommand(options) {
73786
73903
  const logger = getLogger();
73787
73904
  const { feature, workdir, config: config2, storyId, outputDir } = options;
73788
- const naxDir = join29(workdir, ".nax");
73789
- if (!existsSync20(naxDir)) {
73905
+ const naxDir = join24(workdir, ".nax");
73906
+ if (!existsSync19(naxDir)) {
73790
73907
  throw new Error(`.nax directory not found. Run 'nax init' first in ${workdir}`);
73791
73908
  }
73792
- const featureDir = join29(naxDir, "features", feature);
73793
- const prdPath = join29(featureDir, "prd.json");
73794
- if (!existsSync20(prdPath)) {
73909
+ const featureDir = join24(naxDir, "features", feature);
73910
+ const prdPath = join24(featureDir, "prd.json");
73911
+ if (!existsSync19(prdPath)) {
73795
73912
  throw new Error(`Feature "${feature}" not found or missing prd.json`);
73796
73913
  }
73797
73914
  const prd = await loadPRD(prdPath);
@@ -73812,7 +73929,7 @@ async function promptsCommand(options) {
73812
73929
  for (const story of stories) {
73813
73930
  const ctx = {
73814
73931
  config: config2,
73815
- effectiveConfig: config2,
73932
+ rootConfig: config2,
73816
73933
  prd,
73817
73934
  story,
73818
73935
  stories: [story],
@@ -73822,6 +73939,7 @@ async function promptsCommand(options) {
73822
73939
  testStrategy: "test-after",
73823
73940
  reasoning: "Placeholder routing"
73824
73941
  },
73942
+ projectDir: workdir,
73825
73943
  workdir,
73826
73944
  featureDir,
73827
73945
  hooks: { hooks: {} }
@@ -73851,10 +73969,10 @@ ${frontmatter}---
73851
73969
 
73852
73970
  ${ctx.prompt}`;
73853
73971
  if (outputDir) {
73854
- const promptFile = join29(outputDir, `${story.id}.prompt.md`);
73972
+ const promptFile = join24(outputDir, `${story.id}.prompt.md`);
73855
73973
  await Bun.write(promptFile, fullOutput);
73856
73974
  if (ctx.contextMarkdown) {
73857
- const contextFile = join29(outputDir, `${story.id}.context.md`);
73975
+ const contextFile = join24(outputDir, `${story.id}.context.md`);
73858
73976
  const contextOutput = `---
73859
73977
  ${frontmatter}---
73860
73978
 
@@ -73880,8 +73998,8 @@ ${"=".repeat(80)}`);
73880
73998
  return processedStories;
73881
73999
  }
73882
74000
  // src/cli/prompts-init.ts
73883
- import { existsSync as existsSync21, mkdirSync as mkdirSync4 } from "fs";
73884
- import { join as join30 } from "path";
74001
+ import { existsSync as existsSync20, mkdirSync as mkdirSync4 } from "fs";
74002
+ import { join as join25 } from "path";
73885
74003
  var TEMPLATE_ROLES = [
73886
74004
  { file: "test-writer.md", role: "test-writer" },
73887
74005
  { file: "implementer.md", role: "implementer", variant: "standard" },
@@ -73905,9 +74023,9 @@ var TEMPLATE_HEADER = `<!--
73905
74023
  `;
73906
74024
  async function promptsInitCommand(options) {
73907
74025
  const { workdir, force = false, autoWireConfig = true } = options;
73908
- const templatesDir = join30(workdir, ".nax", "templates");
74026
+ const templatesDir = join25(workdir, ".nax", "templates");
73909
74027
  mkdirSync4(templatesDir, { recursive: true });
73910
- const existingFiles = TEMPLATE_ROLES.map((t) => t.file).filter((f) => existsSync21(join30(templatesDir, f)));
74028
+ const existingFiles = TEMPLATE_ROLES.map((t) => t.file).filter((f) => existsSync20(join25(templatesDir, f)));
73911
74029
  if (existingFiles.length > 0 && !force) {
73912
74030
  console.warn(`[WARN] nax/templates/ already contains files: ${existingFiles.join(", ")}. No files overwritten.
73913
74031
  Pass --force to overwrite existing templates.`);
@@ -73915,7 +74033,7 @@ async function promptsInitCommand(options) {
73915
74033
  }
73916
74034
  const written = [];
73917
74035
  for (const template of TEMPLATE_ROLES) {
73918
- const filePath = join30(templatesDir, template.file);
74036
+ const filePath = join25(templatesDir, template.file);
73919
74037
  const roleBody = template.role === "implementer" ? buildRoleTaskSection(template.role, template.variant) : buildRoleTaskSection(template.role);
73920
74038
  const content = TEMPLATE_HEADER + roleBody;
73921
74039
  await Bun.write(filePath, content);
@@ -73931,8 +74049,8 @@ async function promptsInitCommand(options) {
73931
74049
  return written;
73932
74050
  }
73933
74051
  async function autoWirePromptsConfig(workdir) {
73934
- const configPath = join30(workdir, "nax.config.json");
73935
- if (!existsSync21(configPath)) {
74052
+ const configPath = join25(workdir, "nax.config.json");
74053
+ if (!existsSync20(configPath)) {
73936
74054
  const exampleConfig = JSON.stringify({
73937
74055
  prompts: {
73938
74056
  overrides: {
@@ -74096,8 +74214,8 @@ function pad(str, width) {
74096
74214
  init_config();
74097
74215
  init_logger2();
74098
74216
  init_prd();
74099
- import { existsSync as existsSync23, readdirSync as readdirSync6 } from "fs";
74100
- import { join as join35 } from "path";
74217
+ import { existsSync as existsSync22, readdirSync as readdirSync6 } from "fs";
74218
+ import { join as join30 } from "path";
74101
74219
 
74102
74220
  // src/cli/diagnose-analysis.ts
74103
74221
  function detectFailurePattern(story, prd, status) {
@@ -74296,8 +74414,8 @@ function isProcessAlive2(pid) {
74296
74414
  }
74297
74415
  }
74298
74416
  async function loadStatusFile2(workdir) {
74299
- const statusPath = join35(workdir, ".nax", "status.json");
74300
- if (!existsSync23(statusPath))
74417
+ const statusPath = join30(workdir, ".nax", "status.json");
74418
+ if (!existsSync22(statusPath))
74301
74419
  return null;
74302
74420
  try {
74303
74421
  return await Bun.file(statusPath).json();
@@ -74324,7 +74442,7 @@ async function countCommitsSince(workdir, since) {
74324
74442
  }
74325
74443
  }
74326
74444
  async function checkLock(workdir) {
74327
- const lockFile = Bun.file(join35(workdir, "nax.lock"));
74445
+ const lockFile = Bun.file(join30(workdir, "nax.lock"));
74328
74446
  if (!await lockFile.exists())
74329
74447
  return { lockPresent: false };
74330
74448
  try {
@@ -74342,8 +74460,8 @@ async function diagnoseCommand(options = {}) {
74342
74460
  const logger = getLogger();
74343
74461
  const workdir = options.workdir ?? process.cwd();
74344
74462
  const naxSubdir = findProjectDir(workdir);
74345
- let projectDir = naxSubdir ? join35(naxSubdir, "..") : null;
74346
- if (!projectDir && existsSync23(join35(workdir, ".nax"))) {
74463
+ let projectDir = naxSubdir ? join30(naxSubdir, "..") : null;
74464
+ if (!projectDir && existsSync22(join30(workdir, ".nax"))) {
74347
74465
  projectDir = workdir;
74348
74466
  }
74349
74467
  if (!projectDir)
@@ -74354,8 +74472,8 @@ async function diagnoseCommand(options = {}) {
74354
74472
  if (status2) {
74355
74473
  feature = status2.run.feature;
74356
74474
  } else {
74357
- const featuresDir = join35(projectDir, ".nax", "features");
74358
- if (!existsSync23(featuresDir))
74475
+ const featuresDir = join30(projectDir, ".nax", "features");
74476
+ if (!existsSync22(featuresDir))
74359
74477
  throw new Error("No features found in project");
74360
74478
  const features = readdirSync6(featuresDir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) => e.name);
74361
74479
  if (features.length === 0)
@@ -74364,9 +74482,9 @@ async function diagnoseCommand(options = {}) {
74364
74482
  logger.info("diagnose", "No feature specified, using first found", { feature });
74365
74483
  }
74366
74484
  }
74367
- const featureDir = join35(projectDir, ".nax", "features", feature);
74368
- const prdPath = join35(featureDir, "prd.json");
74369
- if (!existsSync23(prdPath))
74485
+ const featureDir = join30(projectDir, ".nax", "features", feature);
74486
+ const prdPath = join30(featureDir, "prd.json");
74487
+ if (!existsSync22(prdPath))
74370
74488
  throw new Error(`Feature not found: ${feature}`);
74371
74489
  const prd = await loadPRD(prdPath);
74372
74490
  const status = await loadStatusFile2(projectDir);
@@ -74407,8 +74525,8 @@ init_interaction();
74407
74525
  // src/cli/generate.ts
74408
74526
  init_source();
74409
74527
  init_loader();
74410
- import { existsSync as existsSync24 } from "fs";
74411
- import { join as join36 } from "path";
74528
+ import { existsSync as existsSync23 } from "fs";
74529
+ import { join as join31 } from "path";
74412
74530
  var VALID_AGENTS = ["claude", "codex", "opencode", "cursor", "windsurf", "aider", "gemini"];
74413
74531
  async function generateCommand(options) {
74414
74532
  const workdir = options.dir ?? process.cwd();
@@ -74451,7 +74569,7 @@ async function generateCommand(options) {
74451
74569
  return;
74452
74570
  }
74453
74571
  if (options.package) {
74454
- const packageDir = join36(workdir, options.package);
74572
+ const packageDir = join31(workdir, options.package);
74455
74573
  if (dryRun) {
74456
74574
  console.log(source_default.yellow("\u26A0 Dry run \u2014 no files will be written"));
74457
74575
  }
@@ -74471,10 +74589,10 @@ async function generateCommand(options) {
74471
74589
  process.exit(1);
74472
74590
  return;
74473
74591
  }
74474
- const contextPath = options.context ? join36(workdir, options.context) : join36(workdir, ".nax/context.md");
74475
- const outputDir = options.output ? join36(workdir, options.output) : workdir;
74592
+ const contextPath = options.context ? join31(workdir, options.context) : join31(workdir, ".nax/context.md");
74593
+ const outputDir = options.output ? join31(workdir, options.output) : workdir;
74476
74594
  const autoInject = !options.noAutoInject;
74477
- if (!existsSync24(contextPath)) {
74595
+ if (!existsSync23(contextPath)) {
74478
74596
  console.error(source_default.red(`\u2717 Context file not found: ${contextPath}`));
74479
74597
  console.error(source_default.yellow(" Create .nax/context.md first, or run `nax init` to scaffold it."));
74480
74598
  process.exit(1);
@@ -74576,8 +74694,8 @@ async function generateCommand(options) {
74576
74694
  }
74577
74695
  // src/cli/config-display.ts
74578
74696
  init_loader();
74579
- import { existsSync as existsSync26 } from "fs";
74580
- import { join as join38 } from "path";
74697
+ import { existsSync as existsSync25 } from "fs";
74698
+ import { join as join33 } from "path";
74581
74699
 
74582
74700
  // src/cli/config-descriptions.ts
74583
74701
  var FIELD_DESCRIPTIONS = {
@@ -74816,10 +74934,10 @@ function deepEqual(a, b) {
74816
74934
  // src/cli/config-get.ts
74817
74935
  init_defaults();
74818
74936
  init_loader();
74819
- import { existsSync as existsSync25 } from "fs";
74820
- import { join as join37 } from "path";
74937
+ import { existsSync as existsSync24 } from "fs";
74938
+ import { join as join32 } from "path";
74821
74939
  async function loadConfigFile(path15) {
74822
- if (!existsSync25(path15))
74940
+ if (!existsSync24(path15))
74823
74941
  return null;
74824
74942
  try {
74825
74943
  return await Bun.file(path15).json();
@@ -74839,7 +74957,7 @@ async function loadProjectConfig() {
74839
74957
  const projectDir = findProjectDir();
74840
74958
  if (!projectDir)
74841
74959
  return null;
74842
- const projectPath = join37(projectDir, "config.json");
74960
+ const projectPath = join32(projectDir, "config.json");
74843
74961
  return await loadConfigFile(projectPath);
74844
74962
  }
74845
74963
 
@@ -74899,14 +75017,14 @@ async function configCommand(config2, options = {}) {
74899
75017
  function determineConfigSources() {
74900
75018
  const globalPath = globalConfigPath();
74901
75019
  const projectDir = findProjectDir();
74902
- const projectPath = projectDir ? join38(projectDir, "config.json") : null;
75020
+ const projectPath = projectDir ? join33(projectDir, "config.json") : null;
74903
75021
  return {
74904
75022
  global: fileExists(globalPath) ? globalPath : null,
74905
75023
  project: projectPath && fileExists(projectPath) ? projectPath : null
74906
75024
  };
74907
75025
  }
74908
75026
  function fileExists(path15) {
74909
- return existsSync26(path15);
75027
+ return existsSync25(path15);
74910
75028
  }
74911
75029
  function displayConfigWithDescriptions(obj, path15, sources, indent = 0) {
74912
75030
  const indentStr = " ".repeat(indent);
@@ -75048,15 +75166,15 @@ init_paths();
75048
75166
  init_profile();
75049
75167
  import { mkdirSync as mkdirSync5 } from "fs";
75050
75168
  import { readdirSync as readdirSync7 } from "fs";
75051
- import { join as join39 } from "path";
75169
+ import { join as join34 } from "path";
75052
75170
  var _profileCLIDeps = {
75053
75171
  env: process.env
75054
75172
  };
75055
75173
  var SENSITIVE_KEY_PATTERN = /key|token|secret|password|credential/i;
75056
75174
  var VAR_PATTERN = /\$[A-Za-z_][A-Za-z0-9_]*/;
75057
75175
  async function profileListCommand(startDir) {
75058
- const globalProfilesDir = join39(globalConfigDir(), "profiles");
75059
- const projectProfilesDir = join39(projectConfigDir(startDir), "profiles");
75176
+ const globalProfilesDir = join34(globalConfigDir(), "profiles");
75177
+ const projectProfilesDir = join34(projectConfigDir(startDir), "profiles");
75060
75178
  const globalProfiles = scanProfileDir(globalProfilesDir);
75061
75179
  const projectProfiles = scanProfileDir(projectProfilesDir);
75062
75180
  const activeProfile = await resolveProfileName({}, _profileCLIDeps.env, startDir);
@@ -75115,7 +75233,7 @@ function maskProfileValues(obj) {
75115
75233
  return result;
75116
75234
  }
75117
75235
  async function profileUseCommand(profileName, startDir) {
75118
- const configPath = join39(projectConfigDir(startDir), "config.json");
75236
+ const configPath = join34(projectConfigDir(startDir), "config.json");
75119
75237
  const configFile = Bun.file(configPath);
75120
75238
  let existing = {};
75121
75239
  if (await configFile.exists()) {
@@ -75134,8 +75252,8 @@ async function profileCurrentCommand(startDir) {
75134
75252
  return resolveProfileName({}, _profileCLIDeps.env, startDir);
75135
75253
  }
75136
75254
  async function profileCreateCommand(profileName, startDir) {
75137
- const profilesDir = join39(projectConfigDir(startDir), "profiles");
75138
- const profilePath = join39(profilesDir, `${profileName}.json`);
75255
+ const profilesDir = join34(projectConfigDir(startDir), "profiles");
75256
+ const profilePath = join34(profilesDir, `${profileName}.json`);
75139
75257
  const profileFile = Bun.file(profilePath);
75140
75258
  if (await profileFile.exists()) {
75141
75259
  throw new Error(`Profile "${profileName}" already exists at ${profilePath}`);
@@ -75200,25 +75318,25 @@ async function diagnose(options) {
75200
75318
  }
75201
75319
 
75202
75320
  // src/commands/logs.ts
75203
- import { existsSync as existsSync28 } from "fs";
75204
- import { join as join43 } from "path";
75321
+ import { existsSync as existsSync27 } from "fs";
75322
+ import { join as join38 } from "path";
75205
75323
 
75206
75324
  // src/commands/logs-formatter.ts
75207
75325
  init_source();
75208
75326
  init_formatter();
75209
75327
  import { readdirSync as readdirSync9 } from "fs";
75210
- import { join as join42 } from "path";
75328
+ import { join as join37 } from "path";
75211
75329
 
75212
75330
  // src/commands/logs-reader.ts
75213
- import { existsSync as existsSync27, readdirSync as readdirSync8 } from "fs";
75331
+ import { existsSync as existsSync26, readdirSync as readdirSync8 } from "fs";
75214
75332
  import { readdir as readdir3 } from "fs/promises";
75215
- import { join as join41 } from "path";
75333
+ import { join as join36 } from "path";
75216
75334
 
75217
75335
  // src/utils/paths.ts
75218
75336
  import { homedir as homedir4 } from "os";
75219
- import { join as join40 } from "path";
75337
+ import { join as join35 } from "path";
75220
75338
  function getRunsDir() {
75221
- return process.env.NAX_RUNS_DIR ?? join40(homedir4(), ".nax", "runs");
75339
+ return process.env.NAX_RUNS_DIR ?? join35(homedir4(), ".nax", "runs");
75222
75340
  }
75223
75341
 
75224
75342
  // src/commands/logs-reader.ts
@@ -75235,7 +75353,7 @@ async function resolveRunFileFromRegistry(runId) {
75235
75353
  }
75236
75354
  let matched = null;
75237
75355
  for (const entry of entries) {
75238
- const metaPath = join41(runsDir, entry, "meta.json");
75356
+ const metaPath = join36(runsDir, entry, "meta.json");
75239
75357
  try {
75240
75358
  const meta3 = await Bun.file(metaPath).json();
75241
75359
  if (meta3.runId === runId || meta3.runId.startsWith(runId)) {
@@ -75247,7 +75365,7 @@ async function resolveRunFileFromRegistry(runId) {
75247
75365
  if (!matched) {
75248
75366
  throw new Error(`Run not found in registry: ${runId}`);
75249
75367
  }
75250
- if (!existsSync27(matched.eventsDir)) {
75368
+ if (!existsSync26(matched.eventsDir)) {
75251
75369
  console.log(`Log directory unavailable for run: ${runId}`);
75252
75370
  return null;
75253
75371
  }
@@ -75257,14 +75375,14 @@ async function resolveRunFileFromRegistry(runId) {
75257
75375
  return null;
75258
75376
  }
75259
75377
  const specificFile = files.find((f) => f === `${matched.runId}.jsonl`);
75260
- return join41(matched.eventsDir, specificFile ?? files[0]);
75378
+ return join36(matched.eventsDir, specificFile ?? files[0]);
75261
75379
  }
75262
75380
  async function selectRunFile(runsDir) {
75263
75381
  const files = readdirSync8(runsDir).filter((f) => f.endsWith(".jsonl") && f !== "latest.jsonl").sort().reverse();
75264
75382
  if (files.length === 0) {
75265
75383
  return null;
75266
75384
  }
75267
- return join41(runsDir, files[0]);
75385
+ return join36(runsDir, files[0]);
75268
75386
  }
75269
75387
  async function extractRunSummary(filePath) {
75270
75388
  const file3 = Bun.file(filePath);
@@ -75349,7 +75467,7 @@ Runs:
75349
75467
  console.log(source_default.gray(" Timestamp Stories Duration Cost Status"));
75350
75468
  console.log(source_default.gray(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500"));
75351
75469
  for (const file3 of files) {
75352
- const filePath = join42(runsDir, file3);
75470
+ const filePath = join37(runsDir, file3);
75353
75471
  const summary = await extractRunSummary(filePath);
75354
75472
  const timestamp = file3.replace(".jsonl", "");
75355
75473
  const stories = summary ? `${summary.passed}/${summary.total}` : "?/?";
@@ -75463,7 +75581,7 @@ async function logsCommand(options) {
75463
75581
  return;
75464
75582
  }
75465
75583
  const resolved = resolveProject({ dir: options.dir });
75466
- const naxDir = join43(resolved.projectDir, ".nax");
75584
+ const naxDir = join38(resolved.projectDir, ".nax");
75467
75585
  const configPath = resolved.configPath;
75468
75586
  const configFile = Bun.file(configPath);
75469
75587
  const config2 = await configFile.json();
@@ -75471,9 +75589,9 @@ async function logsCommand(options) {
75471
75589
  if (!featureName) {
75472
75590
  throw new Error("No feature specified in config.json");
75473
75591
  }
75474
- const featureDir = join43(naxDir, "features", featureName);
75475
- const runsDir = join43(featureDir, "runs");
75476
- if (!existsSync28(runsDir)) {
75592
+ const featureDir = join38(naxDir, "features", featureName);
75593
+ const runsDir = join38(featureDir, "runs");
75594
+ if (!existsSync27(runsDir)) {
75477
75595
  throw new Error(`No runs directory found for feature: ${featureName}`);
75478
75596
  }
75479
75597
  if (options.list) {
@@ -75496,8 +75614,8 @@ init_source();
75496
75614
  init_config();
75497
75615
  init_prd();
75498
75616
  init_precheck();
75499
- import { existsSync as existsSync29 } from "fs";
75500
- import { join as join44 } from "path";
75617
+ import { existsSync as existsSync28 } from "fs";
75618
+ import { join as join39 } from "path";
75501
75619
  async function precheckCommand(options) {
75502
75620
  const resolved = resolveProject({
75503
75621
  dir: options.dir,
@@ -75519,14 +75637,14 @@ async function precheckCommand(options) {
75519
75637
  process.exit(1);
75520
75638
  }
75521
75639
  }
75522
- const naxDir = join44(resolved.projectDir, ".nax");
75523
- const featureDir = join44(naxDir, "features", featureName);
75524
- const prdPath = join44(featureDir, "prd.json");
75525
- if (!existsSync29(featureDir)) {
75640
+ const naxDir = join39(resolved.projectDir, ".nax");
75641
+ const featureDir = join39(naxDir, "features", featureName);
75642
+ const prdPath = join39(featureDir, "prd.json");
75643
+ if (!existsSync28(featureDir)) {
75526
75644
  console.error(source_default.red(`Feature not found: ${featureName}`));
75527
75645
  process.exit(1);
75528
75646
  }
75529
- if (!existsSync29(prdPath)) {
75647
+ if (!existsSync28(prdPath)) {
75530
75648
  console.error(source_default.red(`Missing prd.json for feature: ${featureName}`));
75531
75649
  console.error(source_default.dim(`Run: nax plan -f ${featureName} --from spec.md --auto`));
75532
75650
  process.exit(EXIT_CODES.INVALID_PRD);
@@ -75543,7 +75661,7 @@ async function precheckCommand(options) {
75543
75661
  // src/commands/runs.ts
75544
75662
  init_source();
75545
75663
  import { readdir as readdir4 } from "fs/promises";
75546
- import { join as join45 } from "path";
75664
+ import { join as join40 } from "path";
75547
75665
  var DEFAULT_LIMIT = 20;
75548
75666
  var _runsCmdDeps = {
75549
75667
  getRunsDir
@@ -75598,7 +75716,7 @@ async function runsCommand(options = {}) {
75598
75716
  }
75599
75717
  const rows = [];
75600
75718
  for (const entry of entries) {
75601
- const metaPath = join45(runsDir, entry, "meta.json");
75719
+ const metaPath = join40(runsDir, entry, "meta.json");
75602
75720
  let meta3;
75603
75721
  try {
75604
75722
  meta3 = await Bun.file(metaPath).json();
@@ -75675,7 +75793,7 @@ async function runsCommand(options = {}) {
75675
75793
 
75676
75794
  // src/commands/unlock.ts
75677
75795
  init_source();
75678
- import { join as join46 } from "path";
75796
+ import { join as join41 } from "path";
75679
75797
  function isProcessAlive3(pid) {
75680
75798
  try {
75681
75799
  process.kill(pid, 0);
@@ -75690,7 +75808,7 @@ function formatLockAge(ageMs) {
75690
75808
  }
75691
75809
  async function unlockCommand(options) {
75692
75810
  const workdir = options.dir ?? process.cwd();
75693
- const lockPath = join46(workdir, "nax.lock");
75811
+ const lockPath = join41(workdir, "nax.lock");
75694
75812
  const lockFile = Bun.file(lockPath);
75695
75813
  const exists = await lockFile.exists();
75696
75814
  if (!exists) {
@@ -75766,11 +75884,9 @@ async function runCompletionPhase(options) {
75766
75884
  const regressionAlreadyPassed = postRunStatus?.regression?.status === "passed";
75767
75885
  if (acceptanceAlreadyPassed && regressionAlreadyPassed) {
75768
75886
  logger?.info("execution", "Post-run phases already passed \u2014 skipping acceptance and regression");
75769
- console.info("Post-run phases already passed \u2014 skipping acceptance and regression");
75770
75887
  } else {
75771
75888
  if (acceptanceAlreadyPassed) {
75772
75889
  logger?.info("execution", "Acceptance already passed \u2014 skipping acceptance phase");
75773
- console.info("Acceptance already passed \u2014 skipping acceptance phase");
75774
75890
  } else if (options.config.acceptance.enabled && isComplete(options.prd)) {
75775
75891
  options.statusWriter.setPostRunPhase("acceptance", { status: "running" });
75776
75892
  const acceptanceResult = await _runnerCompletionDeps.runAcceptanceLoop({
@@ -75969,6 +76085,7 @@ async function runExecutionPhase(options, prd, pluginRegistry) {
75969
76085
  startTime: options.startTime,
75970
76086
  parallelCount: options.parallel,
75971
76087
  agentGetFn: options.agentGetFn,
76088
+ onBeforeStory: options.onBeforeStory,
75972
76089
  pidRegistry: options.pidRegistry,
75973
76090
  interactionChain: options.interactionChain,
75974
76091
  batchPlan
@@ -76093,6 +76210,7 @@ async function run(options) {
76093
76210
  headless,
76094
76211
  parallel,
76095
76212
  agentGetFn,
76213
+ onBeforeStory: () => registry2.resetStoryState(),
76096
76214
  pidRegistry,
76097
76215
  interactionChain
76098
76216
  }, prd, pluginRegistry);
@@ -81676,8 +81794,8 @@ class Ink {
81676
81794
  }
81677
81795
  }
81678
81796
  async waitUntilExit() {
81679
- this.exitPromise ||= new Promise((resolve10, reject2) => {
81680
- this.resolveExitPromise = resolve10;
81797
+ this.exitPromise ||= new Promise((resolve11, reject2) => {
81798
+ this.resolveExitPromise = resolve11;
81681
81799
  this.rejectExitPromise = reject2;
81682
81800
  });
81683
81801
  if (!this.beforeExitHandler) {
@@ -83490,7 +83608,7 @@ async function promptForConfirmation(question) {
83490
83608
  if (!process.stdin.isTTY) {
83491
83609
  return true;
83492
83610
  }
83493
- return new Promise((resolve10) => {
83611
+ return new Promise((resolve11) => {
83494
83612
  process.stdout.write(source_default.bold(`${question} [Y/n] `));
83495
83613
  process.stdin.setRawMode(true);
83496
83614
  process.stdin.resume();
@@ -83503,9 +83621,9 @@ async function promptForConfirmation(question) {
83503
83621
  process.stdout.write(`
83504
83622
  `);
83505
83623
  if (answer === "n") {
83506
- resolve10(false);
83624
+ resolve11(false);
83507
83625
  } else {
83508
- resolve10(true);
83626
+ resolve11(true);
83509
83627
  }
83510
83628
  };
83511
83629
  process.stdin.on("data", handler);
@@ -83534,15 +83652,15 @@ Next: nax generate --package ${options.package}`));
83534
83652
  }
83535
83653
  return;
83536
83654
  }
83537
- const naxDir = join57(workdir, ".nax");
83538
- if (existsSync32(naxDir) && !options.force) {
83655
+ const naxDir = join53(workdir, ".nax");
83656
+ if (existsSync31(naxDir) && !options.force) {
83539
83657
  console.log(source_default.yellow("nax already initialized. Use --force to overwrite."));
83540
83658
  return;
83541
83659
  }
83542
- mkdirSync7(join57(naxDir, "features"), { recursive: true });
83543
- mkdirSync7(join57(naxDir, "hooks"), { recursive: true });
83544
- await Bun.write(join57(naxDir, "config.json"), JSON.stringify(DEFAULT_CONFIG, null, 2));
83545
- await Bun.write(join57(naxDir, "hooks.json"), JSON.stringify({
83660
+ mkdirSync7(join53(naxDir, "features"), { recursive: true });
83661
+ mkdirSync7(join53(naxDir, "hooks"), { recursive: true });
83662
+ await Bun.write(join53(naxDir, "config.json"), JSON.stringify(DEFAULT_CONFIG, null, 2));
83663
+ await Bun.write(join53(naxDir, "hooks.json"), JSON.stringify({
83546
83664
  hooks: {
83547
83665
  "on-start": { command: 'echo "nax started: $NAX_FEATURE"', enabled: false },
83548
83666
  "on-complete": { command: 'echo "nax complete: $NAX_FEATURE"', enabled: false },
@@ -83550,12 +83668,12 @@ Next: nax generate --package ${options.package}`));
83550
83668
  "on-error": { command: 'echo "nax error: $NAX_REASON"', enabled: false }
83551
83669
  }
83552
83670
  }, null, 2));
83553
- await Bun.write(join57(naxDir, ".gitignore"), `# nax temp files
83671
+ await Bun.write(join53(naxDir, ".gitignore"), `# nax temp files
83554
83672
  *.tmp
83555
83673
  .paused.json
83556
83674
  .nax-verifier-verdict.json
83557
83675
  `);
83558
- await Bun.write(join57(naxDir, "context.md"), `# Project Context
83676
+ await Bun.write(join53(naxDir, "context.md"), `# Project Context
83559
83677
 
83560
83678
  This document defines coding standards, architectural decisions, and forbidden patterns for this project.
83561
83679
  Run \`nax generate\` to regenerate agent config files (CLAUDE.md, AGENTS.md, .cursorrules, etc.) from this file.
@@ -83652,7 +83770,7 @@ program2.command("run").description("Run the orchestration loop for a feature").
83652
83770
  console.error(source_default.red("Error: --plan requires --from <spec-path>"));
83653
83771
  process.exit(1);
83654
83772
  }
83655
- if (options.from && !existsSync32(options.from)) {
83773
+ if (options.from && !existsSync31(options.from)) {
83656
83774
  console.error(source_default.red(`Error: File not found: ${options.from} (required with --plan)`));
83657
83775
  process.exit(1);
83658
83776
  }
@@ -83685,10 +83803,10 @@ program2.command("run").description("Run the orchestration loop for a feature").
83685
83803
  console.error(source_default.red("nax not initialized. Run: nax init"));
83686
83804
  process.exit(1);
83687
83805
  }
83688
- const featureDir = join57(naxDir, "features", options.feature);
83689
- const prdPath = join57(featureDir, "prd.json");
83806
+ const featureDir = join53(naxDir, "features", options.feature);
83807
+ const prdPath = join53(featureDir, "prd.json");
83690
83808
  if (options.plan && options.from) {
83691
- if (existsSync32(prdPath) && !options.force) {
83809
+ if (existsSync31(prdPath) && !options.force) {
83692
83810
  console.error(source_default.red(`Error: prd.json already exists for feature "${options.feature}".`));
83693
83811
  console.error(source_default.dim(" Use --force to overwrite, or run without --plan to use the existing PRD."));
83694
83812
  process.exit(1);
@@ -83708,10 +83826,10 @@ program2.command("run").description("Run the orchestration loop for a feature").
83708
83826
  }
83709
83827
  }
83710
83828
  try {
83711
- const planLogDir = join57(featureDir, "plan");
83829
+ const planLogDir = join53(featureDir, "plan");
83712
83830
  mkdirSync7(planLogDir, { recursive: true });
83713
83831
  const planLogId = new Date().toISOString().replace(/:/g, "-").replace(/\..+/, "");
83714
- const planLogPath = join57(planLogDir, `${planLogId}.jsonl`);
83832
+ const planLogPath = join53(planLogDir, `${planLogId}.jsonl`);
83715
83833
  initLogger({ level: "info", filePath: planLogPath, useChalk: false, headless: true });
83716
83834
  console.log(source_default.dim(` [Plan log: ${planLogPath}]`));
83717
83835
  console.log(source_default.dim(" [Planning phase: generating PRD from spec]"));
@@ -83750,15 +83868,15 @@ program2.command("run").description("Run the orchestration loop for a feature").
83750
83868
  process.exit(1);
83751
83869
  }
83752
83870
  }
83753
- if (!existsSync32(prdPath)) {
83871
+ if (!existsSync31(prdPath)) {
83754
83872
  console.error(source_default.red(`Feature "${options.feature}" not found or missing prd.json`));
83755
83873
  process.exit(1);
83756
83874
  }
83757
83875
  resetLogger();
83758
- const runsDir = join57(featureDir, "runs");
83876
+ const runsDir = join53(featureDir, "runs");
83759
83877
  mkdirSync7(runsDir, { recursive: true });
83760
83878
  const runId = new Date().toISOString().replace(/:/g, "-").replace(/\..+/, "");
83761
- const logFilePath = join57(runsDir, `${runId}.jsonl`);
83879
+ const logFilePath = join53(runsDir, `${runId}.jsonl`);
83762
83880
  const isTTY = process.stdout.isTTY ?? false;
83763
83881
  const headlessFlag = options.headless ?? false;
83764
83882
  const headlessEnv = process.env.NAX_HEADLESS === "1";
@@ -83774,7 +83892,7 @@ program2.command("run").description("Run the orchestration loop for a feature").
83774
83892
  config2.autoMode.defaultAgent = options.agent;
83775
83893
  }
83776
83894
  config2.execution.maxIterations = Number.parseInt(options.maxIterations, 10);
83777
- const globalNaxDir = join57(homedir8(), ".nax");
83895
+ const globalNaxDir = join53(homedir8(), ".nax");
83778
83896
  const hooks = await loadHooksConfig(naxDir, globalNaxDir);
83779
83897
  const eventEmitter = new PipelineEventEmitter;
83780
83898
  let tuiInstance;
@@ -83797,7 +83915,7 @@ program2.command("run").description("Run the orchestration loop for a feature").
83797
83915
  } else {
83798
83916
  console.log(source_default.dim(" [Headless mode \u2014 pipe output]"));
83799
83917
  }
83800
- const statusFilePath = join57(workdir, ".nax", "status.json");
83918
+ const statusFilePath = join53(workdir, ".nax", "status.json");
83801
83919
  let parallel;
83802
83920
  if (options.parallel !== undefined) {
83803
83921
  parallel = Number.parseInt(options.parallel, 10);
@@ -83823,9 +83941,9 @@ program2.command("run").description("Run the orchestration loop for a feature").
83823
83941
  headless: useHeadless,
83824
83942
  skipPrecheck: options.skipPrecheck ?? false
83825
83943
  });
83826
- const latestSymlink = join57(runsDir, "latest.jsonl");
83944
+ const latestSymlink = join53(runsDir, "latest.jsonl");
83827
83945
  try {
83828
- if (existsSync32(latestSymlink)) {
83946
+ if (existsSync31(latestSymlink)) {
83829
83947
  Bun.spawnSync(["rm", latestSymlink]);
83830
83948
  }
83831
83949
  Bun.spawnSync(["ln", "-s", `${runId}.jsonl`, latestSymlink], {
@@ -83861,9 +83979,9 @@ features.command("create <name>").description("Create a new feature").option("-d
83861
83979
  console.error(source_default.red("nax not initialized. Run: nax init"));
83862
83980
  process.exit(1);
83863
83981
  }
83864
- const featureDir = join57(naxDir, "features", name);
83982
+ const featureDir = join53(naxDir, "features", name);
83865
83983
  mkdirSync7(featureDir, { recursive: true });
83866
- await Bun.write(join57(featureDir, "spec.md"), `# Feature: ${name}
83984
+ await Bun.write(join53(featureDir, "spec.md"), `# Feature: ${name}
83867
83985
 
83868
83986
  ## Overview
83869
83987
 
@@ -83896,7 +84014,7 @@ features.command("create <name>").description("Create a new feature").option("-d
83896
84014
 
83897
84015
  <!-- What this feature explicitly does NOT cover. -->
83898
84016
  `);
83899
- await Bun.write(join57(featureDir, "progress.txt"), `# Progress: ${name}
84017
+ await Bun.write(join53(featureDir, "progress.txt"), `# Progress: ${name}
83900
84018
 
83901
84019
  Created: ${new Date().toISOString()}
83902
84020
 
@@ -83922,8 +84040,8 @@ features.command("list").description("List all features").option("-d, --dir <pat
83922
84040
  console.error(source_default.red("nax not initialized."));
83923
84041
  process.exit(1);
83924
84042
  }
83925
- const featuresDir = join57(naxDir, "features");
83926
- if (!existsSync32(featuresDir)) {
84043
+ const featuresDir = join53(naxDir, "features");
84044
+ if (!existsSync31(featuresDir)) {
83927
84045
  console.log(source_default.dim("No features yet."));
83928
84046
  return;
83929
84047
  }
@@ -83937,8 +84055,8 @@ features.command("list").description("List all features").option("-d, --dir <pat
83937
84055
  Features:
83938
84056
  `));
83939
84057
  for (const name of entries) {
83940
- const prdPath = join57(featuresDir, name, "prd.json");
83941
- if (existsSync32(prdPath)) {
84058
+ const prdPath = join53(featuresDir, name, "prd.json");
84059
+ if (existsSync31(prdPath)) {
83942
84060
  const prd = await loadPRD(prdPath);
83943
84061
  const c = countStories(prd);
83944
84062
  console.log(` ${name} \u2014 ${c.passed}/${c.total} stories done`);
@@ -83972,10 +84090,10 @@ Use: nax plan -f <feature> --from <spec>`));
83972
84090
  cliOverrides.profile = options.profile;
83973
84091
  }
83974
84092
  const config2 = await loadConfig(workdir, cliOverrides);
83975
- const featureLogDir = join57(naxDir, "features", options.feature, "plan");
84093
+ const featureLogDir = join53(naxDir, "features", options.feature, "plan");
83976
84094
  mkdirSync7(featureLogDir, { recursive: true });
83977
84095
  const planLogId = new Date().toISOString().replace(/:/g, "-").replace(/\..+/, "");
83978
- const planLogPath = join57(featureLogDir, `${planLogId}.jsonl`);
84096
+ const planLogPath = join53(featureLogDir, `${planLogId}.jsonl`);
83979
84097
  initLogger({ level: "info", filePath: planLogPath, useChalk: false, headless: true });
83980
84098
  console.log(source_default.dim(` [Plan log: ${planLogPath}]`));
83981
84099
  try {