@gluecharm-lab/easyspecs-cli 0.3.3 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/commands.md CHANGED
@@ -178,7 +178,7 @@ Each row lists **command-specific CLI tokens**, then **what configuration applie
178
178
  | `auth status` | — | Reads session file. |
179
179
  | `run synthesis` | — | Single SRS-9 context pass (same as extension **Run Analysis**). **`requireOpenCode`** → executable + credentials unless skip flag. Uses `**merged.pipelineOpenCode**` from `**config.json**`. `**--promote**` / `**--no-promote**` controls promotion after success. |
180
180
  | `run synthesis resume-missing`, `run synthesis resume-synthesis` | Optional `**--worktree <path>**` | Same implementation: resolves checkout via `--worktree` or stored snapshot (`[resolveAdHocCheckoutRoot](../../src/cli/main.ts)`). OpenCode + `**merged.pipelineOpenCode**`. |
181
- | `analysis` | Optional tail: `**--force-new-context-analysis**`, `**--no-worktree**` / `**--noworktree**` (**SRS-71**: use **`<repoRoot>`** as the analysis checkout; no detached temp worktree). **`--upload`** → usage exit. **`--synthesis-only`**: accepted as a **hidden** Commander option (legacy scripts); **ignored** with one stderr **`[deprecated]`** line (**SRS-60**). | **SRS-32** full **Factory** loop: synthesis convergence (until no missing artefacts), coverage, zero-ref, report, **link mapping**, index assembly; **Phase 7** backend sync is skipped in CLI (**`skipBackendSync`**). **On success:** always promotes **`.gluecharm/context`** from the analysis worktree into **`<repoRoot>`** unless **`--no-promote`** or the checkout is already the repo root (stderr **`[pipeline:analysis]`**); **`easyspecs.analysis.promoteContextToWorkspace`** does **not** apply to this step. Push context with **`upload context`** / **`upload republish`** after analysis if you need cloud sync. Factory timing from `**merged.factory`** + `**--ci**` defaults. Debug: `**easyspecs.factory.debug**`. OpenCode options from `**merged.pipelineOpenCode**`. **SRS-46:** if `**easyspecs.factory.cloudContextAnalyzed`** is `**true**` and `**--force-new-context-analysis**` is absent, exits **0** without running the Factory (`**analysisSkipped**`, …). **SRS-59:** stale worktree registration recovery — see **§ Analysis git checkout** below. |
181
+ | `analysis` | Optional tail: `**--force-new-context-analysis**`, `**--no-worktree**` / `**--noworktree**` (**SRS-71**: use **`<repoRoot>`** as the analysis checkout; no detached temp worktree). **`--zero-reference-convergence=strict\|best_effort`** (**SRS-73** — overrides `**easyspecs.factory.zeroReferenceRemediation.convergence**` for this run). **`--upload`** → usage exit. **`--synthesis-only`**: accepted as a **hidden** Commander option (legacy scripts); **ignored** with one stderr **`[deprecated]`** line (**SRS-60**). | **SRS-32** full **Factory** loop: synthesis convergence (until no missing artefacts), coverage, zero-ref, report, **link mapping**, index assembly; **Phase 7** backend sync is skipped in CLI (**`skipBackendSync`**). **On success:** always promotes **`.gluecharm/context`** from the analysis worktree into **`<repoRoot>`** unless **`--no-promote`** or the checkout is already the repo root (stderr **`[pipeline:analysis]`**); **`easyspecs.analysis.promoteContextToWorkspace`** does **not** apply to this step. Push context with **`upload context`** / **`upload republish`** after analysis if you need cloud sync. Factory timing from **`merged.factory`** + `**--ci**` defaults. Debug: `**easyspecs.factory.debug**`. OpenCode options from `**merged.pipelineOpenCode**`. **SRS-46:** if **`easyspecs.factory.cloudContextAnalyzed`** is `**true**` and `**--force-new-context-analysis**` is absent, exits **0** without running the Factory (`**analysisSkipped**`, …). **SRS-59:** stale worktree registration recovery — see **§ Analysis git checkout** below. |
182
182
  | `update context` | Optional `**--no-worktree**` / `**--noworktree**` (**SRS-71**). **SRS-60** removed `**--upload**`. | **SRS-55:** incremental context refresh — baseline → git delta → analysis checkout → `**changes-since-date.md**` → optional remediation → **`--promote`** / **`--no-promote`** (global) → persist `**lastRunAt**`. With **`--no-worktree`**, the checkout is **`<repoRoot>`** (no temp worktree; seed/promote steps that would copy repo → same repo are skipped). Run **`upload context`** separately if you need cloud sync. |
183
183
  | `diagnose reference-coverage` | `**--root workspace**` \| `**--root worktree**`, optional `**--worktree <path>**` | Gate when `**easyspecs.diagnose.zeroReference.maxPercentNonReferenced**` is a finite number (not `**null**`): failure when metric exceeds it. |
184
184
  | `diagnose coordination-duplicates` | Same `**--root**` / `**--worktree**` | `**easyspecs.diagnose.coordinationDuplicates.strict**`: when `**true**` (default), duplicate/orphan issues fail the exit code. |
package/dist/main.cjs CHANGED
@@ -11226,11 +11226,13 @@ function mergeEasyspecsFactoryBlock(base, over) {
11226
11226
  const b = base ?? {};
11227
11227
  const o = over ?? {};
11228
11228
  const updateContext = b.updateContext || o.updateContext ? { ...b.updateContext ?? {}, ...o.updateContext ?? {} } : void 0;
11229
+ const zeroReferenceRemediation = b.zeroReferenceRemediation || o.zeroReferenceRemediation ? { ...b.zeroReferenceRemediation ?? {}, ...o.zeroReferenceRemediation ?? {} } : void 0;
11229
11230
  const out = {
11230
11231
  ...b,
11231
11232
  ...o,
11232
11233
  backoff: { ...b.backoff ?? {}, ...o.backoff ?? {} },
11233
- ...updateContext ? { updateContext } : {}
11234
+ ...updateContext ? { updateContext } : {},
11235
+ ...zeroReferenceRemediation ? { zeroReferenceRemediation } : {}
11234
11236
  };
11235
11237
  const backoffKeys = Object.keys(out.backoff ?? {});
11236
11238
  const topKeys = Object.keys(out).filter((k) => k !== "backoff");
@@ -11652,6 +11654,8 @@ function getEasyspecsMergedConfigValue(es, fullKey) {
11652
11654
  return es.factory?.cloudContextAnalyzedAt;
11653
11655
  case "easyspecs.analysis.cloudContextAnalyzedAt":
11654
11656
  return es.analysis?.cloudContextAnalyzedAt;
11657
+ case "easyspecs.factory.zeroReferenceRemediation.convergence":
11658
+ return es.factory?.zeroReferenceRemediation?.convergence;
11655
11659
  default:
11656
11660
  return void 0;
11657
11661
  }
@@ -11706,6 +11710,8 @@ function mergeEasyspecsCliSettings(cfg, overrides = {}) {
11706
11710
  if (ci && maxOuter === 0) {
11707
11711
  maxOuter = CI_DEFAULT_MAX_OUTER;
11708
11712
  }
11713
+ const zrCfg = readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.zeroReferenceRemediation.convergence") === "best_effort" ? "best_effort" : "strict";
11714
+ const zeroReferenceRemediationConvergence = overrides.zeroReferenceRemediationConvergence ?? zrCfg;
11709
11715
  const factory = {
11710
11716
  initialDelayMs: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.backoff.initialDelayMs") ?? 3e4,
11711
11717
  backoffMultiplier: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.backoff.multiplier") ?? 2,
@@ -11713,7 +11719,8 @@ function mergeEasyspecsCliSettings(cfg, overrides = {}) {
11713
11719
  maxOuterIterationsPerPhase: maxOuter,
11714
11720
  maxMacroPingPongCycles: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.maxPingPongCycles") ?? 5,
11715
11721
  synthesisRemediationShareBackoff: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.synthesisRemediationShareBackoff") !== false,
11716
- debugPhases: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.debug") === true
11722
+ debugPhases: readEasyspecsMergedSetting(cfg.easyspecs, "easyspecs.factory.debug") === true,
11723
+ zeroReferenceRemediationConvergence
11717
11724
  };
11718
11725
  const openCodeChildEnv = buildOpenCodeProviderEnvFromConfig(cfg, overrides.repoRoot);
11719
11726
  const projectConfigOverlay = cfg.easyspecs?.openCodeRuntime?.projectConfigOverlay;
@@ -14602,7 +14609,8 @@ function runOpenCodeAgent(cwd, args, options) {
14602
14609
  const child = (0, import_child_process2.spawn)(cmd, args, {
14603
14610
  cwd,
14604
14611
  shell: USE_SHELL,
14605
- env: spawnEnv
14612
+ env: spawnEnv,
14613
+ stdio: ["ignore", "pipe", "pipe"]
14606
14614
  });
14607
14615
  let childResourceLogged = false;
14608
14616
  let cpuBaseline;
@@ -22445,6 +22453,25 @@ async function runGenerateContextFactory(deps) {
22445
22453
  contextDir: ""
22446
22454
  };
22447
22455
  let pingPong = 0;
22456
+ let zrDegradedSummary;
22457
+ function syncRemediationAuditDegraded(paths, outerAttemptsUsed) {
22458
+ zrDegradedSummary = { converged: false, remainingPaths: paths, outerAttemptsUsed };
22459
+ const a = deps.remediationConvergenceAudit;
22460
+ if (a) {
22461
+ a.converged = false;
22462
+ a.remainingPaths = paths;
22463
+ a.outerAttemptsUsed = outerAttemptsUsed;
22464
+ }
22465
+ }
22466
+ function clearRemediationAuditConverged() {
22467
+ zrDegradedSummary = void 0;
22468
+ const a = deps.remediationConvergenceAudit;
22469
+ if (a) {
22470
+ a.converged = true;
22471
+ delete a.remainingPaths;
22472
+ delete a.outerAttemptsUsed;
22473
+ }
22474
+ }
22448
22475
  try {
22449
22476
  if (!deps.readiness) {
22450
22477
  setRowTimed("diagnose_readiness", "succeeded", "Readiness context omitted (narrow unit test).");
@@ -22500,6 +22527,15 @@ async function runGenerateContextFactory(deps) {
22500
22527
  await post();
22501
22528
  }
22502
22529
  outer: while (true) {
22530
+ if (pingPong > 0) {
22531
+ zrDegradedSummary = void 0;
22532
+ }
22533
+ if (deps.remediationConvergenceAudit) {
22534
+ const a = deps.remediationConvergenceAudit;
22535
+ delete a.converged;
22536
+ delete a.remainingPaths;
22537
+ delete a.outerAttemptsUsed;
22538
+ }
22503
22539
  if (isAborted(signal)) {
22504
22540
  setRowTimed("synthesis_convergence", "cancelled", "Stopped.");
22505
22541
  await post();
@@ -22682,15 +22718,27 @@ async function runGenerateContextFactory(deps) {
22682
22718
  }
22683
22719
  const remFailures = typeof rem.signals?.failures === "number" && Number.isFinite(rem.signals.failures) ? rem.signals.failures : 0;
22684
22720
  if (remFailures === 0) {
22721
+ clearRemediationAuditConverged();
22685
22722
  break;
22686
22723
  }
22687
22724
  remIter += 1;
22688
22725
  if (config.maxOuterIterationsPerPhase > 0 && remIter >= config.maxOuterIterationsPerPhase) {
22689
- setRowTimed(
22690
- "zero_reference_remediation_convergence",
22691
- "failed",
22692
- `${String(remFailures)} path(s) still failing after ${String(config.maxOuterIterationsPerPhase)} attempt(s).`
22693
- );
22726
+ const capDetail = `${String(remFailures)} path(s) still failing after ${String(config.maxOuterIterationsPerPhase)} attempt(s).`;
22727
+ const zrMode = config.zeroReferenceRemediationConvergence ?? "strict";
22728
+ if (zrMode === "best_effort") {
22729
+ const sig = rem.signals;
22730
+ const fpRaw = sig?.failedPaths;
22731
+ const failedPathsRem = Array.isArray(fpRaw) && fpRaw.every((p) => typeof p === "string") ? fpRaw : [];
22732
+ setRowTimed(
22733
+ "zero_reference_remediation_convergence",
22734
+ "succeeded",
22735
+ `Degraded (best-effort): ${capDetail}`
22736
+ );
22737
+ syncRemediationAuditDegraded(failedPathsRem, config.maxOuterIterationsPerPhase);
22738
+ await post();
22739
+ break;
22740
+ }
22741
+ setRowTimed("zero_reference_remediation_convergence", "failed", capDetail);
22694
22742
  await post();
22695
22743
  return fail("Zero-reference remediation did not converge.");
22696
22744
  }
@@ -22711,8 +22759,10 @@ async function runGenerateContextFactory(deps) {
22711
22759
  }
22712
22760
  remDelay = computeNextBackoffDelay(remDelay, config.backoffMultiplier, config.maxDelayMs);
22713
22761
  }
22714
- setRowTimed("zero_reference_remediation_convergence", "succeeded");
22715
- await post();
22762
+ if (row(phases, "zero_reference_remediation_convergence")?.status !== "succeeded") {
22763
+ setRowTimed("zero_reference_remediation_convergence", "succeeded");
22764
+ await post();
22765
+ }
22716
22766
  if (isAborted(signal)) {
22717
22767
  setRowTimed("link_mapping_pipeline", "cancelled");
22718
22768
  await post();
@@ -22837,7 +22887,13 @@ async function runGenerateContextFactory(deps) {
22837
22887
  }
22838
22888
  setRowTimed("backend_context_sync", "succeeded", up.message);
22839
22889
  await post();
22840
- return fin({ ok: true, message: "Analysis complete.", totalElapsedMs: macroEnd() });
22890
+ const zrPayload = zrDegradedSummary;
22891
+ return fin({
22892
+ ok: true,
22893
+ message: "Analysis complete.",
22894
+ totalElapsedMs: macroEnd(),
22895
+ ...zrPayload ? { zeroReferenceRemediation: zrPayload } : {}
22896
+ });
22841
22897
  } catch (e) {
22842
22898
  const msg = e instanceof Error ? e.message : String(e);
22843
22899
  return fail(msg);
@@ -25275,6 +25331,7 @@ async function runRemediationPipelineZeroRefPass(p) {
25275
25331
  let cancelled = false;
25276
25332
  let completed = 0;
25277
25333
  let failures = 0;
25334
+ const failedPathSet = /* @__PURE__ */ new Set();
25278
25335
  const limit = Math.max(1, Math.min(64, p.maxConcurrent));
25279
25336
  let nextIndex = 0;
25280
25337
  async function worker() {
@@ -25307,10 +25364,12 @@ async function runRemediationPipelineZeroRefPass(p) {
25307
25364
  completed += 1;
25308
25365
  } else if (!r.cancelled) {
25309
25366
  failures += 1;
25367
+ failedPathSet.add(fp);
25310
25368
  }
25311
25369
  await p.onFileDone?.(fp, r);
25312
25370
  } catch (e) {
25313
25371
  failures += 1;
25372
+ failedPathSet.add(fp);
25314
25373
  const msg = e instanceof Error ? e.message : String(e);
25315
25374
  await p.onFileDone?.(fp, { ok: false, message: msg });
25316
25375
  }
@@ -25321,7 +25380,8 @@ async function runRemediationPipelineZeroRefPass(p) {
25321
25380
  if (p.abortSignal?.aborted) {
25322
25381
  cancelled = true;
25323
25382
  }
25324
- return { cancelled, completed, failures };
25383
+ const failedPaths = [...failedPathSet].sort((a, b) => a.localeCompare(b));
25384
+ return { cancelled, completed, failures, failedPaths };
25325
25385
  }
25326
25386
 
25327
25387
  // src/pipelines/coverage/coverageExecutionReport.ts
@@ -25338,7 +25398,7 @@ function formatMetric(value) {
25338
25398
  }
25339
25399
  return "\u2014";
25340
25400
  }
25341
- function formatReferenceCoverageExecutionReportMarkdown(coverageData, noActions) {
25401
+ function formatReferenceCoverageExecutionReportMarkdown(coverageData, noActions, remainingRemediationFailures) {
25342
25402
  const o = coverageData;
25343
25403
  const m = o.metrics ?? {};
25344
25404
  const generatedAt = typeof o.generatedAt === "string" ? o.generatedAt : formatMetric(o.generatedAt);
@@ -25375,6 +25435,18 @@ function formatReferenceCoverageExecutionReportMarkdown(coverageData, noActions)
25375
25435
  }
25376
25436
  lines.push("");
25377
25437
  }
25438
+ const backlog = remainingRemediationFailures && remainingRemediationFailures.length > 0 ? [...remainingRemediationFailures].sort((a, b) => a.localeCompare(b)) : [];
25439
+ if (backlog.length > 0) {
25440
+ lines.push("## Remediation backlog (non-converged)", "");
25441
+ lines.push(
25442
+ "Factory **best-effort** remediation ended with these paths still failing the zero-reference pool:",
25443
+ ""
25444
+ );
25445
+ for (const fp of backlog) {
25446
+ lines.push(`- ${fp}`);
25447
+ }
25448
+ lines.push("");
25449
+ }
25378
25450
  const iso = (/* @__PURE__ */ new Date()).toISOString();
25379
25451
  lines.push(
25380
25452
  "## Revision",
@@ -25384,7 +25456,7 @@ function formatReferenceCoverageExecutionReportMarkdown(coverageData, noActions)
25384
25456
  );
25385
25457
  return lines.join("\n");
25386
25458
  }
25387
- function validateReferenceCoverageExecutionReportMarkdown(body) {
25459
+ function validateReferenceCoverageExecutionReportMarkdown(body, options) {
25388
25460
  const errors = [];
25389
25461
  const checks = [
25390
25462
  { re: /^#\s+Reference coverage execution report\s*$/m, label: "`# Reference coverage execution report`" },
@@ -25396,6 +25468,11 @@ function validateReferenceCoverageExecutionReportMarkdown(body) {
25396
25468
  errors.push(`Missing required heading ${c.label}`);
25397
25469
  }
25398
25470
  }
25471
+ if (options?.requireRemediationBacklogSection === true) {
25472
+ if (!/^##\s+Remediation backlog \(non-converged\)\s*$/m.test(body)) {
25473
+ errors.push("Missing required heading `## Remediation backlog (non-converged)`");
25474
+ }
25475
+ }
25399
25476
  return errors.length === 0 ? { ok: true } : { ok: false, errors };
25400
25477
  }
25401
25478
  function collectNoActionRowsFromRoutingDoc(doc) {
@@ -25463,8 +25540,11 @@ async function runCoverageExecutionReport(p) {
25463
25540
  return { ok: false, error: routingRead.error };
25464
25541
  }
25465
25542
  const noActions = collectNoActionRowsFromRoutingDoc(routingRead.doc);
25466
- const md = formatReferenceCoverageExecutionReportMarkdown(cov.data, noActions);
25467
- const val = validateReferenceCoverageExecutionReportMarkdown(md);
25543
+ const backlog = p.remainingRemediationFailures?.length ? p.remainingRemediationFailures : void 0;
25544
+ const md = formatReferenceCoverageExecutionReportMarkdown(cov.data, noActions, backlog);
25545
+ const val = validateReferenceCoverageExecutionReportMarkdown(md, {
25546
+ requireRemediationBacklogSection: backlog !== void 0 && backlog.length > 0
25547
+ });
25468
25548
  if (!val.ok) {
25469
25549
  return { ok: false, error: `Report markdown invalid: ${val.errors.join("; ")}` };
25470
25550
  }
@@ -25568,13 +25648,15 @@ function buildFactoryPipelineRegistry(cb) {
25568
25648
  exitCondition: FACTORY_PIPELINE_EXIT_CONDITIONS.zero_reference_remediation_convergence ?? "",
25569
25649
  async run(_input, _ctx) {
25570
25650
  const rem = await cb.runRemediationPass();
25651
+ const failedPaths = Array.isArray(rem.failedPaths) ? rem.failedPaths : [];
25652
+ const signals = { failures: rem.failures, failedPaths };
25571
25653
  if (rem.cancelled) {
25572
- return { ok: false, cancelled: true, message: rem.message ?? "Remediation cancelled.", signals: { failures: rem.failures } };
25654
+ return { ok: false, cancelled: true, message: rem.message ?? "Remediation cancelled.", signals };
25573
25655
  }
25574
25656
  if (!rem.ok) {
25575
- return { ok: false, message: rem.message ?? "Remediation failed.", signals: { failures: rem.failures } };
25657
+ return { ok: false, message: rem.message ?? "Remediation failed.", signals };
25576
25658
  }
25577
- return { ok: true, signals: { failures: rem.failures }, message: rem.message };
25659
+ return { ok: true, signals, message: rem.message };
25578
25660
  }
25579
25661
  };
25580
25662
  const linkMapping = {
@@ -25623,6 +25705,7 @@ function buildFactoryPipelineRegistry(cb) {
25623
25705
  function buildFactoryDepsHeadless(input) {
25624
25706
  const { storageContext, repoRoot, agentsDirFs, buildOpenCodeOptions, log, signal, macroConfig } = input;
25625
25707
  const inPlace = input.inPlace === true;
25708
+ const remediationConvergenceAudit = {};
25626
25709
  let adHocWorktree;
25627
25710
  let macroFinalize;
25628
25711
  let macroSourceBranch;
@@ -25789,9 +25872,9 @@ function buildFactoryDepsHeadless(input) {
25789
25872
  onAgentLaunched: () => noteOpenCodeAgentLaunched()
25790
25873
  });
25791
25874
  if (poolRes.cancelled) {
25792
- return { ok: false, cancelled: true, failures: poolRes.failures, message: "Remediation stopped." };
25875
+ return { ok: false, cancelled: true, failures: poolRes.failures, failedPaths: poolRes.failedPaths, message: "Remediation stopped." };
25793
25876
  }
25794
- return { ok: true, failures: poolRes.failures };
25877
+ return { ok: true, failures: poolRes.failures, failedPaths: poolRes.failedPaths };
25795
25878
  };
25796
25879
  const runExecutionReport = async () => {
25797
25880
  const ar = analysisRoot();
@@ -25803,10 +25886,13 @@ function buildFactoryDepsHeadless(input) {
25803
25886
  return { ok: false, message: execLayout.error };
25804
25887
  }
25805
25888
  log(`[factory] reference coverage execution report \u2014 ${(/* @__PURE__ */ new Date()).toISOString()}`);
25889
+ const audit = remediationConvergenceAudit;
25890
+ const remaining = audit.converged === false && Array.isArray(audit.remainingPaths) && audit.remainingPaths.length > 0 ? audit.remainingPaths : void 0;
25806
25891
  const res = await runCoverageExecutionReport({
25807
25892
  repositoryRootAbs: ar,
25808
25893
  abortSignal: signal,
25809
- diagnosticLog: log
25894
+ diagnosticLog: log,
25895
+ ...remaining ? { remainingRemediationFailures: remaining } : {}
25810
25896
  });
25811
25897
  if (res.ok) {
25812
25898
  return { ok: true, message: `Report: ${path45.basename(res.outputAbsolutePath)}` };
@@ -25887,6 +25973,7 @@ function buildFactoryDepsHeadless(input) {
25887
25973
  },
25888
25974
  ...inPlace ? { analysisInPlace: true } : {},
25889
25975
  readiness: input.readiness,
25976
+ remediationConvergenceAudit,
25890
25977
  config: { ...macroConfig, ...input.synthesisOnly ? { synthesisOnly: true } : {} },
25891
25978
  sleep: (ms) => sleepUntilAborted(ms, signal),
25892
25979
  post: (payload) => {
@@ -29084,7 +29171,27 @@ function formatCliStderrLine(line, useAnsi) {
29084
29171
  }
29085
29172
 
29086
29173
  // src/cli/main.ts
29087
- var PKG_VERSION = "0.3.2";
29174
+ function resolveCliPackageVersion() {
29175
+ if (true) {
29176
+ return "0.3.5";
29177
+ }
29178
+ const candidates = [
29179
+ path61.join(__dirname, "..", "package.json"),
29180
+ path61.join(__dirname, "..", "..", "packages", "cli", "package.json")
29181
+ ];
29182
+ for (const pkgPath of candidates) {
29183
+ try {
29184
+ const raw = fs63.readFileSync(pkgPath, "utf8");
29185
+ const j = JSON.parse(raw);
29186
+ if (typeof j.version === "string") {
29187
+ return j.version;
29188
+ }
29189
+ } catch {
29190
+ }
29191
+ }
29192
+ return "0.0.0-dev";
29193
+ }
29194
+ var PKG_VERSION = resolveCliPackageVersion();
29088
29195
  function isNonEmptyFactoryFailureArray(x) {
29089
29196
  if (!Array.isArray(x) || x.length === 0) {
29090
29197
  return false;
@@ -30040,7 +30147,19 @@ async function main() {
30040
30147
  finish(ExitCode.internal, { ok: false, error: "Unexpected context drift result shape." });
30041
30148
  }
30042
30149
  if (pos[0] === "analysis") {
30150
+ let zrConvCli;
30043
30151
  for (const a of pos.slice(1)) {
30152
+ if (a.startsWith("--zero-reference-convergence=")) {
30153
+ const v = a.slice("--zero-reference-convergence=".length).trim();
30154
+ if (v !== "strict" && v !== "best_effort") {
30155
+ finish(ExitCode.usage, {
30156
+ ok: false,
30157
+ error: `invalid --zero-reference-convergence (use strict or best_effort): ${v}`
30158
+ });
30159
+ }
30160
+ zrConvCli = v;
30161
+ continue;
30162
+ }
30044
30163
  if (a === "--force-new-context-analysis") {
30045
30164
  continue;
30046
30165
  }
@@ -30061,9 +30180,16 @@ async function main() {
30061
30180
  }
30062
30181
  finish(ExitCode.usage, {
30063
30182
  ok: false,
30064
- error: `unknown analysis flag: ${a} (allowed: --force-new-context-analysis, --in-place, --no-worktree, --noworktree)`
30183
+ error: `unknown analysis flag: ${a} (allowed: --force-new-context-analysis, --in-place, --no-worktree, --noworktree, --zero-reference-convergence=strict|best_effort)`
30065
30184
  });
30066
30185
  }
30186
+ const mergedForAnalysis = mergeEasyspecsCliSettings(repoConfig, {
30187
+ ci: flags.ci,
30188
+ apiBaseUrl: flags.apiBaseUrl,
30189
+ promote: flags.promote,
30190
+ repoRoot,
30191
+ ...zrConvCli ? { zeroReferenceRemediationConvergence: zrConvCli } : {}
30192
+ });
30067
30193
  const forceNewContextAnalysis = positionals.includes("--force-new-context-analysis");
30068
30194
  const analysisInPlace = positionals.includes("--no-worktree") || positionals.includes("--noworktree") || positionals.includes("--in-place");
30069
30195
  const cloudCached = readEasyspecsMergedSetting(
@@ -30095,19 +30221,19 @@ async function main() {
30095
30221
  cliVersion: PKG_VERSION,
30096
30222
  repoRootAbs: repoRoot,
30097
30223
  analysisRootAbs: repoRoot,
30098
- apiBaseUrl: merged.apiBaseUrl,
30099
- openCodeExecutable: merged.openCodeExecutable,
30100
- openCodeSkipCredentialsCheck: merged.openCodeSkipCredentialsCheck,
30101
- providerEnvFromConfig: merged.openCodeChildEnv,
30224
+ apiBaseUrl: mergedForAnalysis.apiBaseUrl,
30225
+ openCodeExecutable: mergedForAnalysis.openCodeExecutable,
30226
+ openCodeSkipCredentialsCheck: mergedForAnalysis.openCodeSkipCredentialsCheck,
30227
+ providerEnvFromConfig: mergedForAnalysis.openCodeChildEnv,
30102
30228
  repoConfig
30103
30229
  };
30104
30230
  const deps = buildFactoryDepsHeadless({
30105
30231
  storageContext: storage,
30106
30232
  repoRoot,
30107
30233
  agentsDirFs: agentsDir,
30108
- macroConfig: merged.factory,
30234
+ macroConfig: mergedForAnalysis.factory,
30109
30235
  buildOpenCodeOptions: (checkout) => ({
30110
- ...merged.pipelineOpenCode
30236
+ ...mergedForAnalysis.pipelineOpenCode
30111
30237
  }),
30112
30238
  log: (line) => logErr(flags, line),
30113
30239
  signal: ctrl.signal,
@@ -30147,7 +30273,8 @@ async function main() {
30147
30273
  cancelled: res.cancelled,
30148
30274
  totalElapsedMs: res.totalElapsedMs,
30149
30275
  error: res.message,
30150
- ...analysisInPlace ? { analysisInPlace: true } : {}
30276
+ ...analysisInPlace ? { analysisInPlace: true } : {},
30277
+ ...res.zeroReferenceRemediation ? { zeroReferenceRemediation: res.zeroReferenceRemediation } : {}
30151
30278
  };
30152
30279
  if (!res.ok && !res.cancelled && res.factoryFailures && res.factoryFailures.length > 0) {
30153
30280
  analysisEnvelope.factoryFailures = res.factoryFailures;