@harness-engineering/cli 1.12.0 → 1.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +57 -9
  2. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +1 -1
  3. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +19 -2
  4. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +39 -12
  5. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +28 -11
  6. package/dist/agents/skills/claude-code/harness-roadmap/SKILL.md +34 -0
  7. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +42 -0
  8. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +57 -9
  9. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +1 -1
  10. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +19 -2
  11. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +39 -12
  12. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +28 -11
  13. package/dist/agents/skills/gemini-cli/harness-roadmap/SKILL.md +34 -0
  14. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +42 -0
  15. package/dist/{agents-md-KIS2RSMG.js → agents-md-P2RHSUV7.js} +1 -1
  16. package/dist/{architecture-AJAUDRQQ.js → architecture-ESOOE26S.js} +2 -2
  17. package/dist/bin/harness-mcp.js +10 -10
  18. package/dist/bin/harness.js +12 -12
  19. package/dist/{check-phase-gate-K7QCSYRJ.js → check-phase-gate-S2MZKLFQ.js} +2 -2
  20. package/dist/{chunk-2SWJ4VO7.js → chunk-2VU4MFM3.js} +4 -4
  21. package/dist/{chunk-ZU2UBYBY.js → chunk-3KOLLWWE.js} +1 -1
  22. package/dist/{chunk-EAURF4LH.js → chunk-5VY23YK3.js} +1 -1
  23. package/dist/{chunk-747VBPA4.js → chunk-7KQSUZVG.js} +96 -50
  24. package/dist/{chunk-FLOEMHDF.js → chunk-7PZWR4LI.js} +3 -3
  25. package/dist/{chunk-AE2OWWDH.js → chunk-KELT6K6M.js} +590 -253
  26. package/dist/{chunk-TJVVU3HB.js → chunk-LD3DKUK5.js} +1 -1
  27. package/dist/{chunk-JLXOEO5C.js → chunk-MACVXDZK.js} +2 -2
  28. package/dist/{chunk-CTTFXXKJ.js → chunk-MI5XJQDY.js} +3 -3
  29. package/dist/{chunk-YXOG2277.js → chunk-PSNN4LWX.js} +2 -2
  30. package/dist/{chunk-B5SBNH4S.js → chunk-RZSUJBZZ.js} +74 -14
  31. package/dist/{chunk-OIGVQF5V.js → chunk-WPPDRIJL.js} +1 -1
  32. package/dist/{ci-workflow-NBL4OT4A.js → ci-workflow-4NYBUG6R.js} +1 -1
  33. package/dist/{dist-IJ4J4C5G.js → dist-WF4C7A4A.js} +25 -1
  34. package/dist/{docs-CPTMH3VY.js → docs-BPYCN2DR.js} +2 -2
  35. package/dist/{engine-BUWPAAGD.js → engine-LXLIWQQ3.js} +1 -1
  36. package/dist/{entropy-Z4FYVQ7L.js → entropy-4VDVV5CR.js} +2 -2
  37. package/dist/{feedback-TT6WF5YX.js → feedback-63QB5RCA.js} +1 -1
  38. package/dist/{generate-agent-definitions-J5HANRNR.js → generate-agent-definitions-QABOJG56.js} +1 -1
  39. package/dist/index.d.ts +41 -41
  40. package/dist/index.js +12 -12
  41. package/dist/{loader-PCU5YWRH.js → loader-Z2IT7QX3.js} +1 -1
  42. package/dist/{mcp-YM6QLHLZ.js → mcp-KQHEL5IF.js} +10 -10
  43. package/dist/{performance-YJVXOKIB.js → performance-26BH47O4.js} +2 -2
  44. package/dist/{review-pipeline-KGMIMLIE.js → review-pipeline-GHR3WFBI.js} +1 -1
  45. package/dist/{runtime-F6R27LD6.js → runtime-PDWD7UIK.js} +1 -1
  46. package/dist/{security-MX5VVXBC.js → security-UQFUZXEN.js} +1 -1
  47. package/dist/{validate-EFNMSFKD.js → validate-N7QJOKFZ.js} +2 -2
  48. package/dist/{validate-cross-check-LJX65SBS.js → validate-cross-check-EDQ5QGTM.js} +1 -1
  49. package/package.json +4 -4
@@ -106,8 +106,8 @@ var ConstraintRuleSchema = z.object({
106
106
  // forward-compat for governs edges
107
107
  });
108
108
  function violationId(relativePath, category, normalizedDetail) {
109
- const path13 = relativePath.replace(/\\/g, "/");
110
- const input = `${path13}:${category}:${normalizedDetail}`;
109
+ const path20 = relativePath.replace(/\\/g, "/");
110
+ const input = `${path20}:${category}:${normalizedDetail}`;
111
111
  return createHash("sha256").update(input).digest("hex");
112
112
  }
113
113
  function constraintRuleId(category, scope, description) {
@@ -139,17 +139,17 @@ function resolveFileToLayer(file, layers) {
139
139
  }
140
140
  var accessAsync = promisify(access);
141
141
  var readFileAsync = promisify(readFile);
142
- async function fileExists(path13) {
142
+ async function fileExists(path20) {
143
143
  try {
144
- await accessAsync(path13, constants.F_OK);
144
+ await accessAsync(path20, constants.F_OK);
145
145
  return true;
146
146
  } catch {
147
147
  return false;
148
148
  }
149
149
  }
150
- async function readFileContent(path13) {
150
+ async function readFileContent(path20) {
151
151
  try {
152
- const content = await readFileAsync(path13, "utf-8");
152
+ const content = await readFileAsync(path20, "utf-8");
153
153
  return Ok(content);
154
154
  } catch (error) {
155
155
  return Err(error);
@@ -1770,31 +1770,45 @@ import * as path from "path";
1770
1770
  import { appendFileSync, writeFileSync as writeFileSync22, existsSync as existsSync22, mkdirSync as mkdirSync22 } from "fs";
1771
1771
  import { dirname as dirname7 } from "path";
1772
1772
  import { z as z3 } from "zod";
1773
- import * as fs6 from "fs";
1774
- import * as path3 from "path";
1775
- import { execSync as execSync2 } from "child_process";
1773
+ import * as fs8 from "fs";
1774
+ import * as path5 from "path";
1775
+ import * as fs7 from "fs";
1776
+ import * as path4 from "path";
1776
1777
  import * as fs5 from "fs";
1777
1778
  import * as path2 from "path";
1778
1779
  import { execSync } from "child_process";
1779
1780
  import { z as z4 } from "zod";
1780
- import * as fs8 from "fs/promises";
1781
- import { z as z5 } from "zod";
1782
- import * as fs7 from "fs";
1783
- import * as path4 from "path";
1784
- import * as path5 from "path";
1781
+ import * as fs6 from "fs";
1782
+ import * as path3 from "path";
1783
+ import * as fs9 from "fs";
1785
1784
  import * as path6 from "path";
1785
+ import * as fs10 from "fs";
1786
1786
  import * as path7 from "path";
1787
+ import * as fs11 from "fs";
1787
1788
  import * as path8 from "path";
1788
- import * as fs9 from "fs";
1789
+ import * as fs12 from "fs";
1789
1790
  import * as path9 from "path";
1790
- import { z as z6 } from "zod";
1791
- import * as fs10 from "fs/promises";
1791
+ import { execSync as execSync2 } from "child_process";
1792
+ import * as fs13 from "fs";
1792
1793
  import * as path10 from "path";
1793
- import * as fs11 from "fs/promises";
1794
+ import * as fs15 from "fs/promises";
1795
+ import { z as z5 } from "zod";
1796
+ import * as fs14 from "fs";
1794
1797
  import * as path11 from "path";
1795
- import * as ejs from "ejs";
1796
- import * as fs12 from "fs";
1797
1798
  import * as path12 from "path";
1799
+ import * as path13 from "path";
1800
+ import * as path14 from "path";
1801
+ import * as path15 from "path";
1802
+ import * as fs16 from "fs";
1803
+ import * as path16 from "path";
1804
+ import { z as z6 } from "zod";
1805
+ import * as fs17 from "fs/promises";
1806
+ import * as path17 from "path";
1807
+ import * as fs18 from "fs/promises";
1808
+ import * as path18 from "path";
1809
+ import * as ejs from "ejs";
1810
+ import * as fs19 from "fs";
1811
+ import * as path19 from "path";
1798
1812
  import * as os from "os";
1799
1813
  import { spawn } from "child_process";
1800
1814
  async function validateFileStructure(projectPath, conventions) {
@@ -1832,15 +1846,15 @@ function validateConfig(data, schema) {
1832
1846
  let message = "Configuration validation failed";
1833
1847
  const suggestions = [];
1834
1848
  if (firstError) {
1835
- const path13 = firstError.path.join(".");
1836
- const pathDisplay = path13 ? ` at "${path13}"` : "";
1849
+ const path20 = firstError.path.join(".");
1850
+ const pathDisplay = path20 ? ` at "${path20}"` : "";
1837
1851
  if (firstError.code === "invalid_type") {
1838
1852
  const received = firstError.received;
1839
1853
  const expected = firstError.expected;
1840
1854
  if (received === "undefined") {
1841
1855
  code = "MISSING_FIELD";
1842
1856
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
1843
- suggestions.push(`Field "${path13}" is required and must be of type "${expected}"`);
1857
+ suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
1844
1858
  } else {
1845
1859
  code = "INVALID_TYPE";
1846
1860
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -2046,30 +2060,27 @@ function extractSections(content) {
2046
2060
  return result;
2047
2061
  });
2048
2062
  }
2049
- function isExternalLink(path13) {
2050
- return path13.startsWith("http://") || path13.startsWith("https://") || path13.startsWith("#") || path13.startsWith("mailto:");
2063
+ function isExternalLink(path20) {
2064
+ return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
2051
2065
  }
2052
2066
  function resolveLinkPath(linkPath, baseDir) {
2053
2067
  return linkPath.startsWith(".") ? join4(baseDir, linkPath) : linkPath;
2054
2068
  }
2055
- async function validateAgentsMap(path13 = "./AGENTS.md") {
2056
- console.warn(
2057
- "[harness] validateAgentsMap() is deprecated. Use graph-based validation via Assembler.checkCoverage() from @harness-engineering/graph"
2058
- );
2059
- const contentResult = await readFileContent(path13);
2069
+ async function validateAgentsMap(path20 = "./AGENTS.md") {
2070
+ const contentResult = await readFileContent(path20);
2060
2071
  if (!contentResult.ok) {
2061
2072
  return Err(
2062
2073
  createError(
2063
2074
  "PARSE_ERROR",
2064
2075
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
2065
- { path: path13 },
2076
+ { path: path20 },
2066
2077
  ["Ensure the file exists", "Check file permissions"]
2067
2078
  )
2068
2079
  );
2069
2080
  }
2070
2081
  const content = contentResult.value;
2071
2082
  const sections = extractSections(content);
2072
- const baseDir = dirname4(path13);
2083
+ const baseDir = dirname4(path20);
2073
2084
  const sectionTitles = sections.map((s) => s.title);
2074
2085
  const missingSections = REQUIRED_SECTIONS.filter(
2075
2086
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -2203,8 +2214,8 @@ async function checkDocCoverage(domain, options = {}) {
2203
2214
  );
2204
2215
  }
2205
2216
  }
2206
- function suggestFix(path13, existingFiles) {
2207
- const targetName = basename2(path13).toLowerCase();
2217
+ function suggestFix(path20, existingFiles) {
2218
+ const targetName = basename2(path20).toLowerCase();
2208
2219
  const similar = existingFiles.find((file) => {
2209
2220
  const fileName = basename2(file).toLowerCase();
2210
2221
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -2212,12 +2223,9 @@ function suggestFix(path13, existingFiles) {
2212
2223
  if (similar) {
2213
2224
  return `Did you mean "${similar}"?`;
2214
2225
  }
2215
- return `Create the file "${path13}" or remove the link`;
2226
+ return `Create the file "${path20}" or remove the link`;
2216
2227
  }
2217
2228
  async function validateKnowledgeMap(rootDir = process.cwd()) {
2218
- console.warn(
2219
- "[harness] validateKnowledgeMap() is deprecated. Use graph-based validation via Assembler.checkCoverage() from @harness-engineering/graph"
2220
- );
2221
2229
  const agentsPath = join22(rootDir, "AGENTS.md");
2222
2230
  const agentsResult = await validateAgentsMap(agentsPath);
2223
2231
  if (!agentsResult.ok) {
@@ -2558,8 +2566,8 @@ function createBoundaryValidator(schema, name) {
2558
2566
  return Ok(result.data);
2559
2567
  }
2560
2568
  const suggestions = result.error.issues.map((issue) => {
2561
- const path13 = issue.path.join(".");
2562
- return path13 ? `${path13}: ${issue.message}` : issue.message;
2569
+ const path20 = issue.path.join(".");
2570
+ return path20 ? `${path20}: ${issue.message}` : issue.message;
2563
2571
  });
2564
2572
  return Err(
2565
2573
  createError(
@@ -3089,11 +3097,11 @@ function walk(node, visitor) {
3089
3097
  var TypeScriptParser = class {
3090
3098
  name = "typescript";
3091
3099
  extensions = [".ts", ".tsx", ".mts", ".cts"];
3092
- async parseFile(path13) {
3093
- const contentResult = await readFileContent(path13);
3100
+ async parseFile(path20) {
3101
+ const contentResult = await readFileContent(path20);
3094
3102
  if (!contentResult.ok) {
3095
3103
  return Err(
3096
- createParseError("NOT_FOUND", `File not found: ${path13}`, { path: path13 }, [
3104
+ createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
3097
3105
  "Check that the file exists",
3098
3106
  "Verify the path is correct"
3099
3107
  ])
@@ -3103,7 +3111,7 @@ var TypeScriptParser = class {
3103
3111
  const ast = parse(contentResult.value, {
3104
3112
  loc: true,
3105
3113
  range: true,
3106
- jsx: path13.endsWith(".tsx"),
3114
+ jsx: path20.endsWith(".tsx"),
3107
3115
  errorOnUnknownASTType: false
3108
3116
  });
3109
3117
  return Ok({
@@ -3114,7 +3122,7 @@ var TypeScriptParser = class {
3114
3122
  } catch (e) {
3115
3123
  const error = e;
3116
3124
  return Err(
3117
- createParseError("SYNTAX_ERROR", `Failed to parse ${path13}: ${error.message}`, { path: path13 }, [
3125
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
3118
3126
  "Check for syntax errors in the file",
3119
3127
  "Ensure valid TypeScript syntax"
3120
3128
  ])
@@ -3394,22 +3402,22 @@ function extractInlineRefs(content) {
3394
3402
  }
3395
3403
  return refs;
3396
3404
  }
3397
- async function parseDocumentationFile(path13) {
3398
- const contentResult = await readFileContent(path13);
3405
+ async function parseDocumentationFile(path20) {
3406
+ const contentResult = await readFileContent(path20);
3399
3407
  if (!contentResult.ok) {
3400
3408
  return Err(
3401
3409
  createEntropyError(
3402
3410
  "PARSE_ERROR",
3403
- `Failed to read documentation file: ${path13}`,
3404
- { file: path13 },
3411
+ `Failed to read documentation file: ${path20}`,
3412
+ { file: path20 },
3405
3413
  ["Check that the file exists"]
3406
3414
  )
3407
3415
  );
3408
3416
  }
3409
3417
  const content = contentResult.value;
3410
- const type = path13.endsWith(".md") ? "markdown" : "text";
3418
+ const type = path20.endsWith(".md") ? "markdown" : "text";
3411
3419
  return Ok({
3412
- path: path13,
3420
+ path: path20,
3413
3421
  type,
3414
3422
  content,
3415
3423
  codeBlocks: extractCodeBlocks(content),
@@ -4210,15 +4218,34 @@ async function detectPatternViolations(snapshot, config) {
4210
4218
  }
4211
4219
  }
4212
4220
  }
4221
+ if (config?.customPatterns) {
4222
+ for (const file of snapshot.files) {
4223
+ for (const custom of config.customPatterns) {
4224
+ const matches = custom.check(file, snapshot);
4225
+ for (const match of matches) {
4226
+ violations.push({
4227
+ pattern: custom.name,
4228
+ file: file.path,
4229
+ line: match.line,
4230
+ message: match.message,
4231
+ suggestion: match.suggestion || "Review and fix this pattern violation",
4232
+ severity: custom.severity
4233
+ });
4234
+ }
4235
+ }
4236
+ }
4237
+ }
4213
4238
  const errorCount = violations.filter((v) => v.severity === "error").length;
4214
4239
  const warningCount = violations.filter((v) => v.severity === "warning").length;
4215
- const totalChecks = snapshot.files.length * patterns.length;
4216
- const passRate = totalChecks > 0 ? (totalChecks - violations.length) / totalChecks : 1;
4240
+ const customCount = config?.customPatterns?.length ?? 0;
4241
+ const allPatternsCount = patterns.length + customCount;
4242
+ const totalChecks = snapshot.files.length * allPatternsCount;
4243
+ const passRate = totalChecks > 0 ? Math.max(0, (totalChecks - violations.length) / totalChecks) : 1;
4217
4244
  return Ok({
4218
4245
  violations,
4219
4246
  stats: {
4220
4247
  filesChecked: snapshot.files.length,
4221
- patternsApplied: patterns.length,
4248
+ patternsApplied: allPatternsCount,
4222
4249
  violationCount: violations.length,
4223
4250
  errorCount,
4224
4251
  warningCount
@@ -6349,8 +6376,16 @@ var DEFAULT_STREAM_INDEX = {
6349
6376
  streams: {}
6350
6377
  };
6351
6378
  var HARNESS_DIR = ".harness";
6352
- var STREAMS_DIR = "streams";
6379
+ var STATE_FILE = "state.json";
6380
+ var LEARNINGS_FILE = "learnings.md";
6381
+ var FAILURES_FILE = "failures.md";
6382
+ var HANDOFF_FILE = "handoff.json";
6383
+ var GATE_CONFIG_FILE = "gate.json";
6353
6384
  var INDEX_FILE = "index.json";
6385
+ var SESSIONS_DIR = "sessions";
6386
+ var SESSION_INDEX_FILE = "index.md";
6387
+ var SUMMARY_FILE = "summary.md";
6388
+ var STREAMS_DIR = "streams";
6354
6389
  var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
6355
6390
  function streamsDir(projectPath) {
6356
6391
  return path2.join(projectPath, HARNESS_DIR, STREAMS_DIR);
@@ -6576,25 +6611,60 @@ async function migrateToStreams(projectPath) {
6576
6611
  };
6577
6612
  return saveStreamIndex(projectPath, index);
6578
6613
  }
6579
- var HARNESS_DIR2 = ".harness";
6580
- var STATE_FILE = "state.json";
6581
- var LEARNINGS_FILE = "learnings.md";
6582
- var FAILURES_FILE = "failures.md";
6583
- var HANDOFF_FILE = "handoff.json";
6584
- var GATE_CONFIG_FILE = "gate.json";
6585
- var INDEX_FILE2 = "index.json";
6614
+ function resolveSessionDir(projectPath, sessionSlug, options) {
6615
+ if (!sessionSlug || sessionSlug.trim() === "") {
6616
+ return Err(new Error("Session slug must not be empty"));
6617
+ }
6618
+ if (sessionSlug.includes("..") || sessionSlug.includes("/") || sessionSlug.includes("\\")) {
6619
+ return Err(
6620
+ new Error(`Invalid session slug '${sessionSlug}': must not contain path traversal characters`)
6621
+ );
6622
+ }
6623
+ const sessionDir = path3.join(projectPath, HARNESS_DIR, SESSIONS_DIR, sessionSlug);
6624
+ if (options?.create) {
6625
+ fs6.mkdirSync(sessionDir, { recursive: true });
6626
+ }
6627
+ return Ok(sessionDir);
6628
+ }
6629
+ function updateSessionIndex(projectPath, sessionSlug, description) {
6630
+ const sessionsDir = path3.join(projectPath, HARNESS_DIR, SESSIONS_DIR);
6631
+ fs6.mkdirSync(sessionsDir, { recursive: true });
6632
+ const indexPath2 = path3.join(sessionsDir, SESSION_INDEX_FILE);
6633
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6634
+ const newLine = `- [${sessionSlug}](${sessionSlug}/summary.md) \u2014 ${description} (${date})`;
6635
+ if (!fs6.existsSync(indexPath2)) {
6636
+ fs6.writeFileSync(indexPath2, `## Active Sessions
6637
+
6638
+ ${newLine}
6639
+ `);
6640
+ return;
6641
+ }
6642
+ const content = fs6.readFileSync(indexPath2, "utf-8");
6643
+ const lines = content.split("\n");
6644
+ const slugPattern = `- [${sessionSlug}]`;
6645
+ const existingIdx = lines.findIndex((l) => l.startsWith(slugPattern));
6646
+ if (existingIdx >= 0) {
6647
+ lines[existingIdx] = newLine;
6648
+ } else {
6649
+ const lastNonEmpty = lines.reduce((last, line, i) => line.trim() !== "" ? i : last, 0);
6650
+ lines.splice(lastNonEmpty + 1, 0, newLine);
6651
+ }
6652
+ fs6.writeFileSync(indexPath2, lines.join("\n"));
6653
+ }
6586
6654
  var MAX_CACHE_ENTRIES = 8;
6587
- var learningsCacheMap = /* @__PURE__ */ new Map();
6588
- var failuresCacheMap = /* @__PURE__ */ new Map();
6589
6655
  function evictIfNeeded(map) {
6590
6656
  if (map.size > MAX_CACHE_ENTRIES) {
6591
6657
  const oldest = map.keys().next().value;
6592
6658
  if (oldest !== void 0) map.delete(oldest);
6593
6659
  }
6594
6660
  }
6595
- async function getStateDir(projectPath, stream) {
6596
- const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
6597
- const hasStreams = fs6.existsSync(streamsIndexPath);
6661
+ async function getStateDir(projectPath, stream, session) {
6662
+ if (session) {
6663
+ const sessionResult = resolveSessionDir(projectPath, session, { create: true });
6664
+ return sessionResult;
6665
+ }
6666
+ const streamsIndexPath = path4.join(projectPath, HARNESS_DIR, "streams", INDEX_FILE);
6667
+ const hasStreams = fs7.existsSync(streamsIndexPath);
6598
6668
  if (stream || hasStreams) {
6599
6669
  const result = await resolveStreamPath(projectPath, stream ? { stream } : void 0);
6600
6670
  if (result.ok) {
@@ -6604,18 +6674,18 @@ async function getStateDir(projectPath, stream) {
6604
6674
  return result;
6605
6675
  }
6606
6676
  }
6607
- return Ok(path3.join(projectPath, HARNESS_DIR2));
6677
+ return Ok(path4.join(projectPath, HARNESS_DIR));
6608
6678
  }
6609
- async function loadState(projectPath, stream) {
6679
+ async function loadState(projectPath, stream, session) {
6610
6680
  try {
6611
- const dirResult = await getStateDir(projectPath, stream);
6681
+ const dirResult = await getStateDir(projectPath, stream, session);
6612
6682
  if (!dirResult.ok) return dirResult;
6613
6683
  const stateDir = dirResult.value;
6614
- const statePath = path3.join(stateDir, STATE_FILE);
6615
- if (!fs6.existsSync(statePath)) {
6684
+ const statePath = path5.join(stateDir, STATE_FILE);
6685
+ if (!fs8.existsSync(statePath)) {
6616
6686
  return Ok({ ...DEFAULT_STATE });
6617
6687
  }
6618
- const raw = fs6.readFileSync(statePath, "utf-8");
6688
+ const raw = fs8.readFileSync(statePath, "utf-8");
6619
6689
  const parsed = JSON.parse(raw);
6620
6690
  const result = HarnessStateSchema.safeParse(parsed);
6621
6691
  if (!result.success) {
@@ -6628,14 +6698,14 @@ async function loadState(projectPath, stream) {
6628
6698
  );
6629
6699
  }
6630
6700
  }
6631
- async function saveState(projectPath, state, stream) {
6701
+ async function saveState(projectPath, state, stream, session) {
6632
6702
  try {
6633
- const dirResult = await getStateDir(projectPath, stream);
6703
+ const dirResult = await getStateDir(projectPath, stream, session);
6634
6704
  if (!dirResult.ok) return dirResult;
6635
6705
  const stateDir = dirResult.value;
6636
- const statePath = path3.join(stateDir, STATE_FILE);
6637
- fs6.mkdirSync(stateDir, { recursive: true });
6638
- fs6.writeFileSync(statePath, JSON.stringify(state, null, 2));
6706
+ const statePath = path5.join(stateDir, STATE_FILE);
6707
+ fs8.mkdirSync(stateDir, { recursive: true });
6708
+ fs8.writeFileSync(statePath, JSON.stringify(state, null, 2));
6639
6709
  return Ok(void 0);
6640
6710
  } catch (error) {
6641
6711
  return Err(
@@ -6643,13 +6713,17 @@ async function saveState(projectPath, state, stream) {
6643
6713
  );
6644
6714
  }
6645
6715
  }
6646
- async function appendLearning(projectPath, learning, skillName, outcome, stream) {
6716
+ var learningsCacheMap = /* @__PURE__ */ new Map();
6717
+ function clearLearningsCache() {
6718
+ learningsCacheMap.clear();
6719
+ }
6720
+ async function appendLearning(projectPath, learning, skillName, outcome, stream, session) {
6647
6721
  try {
6648
- const dirResult = await getStateDir(projectPath, stream);
6722
+ const dirResult = await getStateDir(projectPath, stream, session);
6649
6723
  if (!dirResult.ok) return dirResult;
6650
6724
  const stateDir = dirResult.value;
6651
- const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
6652
- fs6.mkdirSync(stateDir, { recursive: true });
6725
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6726
+ fs9.mkdirSync(stateDir, { recursive: true });
6653
6727
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6654
6728
  let entry;
6655
6729
  if (skillName && outcome) {
@@ -6665,11 +6739,11 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream)
6665
6739
  - **${timestamp}:** ${learning}
6666
6740
  `;
6667
6741
  }
6668
- if (!fs6.existsSync(learningsPath)) {
6669
- fs6.writeFileSync(learningsPath, `# Learnings
6742
+ if (!fs9.existsSync(learningsPath)) {
6743
+ fs9.writeFileSync(learningsPath, `# Learnings
6670
6744
  ${entry}`);
6671
6745
  } else {
6672
- fs6.appendFileSync(learningsPath, entry);
6746
+ fs9.appendFileSync(learningsPath, entry);
6673
6747
  }
6674
6748
  learningsCacheMap.delete(learningsPath);
6675
6749
  return Ok(void 0);
@@ -6681,23 +6755,92 @@ ${entry}`);
6681
6755
  );
6682
6756
  }
6683
6757
  }
6684
- async function loadRelevantLearnings(projectPath, skillName, stream) {
6758
+ function estimateTokens(text) {
6759
+ return Math.ceil(text.length / 4);
6760
+ }
6761
+ function scoreRelevance(entry, intent) {
6762
+ if (!intent || intent.trim() === "") return 0;
6763
+ const intentWords = intent.toLowerCase().split(/\s+/).filter((w) => w.length > 2);
6764
+ if (intentWords.length === 0) return 0;
6765
+ const entryLower = entry.toLowerCase();
6766
+ const matches = intentWords.filter((word) => entryLower.includes(word));
6767
+ return matches.length / intentWords.length;
6768
+ }
6769
+ function parseDateFromEntry(entry) {
6770
+ const match = entry.match(/(\d{4}-\d{2}-\d{2})/);
6771
+ return match ? match[1] ?? null : null;
6772
+ }
6773
+ function analyzeLearningPatterns(entries) {
6774
+ const tagGroups = /* @__PURE__ */ new Map();
6775
+ for (const entry of entries) {
6776
+ const tagMatches = entry.matchAll(/\[(skill:[^\]]+)\]|\[(outcome:[^\]]+)\]/g);
6777
+ for (const match of tagMatches) {
6778
+ const tag = match[1] ?? match[2];
6779
+ if (tag) {
6780
+ const group = tagGroups.get(tag) ?? [];
6781
+ group.push(entry);
6782
+ tagGroups.set(tag, group);
6783
+ }
6784
+ }
6785
+ }
6786
+ const patterns = [];
6787
+ for (const [tag, groupEntries] of tagGroups) {
6788
+ if (groupEntries.length >= 3) {
6789
+ patterns.push({ tag, count: groupEntries.length, entries: groupEntries });
6790
+ }
6791
+ }
6792
+ return patterns.sort((a, b) => b.count - a.count);
6793
+ }
6794
+ async function loadBudgetedLearnings(projectPath, options) {
6795
+ const { intent, tokenBudget = 1e3, skill, session, stream } = options;
6796
+ const sortByRecencyAndRelevance = (entries) => {
6797
+ return [...entries].sort((a, b) => {
6798
+ const dateA = parseDateFromEntry(a) ?? "0000-00-00";
6799
+ const dateB = parseDateFromEntry(b) ?? "0000-00-00";
6800
+ const dateCompare = dateB.localeCompare(dateA);
6801
+ if (dateCompare !== 0) return dateCompare;
6802
+ return scoreRelevance(b, intent) - scoreRelevance(a, intent);
6803
+ });
6804
+ };
6805
+ const allEntries = [];
6806
+ if (session) {
6807
+ const sessionResult = await loadRelevantLearnings(projectPath, skill, stream, session);
6808
+ if (sessionResult.ok) {
6809
+ allEntries.push(...sortByRecencyAndRelevance(sessionResult.value));
6810
+ }
6811
+ }
6812
+ const globalResult = await loadRelevantLearnings(projectPath, skill, stream);
6813
+ if (globalResult.ok) {
6814
+ allEntries.push(...sortByRecencyAndRelevance(globalResult.value));
6815
+ }
6816
+ const budgeted = [];
6817
+ let totalTokens = 0;
6818
+ for (const entry of allEntries) {
6819
+ const separator = budgeted.length > 0 ? "\n" : "";
6820
+ const entryCost = estimateTokens(entry + separator);
6821
+ if (totalTokens + entryCost > tokenBudget) break;
6822
+ budgeted.push(entry);
6823
+ totalTokens += entryCost;
6824
+ }
6825
+ return Ok(budgeted);
6826
+ }
6827
+ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6685
6828
  try {
6686
- const dirResult = await getStateDir(projectPath, stream);
6829
+ const dirResult = await getStateDir(projectPath, stream, session);
6687
6830
  if (!dirResult.ok) return dirResult;
6688
6831
  const stateDir = dirResult.value;
6689
- const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
6690
- if (!fs6.existsSync(learningsPath)) {
6832
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6833
+ if (!fs9.existsSync(learningsPath)) {
6691
6834
  return Ok([]);
6692
6835
  }
6693
- const stats = fs6.statSync(learningsPath);
6836
+ const stats = fs9.statSync(learningsPath);
6694
6837
  const cacheKey = learningsPath;
6695
6838
  const cached = learningsCacheMap.get(cacheKey);
6696
6839
  let entries;
6697
6840
  if (cached && cached.mtimeMs === stats.mtimeMs) {
6698
6841
  entries = cached.entries;
6699
6842
  } else {
6700
- const content = fs6.readFileSync(learningsPath, "utf-8");
6843
+ const content = fs9.readFileSync(learningsPath, "utf-8");
6701
6844
  const lines = content.split("\n");
6702
6845
  entries = [];
6703
6846
  let currentBlock = [];
@@ -6733,23 +6876,106 @@ async function loadRelevantLearnings(projectPath, skillName, stream) {
6733
6876
  );
6734
6877
  }
6735
6878
  }
6736
- var FAILURE_LINE_REGEX = /^- \*\*(\d{4}-\d{2}-\d{2}) \[skill:([^\]]+)\] \[type:([^\]]+)\]:\*\* (.+)$/;
6737
- async function appendFailure(projectPath, description, skillName, type, stream) {
6879
+ async function archiveLearnings(projectPath, entries, stream) {
6738
6880
  try {
6739
6881
  const dirResult = await getStateDir(projectPath, stream);
6740
6882
  if (!dirResult.ok) return dirResult;
6741
6883
  const stateDir = dirResult.value;
6742
- const failuresPath = path3.join(stateDir, FAILURES_FILE);
6743
- fs6.mkdirSync(stateDir, { recursive: true });
6884
+ const archiveDir = path6.join(stateDir, "learnings-archive");
6885
+ fs9.mkdirSync(archiveDir, { recursive: true });
6886
+ const now = /* @__PURE__ */ new Date();
6887
+ const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
6888
+ const archivePath = path6.join(archiveDir, `${yearMonth}.md`);
6889
+ const archiveContent = entries.join("\n\n") + "\n";
6890
+ if (fs9.existsSync(archivePath)) {
6891
+ fs9.appendFileSync(archivePath, "\n" + archiveContent);
6892
+ } else {
6893
+ fs9.writeFileSync(archivePath, `# Learnings Archive
6894
+
6895
+ ${archiveContent}`);
6896
+ }
6897
+ return Ok(void 0);
6898
+ } catch (error) {
6899
+ return Err(
6900
+ new Error(
6901
+ `Failed to archive learnings: ${error instanceof Error ? error.message : String(error)}`
6902
+ )
6903
+ );
6904
+ }
6905
+ }
6906
+ async function pruneLearnings(projectPath, stream) {
6907
+ try {
6908
+ const dirResult = await getStateDir(projectPath, stream);
6909
+ if (!dirResult.ok) return dirResult;
6910
+ const stateDir = dirResult.value;
6911
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6912
+ if (!fs9.existsSync(learningsPath)) {
6913
+ return Ok({ kept: 0, archived: 0, patterns: [] });
6914
+ }
6915
+ const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
6916
+ if (!loadResult.ok) return loadResult;
6917
+ const allEntries = loadResult.value;
6918
+ if (allEntries.length <= 20) {
6919
+ const cutoffDate = /* @__PURE__ */ new Date();
6920
+ cutoffDate.setDate(cutoffDate.getDate() - 14);
6921
+ const cutoffStr = cutoffDate.toISOString().split("T")[0];
6922
+ const hasOld = allEntries.some((entry) => {
6923
+ const date = parseDateFromEntry(entry);
6924
+ return date !== null && date < cutoffStr;
6925
+ });
6926
+ if (!hasOld) {
6927
+ return Ok({ kept: allEntries.length, archived: 0, patterns: [] });
6928
+ }
6929
+ }
6930
+ const sorted = [...allEntries].sort((a, b) => {
6931
+ const dateA = parseDateFromEntry(a) ?? "0000-00-00";
6932
+ const dateB = parseDateFromEntry(b) ?? "0000-00-00";
6933
+ return dateB.localeCompare(dateA);
6934
+ });
6935
+ const toKeep = sorted.slice(0, 20);
6936
+ const toArchive = sorted.slice(20);
6937
+ const patterns = analyzeLearningPatterns(allEntries);
6938
+ if (toArchive.length > 0) {
6939
+ const archiveResult = await archiveLearnings(projectPath, toArchive, stream);
6940
+ if (!archiveResult.ok) return archiveResult;
6941
+ }
6942
+ const newContent = "# Learnings\n\n" + toKeep.join("\n\n") + "\n";
6943
+ fs9.writeFileSync(learningsPath, newContent);
6944
+ learningsCacheMap.delete(learningsPath);
6945
+ return Ok({
6946
+ kept: toKeep.length,
6947
+ archived: toArchive.length,
6948
+ patterns
6949
+ });
6950
+ } catch (error) {
6951
+ return Err(
6952
+ new Error(
6953
+ `Failed to prune learnings: ${error instanceof Error ? error.message : String(error)}`
6954
+ )
6955
+ );
6956
+ }
6957
+ }
6958
+ var failuresCacheMap = /* @__PURE__ */ new Map();
6959
+ function clearFailuresCache() {
6960
+ failuresCacheMap.clear();
6961
+ }
6962
+ var FAILURE_LINE_REGEX = /^- \*\*(\d{4}-\d{2}-\d{2}) \[skill:([^\]]+)\] \[type:([^\]]+)\]:\*\* (.+)$/;
6963
+ async function appendFailure(projectPath, description, skillName, type, stream, session) {
6964
+ try {
6965
+ const dirResult = await getStateDir(projectPath, stream, session);
6966
+ if (!dirResult.ok) return dirResult;
6967
+ const stateDir = dirResult.value;
6968
+ const failuresPath = path7.join(stateDir, FAILURES_FILE);
6969
+ fs10.mkdirSync(stateDir, { recursive: true });
6744
6970
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6745
6971
  const entry = `
6746
6972
  - **${timestamp} [skill:${skillName}] [type:${type}]:** ${description}
6747
6973
  `;
6748
- if (!fs6.existsSync(failuresPath)) {
6749
- fs6.writeFileSync(failuresPath, `# Failures
6974
+ if (!fs10.existsSync(failuresPath)) {
6975
+ fs10.writeFileSync(failuresPath, `# Failures
6750
6976
  ${entry}`);
6751
6977
  } else {
6752
- fs6.appendFileSync(failuresPath, entry);
6978
+ fs10.appendFileSync(failuresPath, entry);
6753
6979
  }
6754
6980
  failuresCacheMap.delete(failuresPath);
6755
6981
  return Ok(void 0);
@@ -6761,22 +6987,22 @@ ${entry}`);
6761
6987
  );
6762
6988
  }
6763
6989
  }
6764
- async function loadFailures(projectPath, stream) {
6990
+ async function loadFailures(projectPath, stream, session) {
6765
6991
  try {
6766
- const dirResult = await getStateDir(projectPath, stream);
6992
+ const dirResult = await getStateDir(projectPath, stream, session);
6767
6993
  if (!dirResult.ok) return dirResult;
6768
6994
  const stateDir = dirResult.value;
6769
- const failuresPath = path3.join(stateDir, FAILURES_FILE);
6770
- if (!fs6.existsSync(failuresPath)) {
6995
+ const failuresPath = path7.join(stateDir, FAILURES_FILE);
6996
+ if (!fs10.existsSync(failuresPath)) {
6771
6997
  return Ok([]);
6772
6998
  }
6773
- const stats = fs6.statSync(failuresPath);
6999
+ const stats = fs10.statSync(failuresPath);
6774
7000
  const cacheKey = failuresPath;
6775
7001
  const cached = failuresCacheMap.get(cacheKey);
6776
7002
  if (cached && cached.mtimeMs === stats.mtimeMs) {
6777
7003
  return Ok(cached.entries);
6778
7004
  }
6779
- const content = fs6.readFileSync(failuresPath, "utf-8");
7005
+ const content = fs10.readFileSync(failuresPath, "utf-8");
6780
7006
  const entries = [];
6781
7007
  for (const line of content.split("\n")) {
6782
7008
  const match = line.match(FAILURE_LINE_REGEX);
@@ -6800,25 +7026,25 @@ async function loadFailures(projectPath, stream) {
6800
7026
  );
6801
7027
  }
6802
7028
  }
6803
- async function archiveFailures(projectPath, stream) {
7029
+ async function archiveFailures(projectPath, stream, session) {
6804
7030
  try {
6805
- const dirResult = await getStateDir(projectPath, stream);
7031
+ const dirResult = await getStateDir(projectPath, stream, session);
6806
7032
  if (!dirResult.ok) return dirResult;
6807
7033
  const stateDir = dirResult.value;
6808
- const failuresPath = path3.join(stateDir, FAILURES_FILE);
6809
- if (!fs6.existsSync(failuresPath)) {
7034
+ const failuresPath = path7.join(stateDir, FAILURES_FILE);
7035
+ if (!fs10.existsSync(failuresPath)) {
6810
7036
  return Ok(void 0);
6811
7037
  }
6812
- const archiveDir = path3.join(stateDir, "archive");
6813
- fs6.mkdirSync(archiveDir, { recursive: true });
7038
+ const archiveDir = path7.join(stateDir, "archive");
7039
+ fs10.mkdirSync(archiveDir, { recursive: true });
6814
7040
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6815
7041
  let archiveName = `failures-${date}.md`;
6816
7042
  let counter = 2;
6817
- while (fs6.existsSync(path3.join(archiveDir, archiveName))) {
7043
+ while (fs10.existsSync(path7.join(archiveDir, archiveName))) {
6818
7044
  archiveName = `failures-${date}-${counter}.md`;
6819
7045
  counter++;
6820
7046
  }
6821
- fs6.renameSync(failuresPath, path3.join(archiveDir, archiveName));
7047
+ fs10.renameSync(failuresPath, path7.join(archiveDir, archiveName));
6822
7048
  failuresCacheMap.delete(failuresPath);
6823
7049
  return Ok(void 0);
6824
7050
  } catch (error) {
@@ -6829,14 +7055,14 @@ async function archiveFailures(projectPath, stream) {
6829
7055
  );
6830
7056
  }
6831
7057
  }
6832
- async function saveHandoff(projectPath, handoff, stream) {
7058
+ async function saveHandoff(projectPath, handoff, stream, session) {
6833
7059
  try {
6834
- const dirResult = await getStateDir(projectPath, stream);
7060
+ const dirResult = await getStateDir(projectPath, stream, session);
6835
7061
  if (!dirResult.ok) return dirResult;
6836
7062
  const stateDir = dirResult.value;
6837
- const handoffPath = path3.join(stateDir, HANDOFF_FILE);
6838
- fs6.mkdirSync(stateDir, { recursive: true });
6839
- fs6.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
7063
+ const handoffPath = path8.join(stateDir, HANDOFF_FILE);
7064
+ fs11.mkdirSync(stateDir, { recursive: true });
7065
+ fs11.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
6840
7066
  return Ok(void 0);
6841
7067
  } catch (error) {
6842
7068
  return Err(
@@ -6844,16 +7070,16 @@ async function saveHandoff(projectPath, handoff, stream) {
6844
7070
  );
6845
7071
  }
6846
7072
  }
6847
- async function loadHandoff(projectPath, stream) {
7073
+ async function loadHandoff(projectPath, stream, session) {
6848
7074
  try {
6849
- const dirResult = await getStateDir(projectPath, stream);
7075
+ const dirResult = await getStateDir(projectPath, stream, session);
6850
7076
  if (!dirResult.ok) return dirResult;
6851
7077
  const stateDir = dirResult.value;
6852
- const handoffPath = path3.join(stateDir, HANDOFF_FILE);
6853
- if (!fs6.existsSync(handoffPath)) {
7078
+ const handoffPath = path8.join(stateDir, HANDOFF_FILE);
7079
+ if (!fs11.existsSync(handoffPath)) {
6854
7080
  return Ok(null);
6855
7081
  }
6856
- const raw = fs6.readFileSync(handoffPath, "utf-8");
7082
+ const raw = fs11.readFileSync(handoffPath, "utf-8");
6857
7083
  const parsed = JSON.parse(raw);
6858
7084
  const result = HandoffSchema.safeParse(parsed);
6859
7085
  if (!result.success) {
@@ -6866,73 +7092,77 @@ async function loadHandoff(projectPath, stream) {
6866
7092
  );
6867
7093
  }
6868
7094
  }
7095
+ var SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
7096
+ function loadChecksFromConfig(gateConfigPath) {
7097
+ if (!fs12.existsSync(gateConfigPath)) return [];
7098
+ const raw = JSON.parse(fs12.readFileSync(gateConfigPath, "utf-8"));
7099
+ const config = GateConfigSchema.safeParse(raw);
7100
+ if (config.success && config.data.checks) return config.data.checks;
7101
+ return [];
7102
+ }
7103
+ function discoverChecksFromProject(projectPath) {
7104
+ const checks = [];
7105
+ const packageJsonPath = path9.join(projectPath, "package.json");
7106
+ if (fs12.existsSync(packageJsonPath)) {
7107
+ const pkg = JSON.parse(fs12.readFileSync(packageJsonPath, "utf-8"));
7108
+ const scripts = pkg.scripts || {};
7109
+ if (scripts.test) checks.push({ name: "test", command: "npm test" });
7110
+ if (scripts.lint) checks.push({ name: "lint", command: "npm run lint" });
7111
+ if (scripts.typecheck) checks.push({ name: "typecheck", command: "npm run typecheck" });
7112
+ if (scripts.build) checks.push({ name: "build", command: "npm run build" });
7113
+ }
7114
+ if (fs12.existsSync(path9.join(projectPath, "go.mod"))) {
7115
+ checks.push({ name: "test", command: "go test ./..." });
7116
+ checks.push({ name: "build", command: "go build ./..." });
7117
+ }
7118
+ if (fs12.existsSync(path9.join(projectPath, "pyproject.toml")) || fs12.existsSync(path9.join(projectPath, "setup.py"))) {
7119
+ checks.push({ name: "test", command: "python -m pytest" });
7120
+ }
7121
+ return checks;
7122
+ }
7123
+ function executeCheck(check, projectPath) {
7124
+ if (!SAFE_GATE_COMMAND.test(check.command)) {
7125
+ return {
7126
+ name: check.name,
7127
+ passed: false,
7128
+ command: check.command,
7129
+ output: `Blocked: command does not match safe gate pattern. Allowed prefixes: npm, pnpm, yarn, go, python, python3, make, cargo, gradle, mvn`,
7130
+ duration: 0
7131
+ };
7132
+ }
7133
+ const start = Date.now();
7134
+ try {
7135
+ execSync2(check.command, {
7136
+ cwd: projectPath,
7137
+ stdio: "pipe",
7138
+ timeout: 12e4
7139
+ });
7140
+ return {
7141
+ name: check.name,
7142
+ passed: true,
7143
+ command: check.command,
7144
+ duration: Date.now() - start
7145
+ };
7146
+ } catch (error) {
7147
+ const output = error instanceof Error ? error.stderr?.toString() || error.message : String(error);
7148
+ return {
7149
+ name: check.name,
7150
+ passed: false,
7151
+ command: check.command,
7152
+ output: output.slice(0, 2e3),
7153
+ duration: Date.now() - start
7154
+ };
7155
+ }
7156
+ }
6869
7157
  async function runMechanicalGate(projectPath) {
6870
- const harnessDir = path3.join(projectPath, HARNESS_DIR2);
6871
- const gateConfigPath = path3.join(harnessDir, GATE_CONFIG_FILE);
7158
+ const harnessDir = path9.join(projectPath, HARNESS_DIR);
7159
+ const gateConfigPath = path9.join(harnessDir, GATE_CONFIG_FILE);
6872
7160
  try {
6873
- let checks = [];
6874
- if (fs6.existsSync(gateConfigPath)) {
6875
- const raw = JSON.parse(fs6.readFileSync(gateConfigPath, "utf-8"));
6876
- const config = GateConfigSchema.safeParse(raw);
6877
- if (config.success && config.data.checks) {
6878
- checks = config.data.checks;
6879
- }
6880
- }
7161
+ let checks = loadChecksFromConfig(gateConfigPath);
6881
7162
  if (checks.length === 0) {
6882
- const packageJsonPath = path3.join(projectPath, "package.json");
6883
- if (fs6.existsSync(packageJsonPath)) {
6884
- const pkg = JSON.parse(fs6.readFileSync(packageJsonPath, "utf-8"));
6885
- const scripts = pkg.scripts || {};
6886
- if (scripts.test) checks.push({ name: "test", command: "npm test" });
6887
- if (scripts.lint) checks.push({ name: "lint", command: "npm run lint" });
6888
- if (scripts.typecheck) checks.push({ name: "typecheck", command: "npm run typecheck" });
6889
- if (scripts.build) checks.push({ name: "build", command: "npm run build" });
6890
- }
6891
- if (fs6.existsSync(path3.join(projectPath, "go.mod"))) {
6892
- checks.push({ name: "test", command: "go test ./..." });
6893
- checks.push({ name: "build", command: "go build ./..." });
6894
- }
6895
- if (fs6.existsSync(path3.join(projectPath, "pyproject.toml")) || fs6.existsSync(path3.join(projectPath, "setup.py"))) {
6896
- checks.push({ name: "test", command: "python -m pytest" });
6897
- }
6898
- }
6899
- const results = [];
6900
- const SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
6901
- for (const check of checks) {
6902
- if (!SAFE_GATE_COMMAND.test(check.command)) {
6903
- results.push({
6904
- name: check.name,
6905
- passed: false,
6906
- command: check.command,
6907
- output: `Blocked: command does not match safe gate pattern. Allowed prefixes: npm, npx, pnpm, yarn, go, python, python3, make, cargo, gradle, mvn`,
6908
- duration: 0
6909
- });
6910
- continue;
6911
- }
6912
- const start = Date.now();
6913
- try {
6914
- execSync2(check.command, {
6915
- cwd: projectPath,
6916
- stdio: "pipe",
6917
- timeout: 12e4
6918
- });
6919
- results.push({
6920
- name: check.name,
6921
- passed: true,
6922
- command: check.command,
6923
- duration: Date.now() - start
6924
- });
6925
- } catch (error) {
6926
- const output = error instanceof Error ? error.stderr?.toString() || error.message : String(error);
6927
- results.push({
6928
- name: check.name,
6929
- passed: false,
6930
- command: check.command,
6931
- output: output.slice(0, 2e3),
6932
- duration: Date.now() - start
6933
- });
6934
- }
7163
+ checks = discoverChecksFromProject(projectPath);
6935
7164
  }
7165
+ const results = checks.map((check) => executeCheck(check, projectPath));
6936
7166
  return Ok({
6937
7167
  passed: results.length === 0 || results.every((r) => r.passed),
6938
7168
  checks: results
@@ -6945,6 +7175,92 @@ async function runMechanicalGate(projectPath) {
6945
7175
  );
6946
7176
  }
6947
7177
  }
7178
+ function formatSummary(data) {
7179
+ const lines = [
7180
+ "## Session Summary",
7181
+ "",
7182
+ `**Session:** ${data.session}`,
7183
+ `**Last active:** ${data.lastActive}`,
7184
+ `**Skill:** ${data.skill}`
7185
+ ];
7186
+ if (data.phase) {
7187
+ lines.push(`**Phase:** ${data.phase}`);
7188
+ }
7189
+ lines.push(`**Status:** ${data.status}`);
7190
+ if (data.spec) {
7191
+ lines.push(`**Spec:** ${data.spec}`);
7192
+ }
7193
+ if (data.plan) {
7194
+ lines.push(`**Plan:** ${data.plan}`);
7195
+ }
7196
+ lines.push(`**Key context:** ${data.keyContext}`);
7197
+ lines.push(`**Next step:** ${data.nextStep}`);
7198
+ lines.push("");
7199
+ return lines.join("\n");
7200
+ }
7201
+ function deriveIndexDescription(data) {
7202
+ const skillShort = data.skill.replace("harness-", "");
7203
+ const parts = [skillShort];
7204
+ if (data.phase) {
7205
+ parts.push(`phase ${data.phase}`);
7206
+ }
7207
+ parts.push(data.status.toLowerCase());
7208
+ return parts.join(", ");
7209
+ }
7210
+ function writeSessionSummary(projectPath, sessionSlug, data) {
7211
+ try {
7212
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7213
+ if (!dirResult.ok) return dirResult;
7214
+ const sessionDir = dirResult.value;
7215
+ const summaryPath = path10.join(sessionDir, SUMMARY_FILE);
7216
+ const content = formatSummary(data);
7217
+ fs13.writeFileSync(summaryPath, content);
7218
+ const description = deriveIndexDescription(data);
7219
+ updateSessionIndex(projectPath, sessionSlug, description);
7220
+ return Ok(void 0);
7221
+ } catch (error) {
7222
+ return Err(
7223
+ new Error(
7224
+ `Failed to write session summary: ${error instanceof Error ? error.message : String(error)}`
7225
+ )
7226
+ );
7227
+ }
7228
+ }
7229
+ function loadSessionSummary(projectPath, sessionSlug) {
7230
+ try {
7231
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
7232
+ if (!dirResult.ok) return dirResult;
7233
+ const sessionDir = dirResult.value;
7234
+ const summaryPath = path10.join(sessionDir, SUMMARY_FILE);
7235
+ if (!fs13.existsSync(summaryPath)) {
7236
+ return Ok(null);
7237
+ }
7238
+ const content = fs13.readFileSync(summaryPath, "utf-8");
7239
+ return Ok(content);
7240
+ } catch (error) {
7241
+ return Err(
7242
+ new Error(
7243
+ `Failed to load session summary: ${error instanceof Error ? error.message : String(error)}`
7244
+ )
7245
+ );
7246
+ }
7247
+ }
7248
+ function listActiveSessions(projectPath) {
7249
+ try {
7250
+ const indexPath2 = path10.join(projectPath, HARNESS_DIR, SESSIONS_DIR, SESSION_INDEX_FILE);
7251
+ if (!fs13.existsSync(indexPath2)) {
7252
+ return Ok(null);
7253
+ }
7254
+ const content = fs13.readFileSync(indexPath2, "utf-8");
7255
+ return Ok(content);
7256
+ } catch (error) {
7257
+ return Err(
7258
+ new Error(
7259
+ `Failed to list active sessions: ${error instanceof Error ? error.message : String(error)}`
7260
+ )
7261
+ );
7262
+ }
7263
+ }
6948
7264
  async function executeWorkflow(workflow, executor) {
6949
7265
  const stepResults = [];
6950
7266
  const startTime = Date.now();
@@ -7166,11 +7482,11 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7166
7482
  }
7167
7483
  function detectStack(projectRoot) {
7168
7484
  const stacks = [];
7169
- const pkgJsonPath = path4.join(projectRoot, "package.json");
7170
- if (fs7.existsSync(pkgJsonPath)) {
7485
+ const pkgJsonPath = path11.join(projectRoot, "package.json");
7486
+ if (fs14.existsSync(pkgJsonPath)) {
7171
7487
  stacks.push("node");
7172
7488
  try {
7173
- const pkgJson = JSON.parse(fs7.readFileSync(pkgJsonPath, "utf-8"));
7489
+ const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
7174
7490
  const allDeps = {
7175
7491
  ...pkgJson.dependencies,
7176
7492
  ...pkgJson.devDependencies
@@ -7185,13 +7501,13 @@ function detectStack(projectRoot) {
7185
7501
  } catch {
7186
7502
  }
7187
7503
  }
7188
- const goModPath = path4.join(projectRoot, "go.mod");
7189
- if (fs7.existsSync(goModPath)) {
7504
+ const goModPath = path11.join(projectRoot, "go.mod");
7505
+ if (fs14.existsSync(goModPath)) {
7190
7506
  stacks.push("go");
7191
7507
  }
7192
- const requirementsPath = path4.join(projectRoot, "requirements.txt");
7193
- const pyprojectPath = path4.join(projectRoot, "pyproject.toml");
7194
- if (fs7.existsSync(requirementsPath) || fs7.existsSync(pyprojectPath)) {
7508
+ const requirementsPath = path11.join(projectRoot, "requirements.txt");
7509
+ const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
7510
+ if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
7195
7511
  stacks.push("python");
7196
7512
  }
7197
7513
  return stacks;
@@ -7594,7 +7910,7 @@ var SecurityScanner = class {
7594
7910
  }
7595
7911
  async scanFile(filePath) {
7596
7912
  if (!this.config.enabled) return [];
7597
- const content = await fs8.readFile(filePath, "utf-8");
7913
+ const content = await fs15.readFile(filePath, "utf-8");
7598
7914
  return this.scanContent(content, filePath, 1);
7599
7915
  }
7600
7916
  async scanFiles(filePaths) {
@@ -7633,7 +7949,7 @@ async function runSingleCheck(name, projectRoot, config) {
7633
7949
  try {
7634
7950
  switch (name) {
7635
7951
  case "validate": {
7636
- const agentsPath = path5.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7952
+ const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7637
7953
  const result = await validateAgentsMap(agentsPath);
7638
7954
  if (!result.ok) {
7639
7955
  issues.push({ severity: "error", message: result.error.message });
@@ -7688,7 +8004,7 @@ async function runSingleCheck(name, projectRoot, config) {
7688
8004
  break;
7689
8005
  }
7690
8006
  case "docs": {
7691
- const docsDir = path5.join(projectRoot, config.docsDir ?? "docs");
8007
+ const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
7692
8008
  const entropyConfig = config.entropy || {};
7693
8009
  const result = await checkDocCoverage("project", {
7694
8010
  docsDir,
@@ -7935,7 +8251,7 @@ async function runMechanicalChecks(options) {
7935
8251
  };
7936
8252
  if (!skip.includes("validate")) {
7937
8253
  try {
7938
- const agentsPath = path6.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8254
+ const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7939
8255
  const result = await validateAgentsMap(agentsPath);
7940
8256
  if (!result.ok) {
7941
8257
  statuses.validate = "fail";
@@ -7972,7 +8288,7 @@ async function runMechanicalChecks(options) {
7972
8288
  statuses.validate = "fail";
7973
8289
  findings.push({
7974
8290
  tool: "validate",
7975
- file: path6.join(projectRoot, "AGENTS.md"),
8291
+ file: path13.join(projectRoot, "AGENTS.md"),
7976
8292
  message: err instanceof Error ? err.message : String(err),
7977
8293
  severity: "error"
7978
8294
  });
@@ -8036,7 +8352,7 @@ async function runMechanicalChecks(options) {
8036
8352
  (async () => {
8037
8353
  const localFindings = [];
8038
8354
  try {
8039
- const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
8355
+ const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
8040
8356
  const result = await checkDocCoverage("project", { docsDir });
8041
8357
  if (!result.ok) {
8042
8358
  statuses["check-docs"] = "warn";
@@ -8063,7 +8379,7 @@ async function runMechanicalChecks(options) {
8063
8379
  statuses["check-docs"] = "warn";
8064
8380
  localFindings.push({
8065
8381
  tool: "check-docs",
8066
- file: path6.join(projectRoot, "docs"),
8382
+ file: path13.join(projectRoot, "docs"),
8067
8383
  message: err instanceof Error ? err.message : String(err),
8068
8384
  severity: "warning"
8069
8385
  });
@@ -8212,18 +8528,18 @@ function computeContextBudget(diffLines) {
8212
8528
  return diffLines;
8213
8529
  }
8214
8530
  function isWithinProject(absPath, projectRoot) {
8215
- const resolvedRoot = path7.resolve(projectRoot) + path7.sep;
8216
- const resolvedPath = path7.resolve(absPath);
8217
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path7.resolve(projectRoot);
8531
+ const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
8532
+ const resolvedPath = path14.resolve(absPath);
8533
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
8218
8534
  }
8219
8535
  async function readContextFile(projectRoot, filePath, reason) {
8220
- const absPath = path7.isAbsolute(filePath) ? filePath : path7.join(projectRoot, filePath);
8536
+ const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
8221
8537
  if (!isWithinProject(absPath, projectRoot)) return null;
8222
8538
  const result = await readFileContent(absPath);
8223
8539
  if (!result.ok) return null;
8224
8540
  const content = result.value;
8225
8541
  const lines = content.split("\n").length;
8226
- const relPath = path7.isAbsolute(filePath) ? path7.relative(projectRoot, filePath) : filePath;
8542
+ const relPath = path14.isAbsolute(filePath) ? path14.relative(projectRoot, filePath) : filePath;
8227
8543
  return { path: relPath, content, reason, lines };
8228
8544
  }
8229
8545
  function extractImportSources2(content) {
@@ -8238,18 +8554,18 @@ function extractImportSources2(content) {
8238
8554
  }
8239
8555
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
8240
8556
  if (!importSource.startsWith(".")) return null;
8241
- const fromDir = path7.dirname(path7.join(projectRoot, fromFile));
8242
- const basePath = path7.resolve(fromDir, importSource);
8557
+ const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
8558
+ const basePath = path14.resolve(fromDir, importSource);
8243
8559
  if (!isWithinProject(basePath, projectRoot)) return null;
8244
- const relBase = path7.relative(projectRoot, basePath);
8560
+ const relBase = path14.relative(projectRoot, basePath);
8245
8561
  const candidates = [
8246
8562
  relBase + ".ts",
8247
8563
  relBase + ".tsx",
8248
8564
  relBase + ".mts",
8249
- path7.join(relBase, "index.ts")
8565
+ path14.join(relBase, "index.ts")
8250
8566
  ];
8251
8567
  for (const candidate of candidates) {
8252
- const absCandidate = path7.join(projectRoot, candidate);
8568
+ const absCandidate = path14.join(projectRoot, candidate);
8253
8569
  if (await fileExists(absCandidate)) {
8254
8570
  return candidate;
8255
8571
  }
@@ -8257,10 +8573,10 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
8257
8573
  return null;
8258
8574
  }
8259
8575
  async function findTestFiles(projectRoot, sourceFile) {
8260
- const baseName = path7.basename(sourceFile, path7.extname(sourceFile));
8576
+ const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
8261
8577
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
8262
8578
  const results = await findFiles(pattern, projectRoot);
8263
- return results.map((f) => path7.relative(projectRoot, f));
8579
+ return results.map((f) => path14.relative(projectRoot, f));
8264
8580
  }
8265
8581
  async function gatherImportContext(projectRoot, changedFiles, budget) {
8266
8582
  const contextFiles = [];
@@ -9052,7 +9368,7 @@ function normalizePath(filePath, projectRoot) {
9052
9368
  let normalized = filePath;
9053
9369
  normalized = normalized.replace(/\\/g, "/");
9054
9370
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9055
- if (path8.isAbsolute(normalized)) {
9371
+ if (path15.isAbsolute(normalized)) {
9056
9372
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9057
9373
  if (normalized.startsWith(root)) {
9058
9374
  normalized = normalized.slice(root.length);
@@ -9077,12 +9393,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9077
9393
  while ((match = importRegex.exec(content)) !== null) {
9078
9394
  const importPath = match[1];
9079
9395
  if (!importPath.startsWith(".")) continue;
9080
- const dir = path8.dirname(current.file);
9081
- let resolved = path8.join(dir, importPath).replace(/\\/g, "/");
9396
+ const dir = path15.dirname(current.file);
9397
+ let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
9082
9398
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9083
9399
  resolved += ".ts";
9084
9400
  }
9085
- resolved = path8.normalize(resolved).replace(/\\/g, "/");
9401
+ resolved = path15.normalize(resolved).replace(/\\/g, "/");
9086
9402
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9087
9403
  queue.push({ file: resolved, depth: current.depth + 1 });
9088
9404
  }
@@ -9099,7 +9415,7 @@ async function validateFindings(options) {
9099
9415
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9100
9416
  continue;
9101
9417
  }
9102
- const absoluteFile = path8.isAbsolute(finding.file) ? finding.file : path8.join(projectRoot, finding.file).replace(/\\/g, "/");
9418
+ const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
9103
9419
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9104
9420
  continue;
9105
9421
  }
@@ -9603,6 +9919,8 @@ function parseFrontmatter(raw) {
9603
9919
  const versionStr = map.get("version");
9604
9920
  const lastSynced = map.get("last_synced");
9605
9921
  const lastManualEdit = map.get("last_manual_edit");
9922
+ const created = map.get("created");
9923
+ const updated = map.get("updated");
9606
9924
  if (!project || !versionStr || !lastSynced || !lastManualEdit) {
9607
9925
  return Err(
9608
9926
  new Error(
@@ -9614,7 +9932,10 @@ function parseFrontmatter(raw) {
9614
9932
  if (isNaN(version)) {
9615
9933
  return Err(new Error("Frontmatter version must be a number"));
9616
9934
  }
9617
- return Ok({ project, version, lastSynced, lastManualEdit });
9935
+ const fm = { project, version, lastSynced, lastManualEdit };
9936
+ if (created) fm.created = created;
9937
+ if (updated) fm.updated = updated;
9938
+ return Ok(fm);
9618
9939
  }
9619
9940
  function parseMilestones(body) {
9620
9941
  const milestones = [];
@@ -9622,12 +9943,12 @@ function parseMilestones(body) {
9622
9943
  const h2Matches = [];
9623
9944
  let match;
9624
9945
  while ((match = h2Pattern.exec(body)) !== null) {
9625
- h2Matches.push({ heading: match[1], startIndex: match.index });
9946
+ h2Matches.push({ heading: match[1], startIndex: match.index, fullMatch: match[0] });
9626
9947
  }
9627
9948
  for (let i = 0; i < h2Matches.length; i++) {
9628
9949
  const h2 = h2Matches[i];
9629
9950
  const nextStart = i + 1 < h2Matches.length ? h2Matches[i + 1].startIndex : body.length;
9630
- const sectionBody = body.slice(h2.startIndex + h2.heading.length + 4, nextStart);
9951
+ const sectionBody = body.slice(h2.startIndex + h2.fullMatch.length, nextStart);
9631
9952
  const isBacklog = h2.heading === "Backlog";
9632
9953
  const milestoneName = isBacklog ? "Backlog" : h2.heading.replace(/^Milestone:\s*/, "");
9633
9954
  const featuresResult = parseFeatures(sectionBody);
@@ -9642,19 +9963,16 @@ function parseMilestones(body) {
9642
9963
  }
9643
9964
  function parseFeatures(sectionBody) {
9644
9965
  const features = [];
9645
- const h3Pattern = /^### Feature: (.+)$/gm;
9966
+ const h3Pattern = /^### (?:Feature: )?(.+)$/gm;
9646
9967
  const h3Matches = [];
9647
9968
  let match;
9648
9969
  while ((match = h3Pattern.exec(sectionBody)) !== null) {
9649
- h3Matches.push({ name: match[1], startIndex: match.index });
9970
+ h3Matches.push({ name: match[1], startIndex: match.index, fullMatch: match[0] });
9650
9971
  }
9651
9972
  for (let i = 0; i < h3Matches.length; i++) {
9652
9973
  const h3 = h3Matches[i];
9653
9974
  const nextStart = i + 1 < h3Matches.length ? h3Matches[i + 1].startIndex : sectionBody.length;
9654
- const featureBody = sectionBody.slice(
9655
- h3.startIndex + `### Feature: ${h3.name}`.length,
9656
- nextStart
9657
- );
9975
+ const featureBody = sectionBody.slice(h3.startIndex + h3.fullMatch.length, nextStart);
9658
9976
  const featureResult = parseFeatureFields(h3.name, featureBody);
9659
9977
  if (!featureResult.ok) return featureResult;
9660
9978
  features.push(featureResult.value);
@@ -9679,10 +9997,10 @@ function parseFeatureFields(name, body) {
9679
9997
  const status = statusRaw;
9680
9998
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
9681
9999
  const spec = specRaw === EM_DASH ? null : specRaw;
9682
- const plansRaw = fieldMap.get("Plans") ?? EM_DASH;
9683
- const plans = plansRaw === EM_DASH ? [] : plansRaw.split(",").map((p) => p.trim());
9684
- const blockedByRaw = fieldMap.get("Blocked by") ?? EM_DASH;
9685
- const blockedBy = blockedByRaw === EM_DASH ? [] : blockedByRaw.split(",").map((b) => b.trim());
10000
+ const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
10001
+ const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
10002
+ const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
10003
+ const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
9686
10004
  const summary = fieldMap.get("Summary") ?? "";
9687
10005
  return Ok({ name, status, spec, plans, blockedBy, summary });
9688
10006
  }
@@ -9692,11 +10010,17 @@ function serializeRoadmap(roadmap) {
9692
10010
  lines.push("---");
9693
10011
  lines.push(`project: ${roadmap.frontmatter.project}`);
9694
10012
  lines.push(`version: ${roadmap.frontmatter.version}`);
10013
+ if (roadmap.frontmatter.created) {
10014
+ lines.push(`created: ${roadmap.frontmatter.created}`);
10015
+ }
10016
+ if (roadmap.frontmatter.updated) {
10017
+ lines.push(`updated: ${roadmap.frontmatter.updated}`);
10018
+ }
9695
10019
  lines.push(`last_synced: ${roadmap.frontmatter.lastSynced}`);
9696
10020
  lines.push(`last_manual_edit: ${roadmap.frontmatter.lastManualEdit}`);
9697
10021
  lines.push("---");
9698
10022
  lines.push("");
9699
- lines.push("# Project Roadmap");
10023
+ lines.push("# Roadmap");
9700
10024
  for (const milestone of roadmap.milestones) {
9701
10025
  lines.push("");
9702
10026
  lines.push(serializeMilestoneHeading(milestone));
@@ -9709,19 +10033,20 @@ function serializeRoadmap(roadmap) {
9709
10033
  return lines.join("\n");
9710
10034
  }
9711
10035
  function serializeMilestoneHeading(milestone) {
9712
- return milestone.isBacklog ? "## Backlog" : `## Milestone: ${milestone.name}`;
10036
+ return milestone.isBacklog ? "## Backlog" : `## ${milestone.name}`;
9713
10037
  }
9714
10038
  function serializeFeature(feature) {
9715
10039
  const spec = feature.spec ?? EM_DASH2;
9716
10040
  const plans = feature.plans.length > 0 ? feature.plans.join(", ") : EM_DASH2;
9717
10041
  const blockedBy = feature.blockedBy.length > 0 ? feature.blockedBy.join(", ") : EM_DASH2;
9718
10042
  return [
9719
- `### Feature: ${feature.name}`,
10043
+ `### ${feature.name}`,
10044
+ "",
9720
10045
  `- **Status:** ${feature.status}`,
9721
10046
  `- **Spec:** ${spec}`,
9722
- `- **Plans:** ${plans}`,
9723
- `- **Blocked by:** ${blockedBy}`,
9724
- `- **Summary:** ${feature.summary}`
10047
+ `- **Summary:** ${feature.summary}`,
10048
+ `- **Blockers:** ${blockedBy}`,
10049
+ `- **Plan:** ${plans}`
9725
10050
  ];
9726
10051
  }
9727
10052
  function inferStatus(feature, projectPath, allFeatures) {
@@ -9737,10 +10062,10 @@ function inferStatus(feature, projectPath, allFeatures) {
9737
10062
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
9738
10063
  const useRootState = featuresWithPlans.length <= 1;
9739
10064
  if (useRootState) {
9740
- const rootStatePath = path9.join(projectPath, ".harness", "state.json");
9741
- if (fs9.existsSync(rootStatePath)) {
10065
+ const rootStatePath = path16.join(projectPath, ".harness", "state.json");
10066
+ if (fs16.existsSync(rootStatePath)) {
9742
10067
  try {
9743
- const raw = fs9.readFileSync(rootStatePath, "utf-8");
10068
+ const raw = fs16.readFileSync(rootStatePath, "utf-8");
9744
10069
  const state = JSON.parse(raw);
9745
10070
  if (state.progress) {
9746
10071
  for (const status of Object.values(state.progress)) {
@@ -9751,16 +10076,16 @@ function inferStatus(feature, projectPath, allFeatures) {
9751
10076
  }
9752
10077
  }
9753
10078
  }
9754
- const sessionsDir = path9.join(projectPath, ".harness", "sessions");
9755
- if (fs9.existsSync(sessionsDir)) {
10079
+ const sessionsDir = path16.join(projectPath, ".harness", "sessions");
10080
+ if (fs16.existsSync(sessionsDir)) {
9756
10081
  try {
9757
- const sessionDirs = fs9.readdirSync(sessionsDir, { withFileTypes: true });
10082
+ const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
9758
10083
  for (const entry of sessionDirs) {
9759
10084
  if (!entry.isDirectory()) continue;
9760
- const autopilotPath = path9.join(sessionsDir, entry.name, "autopilot-state.json");
9761
- if (!fs9.existsSync(autopilotPath)) continue;
10085
+ const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
10086
+ if (!fs16.existsSync(autopilotPath)) continue;
9762
10087
  try {
9763
- const raw = fs9.readFileSync(autopilotPath, "utf-8");
10088
+ const raw = fs16.readFileSync(autopilotPath, "utf-8");
9764
10089
  const autopilot = JSON.parse(raw);
9765
10090
  if (!autopilot.phases) continue;
9766
10091
  const linkedPhases = autopilot.phases.filter(
@@ -9840,10 +10165,10 @@ var ProjectScanner = class {
9840
10165
  this.rootDir = rootDir;
9841
10166
  }
9842
10167
  async scan() {
9843
- let projectName = path10.basename(this.rootDir);
10168
+ let projectName = path17.basename(this.rootDir);
9844
10169
  try {
9845
- const pkgPath = path10.join(this.rootDir, "package.json");
9846
- const pkgRaw = await fs10.readFile(pkgPath, "utf-8");
10170
+ const pkgPath = path17.join(this.rootDir, "package.json");
10171
+ const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
9847
10172
  const pkg = JSON.parse(pkgRaw);
9848
10173
  if (pkg.name) projectName = pkg.name;
9849
10174
  } catch {
@@ -9956,13 +10281,13 @@ var BlueprintGenerator = class {
9956
10281
  styles: STYLES,
9957
10282
  scripts: SCRIPTS
9958
10283
  });
9959
- await fs11.mkdir(options.outputDir, { recursive: true });
9960
- await fs11.writeFile(path11.join(options.outputDir, "index.html"), html);
10284
+ await fs18.mkdir(options.outputDir, { recursive: true });
10285
+ await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
9961
10286
  }
9962
10287
  };
9963
10288
/**
 * Resolve the on-disk location of the update-check cache file.
 * The HOME environment variable, when set to a non-empty value, takes
 * precedence over os.homedir(); the file lives under ~/.harness/.
 * @returns {string} absolute path to the update-check.json state file
 */
function getStatePath() {
  let home = process.env["HOME"];
  if (!home) {
    home = os.homedir();
  }
  return path19.join(home, ".harness", "update-check.json");
}
9967
10292
  function isUpdateCheckEnabled(configInterval) {
9968
10293
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -9975,7 +10300,7 @@ function shouldRunCheck(state, intervalMs) {
9975
10300
  }
9976
10301
  function readCheckState() {
9977
10302
  try {
9978
- const raw = fs12.readFileSync(getStatePath(), "utf-8");
10303
+ const raw = fs19.readFileSync(getStatePath(), "utf-8");
9979
10304
  const parsed = JSON.parse(raw);
9980
10305
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
9981
10306
  const state = parsed;
@@ -9992,7 +10317,7 @@ function readCheckState() {
9992
10317
  }
9993
10318
  function spawnBackgroundCheck(currentVersion) {
9994
10319
  const statePath = getStatePath();
9995
- const stateDir = path12.dirname(statePath);
10320
+ const stateDir = path19.dirname(statePath);
9996
10321
  const script = `
9997
10322
  const { execSync } = require('child_process');
9998
10323
  const fs = require('fs');
@@ -10182,16 +10507,28 @@ export {
10182
10507
  archiveStream,
10183
10508
  getStreamForBranch,
10184
10509
  migrateToStreams,
10510
+ resolveSessionDir,
10511
+ updateSessionIndex,
10185
10512
  loadState,
10186
10513
  saveState,
10514
+ clearLearningsCache,
10187
10515
  appendLearning,
10516
+ parseDateFromEntry,
10517
+ analyzeLearningPatterns,
10518
+ loadBudgetedLearnings,
10188
10519
  loadRelevantLearnings,
10520
+ archiveLearnings,
10521
+ pruneLearnings,
10522
+ clearFailuresCache,
10189
10523
  appendFailure,
10190
10524
  loadFailures,
10191
10525
  archiveFailures,
10192
10526
  saveHandoff,
10193
10527
  loadHandoff,
10194
10528
  runMechanicalGate,
10529
+ writeSessionSummary,
10530
+ loadSessionSummary,
10531
+ listActiveSessions,
10195
10532
  executeWorkflow,
10196
10533
  runPipeline,
10197
10534
  runMultiTurnPipeline,