@harness-engineering/cli 1.14.0 → 1.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +240 -39
  2. package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +6 -0
  3. package/dist/agents/skills/claude-code/harness-product-spec/SKILL.md +5 -5
  4. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +240 -39
  5. package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +6 -0
  6. package/dist/agents/skills/gemini-cli/harness-product-spec/SKILL.md +5 -5
  7. package/dist/agents/skills/package.json +1 -0
  8. package/dist/agents/skills/vitest.config.mts +5 -0
  9. package/dist/{agents-md-YTYQDA3P.js → agents-md-ZGNIDWAF.js} +1 -1
  10. package/dist/{architecture-JQZYM4US.js → architecture-ZLIH5533.js} +2 -2
  11. package/dist/bin/harness-mcp.js +11 -11
  12. package/dist/bin/harness.js +16 -14
  13. package/dist/{check-phase-gate-L3RADYWO.js → check-phase-gate-ZOXVBDCN.js} +3 -3
  14. package/dist/{chunk-7IP4JIFL.js → chunk-2BKLWLY6.js} +4 -4
  15. package/dist/{chunk-OSXBPAMK.js → chunk-3ZZKVN62.js} +1 -1
  16. package/dist/{chunk-O5OJVPL6.js → chunk-B2HKP423.js} +1 -1
  17. package/dist/{chunk-YPYGXRDR.js → chunk-EDXIVMAP.js} +4 -4
  18. package/dist/{chunk-XKECDXJS.js → chunk-J4RAX7YB.js} +738 -186
  19. package/dist/{chunk-6KTUUFRN.js → chunk-LGYBN7Y6.js} +1 -1
  20. package/dist/{chunk-3C2MLBPJ.js → chunk-N25INEIX.js} +1 -1
  21. package/dist/{chunk-NLVUVUGD.js → chunk-ND2ENWDM.js} +1 -1
  22. package/dist/{chunk-YZD2MRNQ.js → chunk-NNHDDXYT.js} +379 -119
  23. package/dist/{chunk-S2FXOWOR.js → chunk-OFXQSFOW.js} +2 -2
  24. package/dist/{chunk-OXLLOSSR.js → chunk-VEPAJXBW.js} +2 -2
  25. package/dist/{chunk-TPOTOBR7.js → chunk-YLXFKVJE.js} +3 -3
  26. package/dist/{chunk-ABQHQ6I5.js → chunk-Z2OOPXJO.js} +1238 -133
  27. package/dist/{ci-workflow-EQZFVX3P.js → ci-workflow-765LSHRD.js} +1 -1
  28. package/dist/{dist-HWXF2C3R.js → dist-ALQDD67R.js} +47 -1
  29. package/dist/{docs-7ECGYMAV.js → docs-NRMQCOJ6.js} +3 -3
  30. package/dist/{engine-EG4EH4IX.js → engine-3RB7MXPP.js} +1 -1
  31. package/dist/{entropy-5USWKLVS.js → entropy-6AGX2ZUN.js} +2 -2
  32. package/dist/{feedback-UTBXZZHF.js → feedback-MY4QZIFD.js} +1 -1
  33. package/dist/{generate-agent-definitions-3PM5EU7V.js → generate-agent-definitions-ZAE726AU.js} +1 -1
  34. package/dist/index.d.ts +8 -8
  35. package/dist/index.js +13 -13
  36. package/dist/{loader-ZPALXIVR.js → loader-UUTVMQCC.js} +1 -1
  37. package/dist/{mcp-362EZHF4.js → mcp-VU5FMO52.js} +11 -11
  38. package/dist/{performance-OQAFMJUD.js → performance-2D7G6NMJ.js} +2 -2
  39. package/dist/{review-pipeline-C4GCFVGP.js → review-pipeline-RAQ55ISU.js} +1 -1
  40. package/dist/{runtime-7YLVK453.js → runtime-BCK5RRZQ.js} +1 -1
  41. package/dist/{security-PZOX7AQS.js → security-2RPQEN62.js} +1 -1
  42. package/dist/{validate-FD3Z6VJD.js → validate-KBYQAEWE.js} +2 -2
  43. package/dist/{validate-cross-check-WNJM6H2D.js → validate-cross-check-OABMREW4.js} +1 -1
  44. package/package.json +5 -3
@@ -135,17 +135,17 @@ function resolveFileToLayer(file, layers) {
135
135
  }
136
136
  var accessAsync = promisify(access);
137
137
  var readFileAsync = promisify(readFile);
138
- async function fileExists(path22) {
138
+ async function fileExists(path23) {
139
139
  try {
140
- await accessAsync(path22, constants.F_OK);
140
+ await accessAsync(path23, constants.F_OK);
141
141
  return true;
142
142
  } catch {
143
143
  return false;
144
144
  }
145
145
  }
146
- async function readFileContent(path22) {
146
+ async function readFileContent(path23) {
147
147
  try {
148
- const content = await readFileAsync(path22, "utf-8");
148
+ const content = await readFileAsync(path23, "utf-8");
149
149
  return Ok(content);
150
150
  } catch (error) {
151
151
  return Err(error);
@@ -1832,6 +1832,7 @@ import * as fs6 from "fs";
1832
1832
  import * as path3 from "path";
1833
1833
  import * as fs9 from "fs";
1834
1834
  import * as path6 from "path";
1835
+ import * as crypto from "crypto";
1835
1836
  import * as fs10 from "fs";
1836
1837
  import * as path7 from "path";
1837
1838
  import * as fs11 from "fs";
@@ -1845,26 +1846,31 @@ import * as fs14 from "fs";
1845
1846
  import * as path11 from "path";
1846
1847
  import * as fs15 from "fs";
1847
1848
  import * as path12 from "path";
1848
- import * as fs17 from "fs/promises";
1849
- import { z as z5 } from "zod";
1850
1849
  import * as fs16 from "fs";
1851
1850
  import * as path13 from "path";
1851
+ import { z as z5 } from "zod";
1852
+ import * as fs18 from "fs/promises";
1853
+ import { minimatch as minimatch4 } from "minimatch";
1854
+ import { z as z6 } from "zod";
1855
+ import * as fs17 from "fs";
1852
1856
  import * as path14 from "path";
1853
1857
  import * as path15 from "path";
1854
1858
  import * as path16 from "path";
1855
1859
  import * as path17 from "path";
1856
- import * as fs18 from "fs";
1857
1860
  import * as path18 from "path";
1858
- import { z as z6 } from "zod";
1859
- import * as fs19 from "fs/promises";
1861
+ import * as fs19 from "fs";
1860
1862
  import * as path19 from "path";
1863
+ import { z as z7 } from "zod";
1861
1864
  import * as fs20 from "fs/promises";
1862
1865
  import * as path20 from "path";
1863
- import * as ejs from "ejs";
1864
- import * as fs21 from "fs";
1866
+ import * as fs21 from "fs/promises";
1865
1867
  import * as path21 from "path";
1868
+ import * as ejs from "ejs";
1869
+ import * as fs22 from "fs";
1870
+ import * as path22 from "path";
1866
1871
  import * as os from "os";
1867
1872
  import { spawn } from "child_process";
1873
+ import Parser from "web-tree-sitter";
1868
1874
  async function validateFileStructure(projectPath, conventions) {
1869
1875
  const missing = [];
1870
1876
  const unexpected = [];
@@ -1900,15 +1906,15 @@ function validateConfig(data, schema) {
1900
1906
  let message = "Configuration validation failed";
1901
1907
  const suggestions = [];
1902
1908
  if (firstError) {
1903
- const path22 = firstError.path.join(".");
1904
- const pathDisplay = path22 ? ` at "${path22}"` : "";
1909
+ const path23 = firstError.path.join(".");
1910
+ const pathDisplay = path23 ? ` at "${path23}"` : "";
1905
1911
  if (firstError.code === "invalid_type") {
1906
1912
  const received = firstError.received;
1907
1913
  const expected = firstError.expected;
1908
1914
  if (received === "undefined") {
1909
1915
  code = "MISSING_FIELD";
1910
1916
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
1911
- suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
1917
+ suggestions.push(`Field "${path23}" is required and must be of type "${expected}"`);
1912
1918
  } else {
1913
1919
  code = "INVALID_TYPE";
1914
1920
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -2117,27 +2123,27 @@ function extractSections(content) {
2117
2123
  }
2118
2124
  return sections.map((section) => buildAgentMapSection(section, lines));
2119
2125
  }
2120
- function isExternalLink(path22) {
2121
- return path22.startsWith("http://") || path22.startsWith("https://") || path22.startsWith("#") || path22.startsWith("mailto:");
2126
+ function isExternalLink(path23) {
2127
+ return path23.startsWith("http://") || path23.startsWith("https://") || path23.startsWith("#") || path23.startsWith("mailto:");
2122
2128
  }
2123
2129
  function resolveLinkPath(linkPath, baseDir) {
2124
2130
  return linkPath.startsWith(".") ? join4(baseDir, linkPath) : linkPath;
2125
2131
  }
2126
- async function validateAgentsMap(path22 = "./AGENTS.md") {
2127
- const contentResult = await readFileContent(path22);
2132
+ async function validateAgentsMap(path23 = "./AGENTS.md") {
2133
+ const contentResult = await readFileContent(path23);
2128
2134
  if (!contentResult.ok) {
2129
2135
  return Err(
2130
2136
  createError(
2131
2137
  "PARSE_ERROR",
2132
2138
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
2133
- { path: path22 },
2139
+ { path: path23 },
2134
2140
  ["Ensure the file exists", "Check file permissions"]
2135
2141
  )
2136
2142
  );
2137
2143
  }
2138
2144
  const content = contentResult.value;
2139
2145
  const sections = extractSections(content);
2140
- const baseDir = dirname4(path22);
2146
+ const baseDir = dirname4(path23);
2141
2147
  const sectionTitles = sections.map((s) => s.title);
2142
2148
  const missingSections = REQUIRED_SECTIONS.filter(
2143
2149
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -2271,8 +2277,8 @@ async function checkDocCoverage(domain, options = {}) {
2271
2277
  );
2272
2278
  }
2273
2279
  }
2274
- function suggestFix(path22, existingFiles) {
2275
- const targetName = basename2(path22).toLowerCase();
2280
+ function suggestFix(path23, existingFiles) {
2281
+ const targetName = basename2(path23).toLowerCase();
2276
2282
  const similar = existingFiles.find((file) => {
2277
2283
  const fileName = basename2(file).toLowerCase();
2278
2284
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -2280,7 +2286,7 @@ function suggestFix(path22, existingFiles) {
2280
2286
  if (similar) {
2281
2287
  return `Did you mean "${similar}"?`;
2282
2288
  }
2283
- return `Create the file "${path22}" or remove the link`;
2289
+ return `Create the file "${path23}" or remove the link`;
2284
2290
  }
2285
2291
  async function validateKnowledgeMap(rootDir = process.cwd()) {
2286
2292
  const agentsPath = join22(rootDir, "AGENTS.md");
@@ -2623,8 +2629,8 @@ function createBoundaryValidator(schema, name) {
2623
2629
  return Ok(result.data);
2624
2630
  }
2625
2631
  const suggestions = result.error.issues.map((issue) => {
2626
- const path22 = issue.path.join(".");
2627
- return path22 ? `${path22}: ${issue.message}` : issue.message;
2632
+ const path23 = issue.path.join(".");
2633
+ return path23 ? `${path23}: ${issue.message}` : issue.message;
2628
2634
  });
2629
2635
  return Err(
2630
2636
  createError(
@@ -3232,11 +3238,11 @@ function processExportListSpecifiers(exportDecl, exports) {
3232
3238
  var TypeScriptParser = class {
3233
3239
  name = "typescript";
3234
3240
  extensions = [".ts", ".tsx", ".mts", ".cts"];
3235
- async parseFile(path22) {
3236
- const contentResult = await readFileContent(path22);
3241
+ async parseFile(path23) {
3242
+ const contentResult = await readFileContent(path23);
3237
3243
  if (!contentResult.ok) {
3238
3244
  return Err(
3239
- createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
3245
+ createParseError("NOT_FOUND", `File not found: ${path23}`, { path: path23 }, [
3240
3246
  "Check that the file exists",
3241
3247
  "Verify the path is correct"
3242
3248
  ])
@@ -3246,7 +3252,7 @@ var TypeScriptParser = class {
3246
3252
  const ast = parse(contentResult.value, {
3247
3253
  loc: true,
3248
3254
  range: true,
3249
- jsx: path22.endsWith(".tsx"),
3255
+ jsx: path23.endsWith(".tsx"),
3250
3256
  errorOnUnknownASTType: false
3251
3257
  });
3252
3258
  return Ok({
@@ -3257,7 +3263,7 @@ var TypeScriptParser = class {
3257
3263
  } catch (e) {
3258
3264
  const error = e;
3259
3265
  return Err(
3260
- createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
3266
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path23}: ${error.message}`, { path: path23 }, [
3261
3267
  "Check for syntax errors in the file",
3262
3268
  "Ensure valid TypeScript syntax"
3263
3269
  ])
@@ -3438,22 +3444,22 @@ function extractInlineRefs(content) {
3438
3444
  }
3439
3445
  return refs;
3440
3446
  }
3441
- async function parseDocumentationFile(path22) {
3442
- const contentResult = await readFileContent(path22);
3447
+ async function parseDocumentationFile(path23) {
3448
+ const contentResult = await readFileContent(path23);
3443
3449
  if (!contentResult.ok) {
3444
3450
  return Err(
3445
3451
  createEntropyError(
3446
3452
  "PARSE_ERROR",
3447
- `Failed to read documentation file: ${path22}`,
3448
- { file: path22 },
3453
+ `Failed to read documentation file: ${path23}`,
3454
+ { file: path23 },
3449
3455
  ["Check that the file exists"]
3450
3456
  )
3451
3457
  );
3452
3458
  }
3453
3459
  const content = contentResult.value;
3454
- const type = path22.endsWith(".md") ? "markdown" : "text";
3460
+ const type = path23.endsWith(".md") ? "markdown" : "text";
3455
3461
  return Ok({
3456
- path: path22,
3462
+ path: path23,
3457
3463
  type,
3458
3464
  content,
3459
3465
  codeBlocks: extractCodeBlocks(content),
@@ -6490,6 +6496,8 @@ var SESSION_INDEX_FILE = "index.md";
6490
6496
  var SUMMARY_FILE = "summary.md";
6491
6497
  var SESSION_STATE_FILE = "session-state.json";
6492
6498
  var ARCHIVE_DIR = "archive";
6499
+ var CONTENT_HASHES_FILE = "content-hashes.json";
6500
+ var EVENTS_FILE = "events.jsonl";
6493
6501
  var STREAMS_DIR = "streams";
6494
6502
  var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
6495
6503
  function streamsDir(projectPath) {
@@ -6818,6 +6826,84 @@ async function saveState(projectPath, state, stream, session) {
6818
6826
  );
6819
6827
  }
6820
6828
  }
6829
+ function parseFrontmatter(line) {
6830
+ const match = line.match(/^<!--\s+hash:([a-f0-9]+)(?:\s+tags:([^\s]+))?\s+-->/);
6831
+ if (!match) return null;
6832
+ const hash = match[1];
6833
+ const tags = match[2] ? match[2].split(",").filter(Boolean) : [];
6834
+ return { hash, tags };
6835
+ }
6836
+ function computeEntryHash(text) {
6837
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 8);
6838
+ }
6839
+ function normalizeLearningContent(text) {
6840
+ let normalized = text;
6841
+ normalized = normalized.replace(/\d{4}-\d{2}-\d{2}/g, "");
6842
+ normalized = normalized.replace(/\[skill:[^\]]*\]/g, "");
6843
+ normalized = normalized.replace(/\[outcome:[^\]]*\]/g, "");
6844
+ normalized = normalized.replace(/^[\s]*[-*]\s+/gm, "");
6845
+ normalized = normalized.replace(/\*\*/g, "");
6846
+ normalized = normalized.replace(/:\s*/g, " ");
6847
+ normalized = normalized.toLowerCase();
6848
+ normalized = normalized.replace(/\s+/g, " ").trim();
6849
+ return normalized;
6850
+ }
6851
+ function computeContentHash(text) {
6852
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 16);
6853
+ }
6854
+ function loadContentHashes(stateDir) {
6855
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6856
+ if (!fs9.existsSync(hashesPath)) return {};
6857
+ try {
6858
+ const raw = fs9.readFileSync(hashesPath, "utf-8");
6859
+ const parsed = JSON.parse(raw);
6860
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return {};
6861
+ return parsed;
6862
+ } catch {
6863
+ return {};
6864
+ }
6865
+ }
6866
+ function saveContentHashes(stateDir, index) {
6867
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6868
+ fs9.writeFileSync(hashesPath, JSON.stringify(index, null, 2) + "\n");
6869
+ }
6870
+ function rebuildContentHashes(stateDir) {
6871
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6872
+ if (!fs9.existsSync(learningsPath)) return {};
6873
+ const content = fs9.readFileSync(learningsPath, "utf-8");
6874
+ const lines = content.split("\n");
6875
+ const index = {};
6876
+ for (let i = 0; i < lines.length; i++) {
6877
+ const line = lines[i];
6878
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6879
+ if (isDatedBullet) {
6880
+ const learningMatch = line.match(/:\*\*\s*(.+)$/);
6881
+ if (learningMatch?.[1]) {
6882
+ const normalized = normalizeLearningContent(learningMatch[1]);
6883
+ const hash = computeContentHash(normalized);
6884
+ const dateMatch = line.match(/(\d{4}-\d{2}-\d{2})/);
6885
+ index[hash] = { date: dateMatch?.[1] ?? "", line: i + 1 };
6886
+ }
6887
+ }
6888
+ }
6889
+ saveContentHashes(stateDir, index);
6890
+ return index;
6891
+ }
6892
+ function extractIndexEntry(entry) {
6893
+ const lines = entry.split("\n");
6894
+ const summary = lines[0] ?? entry;
6895
+ const tags = [];
6896
+ const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
6897
+ if (skillMatch?.[1]) tags.push(skillMatch[1]);
6898
+ const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
6899
+ if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
6900
+ return {
6901
+ hash: computeEntryHash(entry),
6902
+ tags,
6903
+ summary,
6904
+ fullText: entry
6905
+ };
6906
+ }
6821
6907
  var learningsCacheMap = /* @__PURE__ */ new Map();
6822
6908
  function clearLearningsCache() {
6823
6909
  learningsCacheMap.clear();
@@ -6829,27 +6915,55 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream,
6829
6915
  const stateDir = dirResult.value;
6830
6916
  const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
6831
6917
  fs9.mkdirSync(stateDir, { recursive: true });
6918
+ const normalizedContent = normalizeLearningContent(learning);
6919
+ const contentHash = computeContentHash(normalizedContent);
6920
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
6921
+ let contentHashes;
6922
+ if (fs9.existsSync(hashesPath)) {
6923
+ contentHashes = loadContentHashes(stateDir);
6924
+ if (Object.keys(contentHashes).length === 0 && fs9.existsSync(learningsPath)) {
6925
+ contentHashes = rebuildContentHashes(stateDir);
6926
+ }
6927
+ } else if (fs9.existsSync(learningsPath)) {
6928
+ contentHashes = rebuildContentHashes(stateDir);
6929
+ } else {
6930
+ contentHashes = {};
6931
+ }
6932
+ if (contentHashes[contentHash]) {
6933
+ return Ok(void 0);
6934
+ }
6832
6935
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6833
- let entry;
6936
+ const fmTags = [];
6937
+ if (skillName) fmTags.push(skillName);
6938
+ if (outcome) fmTags.push(outcome);
6939
+ let bulletLine;
6834
6940
  if (skillName && outcome) {
6835
- entry = `
6836
- - **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}
6837
- `;
6941
+ bulletLine = `- **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}`;
6838
6942
  } else if (skillName) {
6839
- entry = `
6840
- - **${timestamp} [skill:${skillName}]:** ${learning}
6841
- `;
6943
+ bulletLine = `- **${timestamp} [skill:${skillName}]:** ${learning}`;
6842
6944
  } else {
6843
- entry = `
6844
- - **${timestamp}:** ${learning}
6845
- `;
6945
+ bulletLine = `- **${timestamp}:** ${learning}`;
6846
6946
  }
6947
+ const hash = crypto.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
6948
+ const tagsStr = fmTags.length > 0 ? ` tags:${fmTags.join(",")}` : "";
6949
+ const frontmatter = `<!-- hash:${hash}${tagsStr} -->`;
6950
+ const entry = `
6951
+ ${frontmatter}
6952
+ ${bulletLine}
6953
+ `;
6954
+ let existingLineCount;
6847
6955
  if (!fs9.existsSync(learningsPath)) {
6848
6956
  fs9.writeFileSync(learningsPath, `# Learnings
6849
6957
  ${entry}`);
6958
+ existingLineCount = 1;
6850
6959
  } else {
6960
+ const existingContent = fs9.readFileSync(learningsPath, "utf-8");
6961
+ existingLineCount = existingContent.split("\n").length;
6851
6962
  fs9.appendFileSync(learningsPath, entry);
6852
6963
  }
6964
+ const bulletLine_lineNum = existingLineCount + 2;
6965
+ contentHashes[contentHash] = { date: timestamp ?? "", line: bulletLine_lineNum };
6966
+ saveContentHashes(stateDir, contentHashes);
6853
6967
  learningsCacheMap.delete(learningsPath);
6854
6968
  return Ok(void 0);
6855
6969
  } catch (error) {
@@ -6897,7 +7011,30 @@ function analyzeLearningPatterns(entries) {
6897
7011
  return patterns.sort((a, b) => b.count - a.count);
6898
7012
  }
6899
7013
  async function loadBudgetedLearnings(projectPath, options) {
6900
- const { intent, tokenBudget = 1e3, skill, session, stream } = options;
7014
+ const { intent, tokenBudget = 1e3, skill, session, stream, depth = "summary" } = options;
7015
+ if (depth === "index") {
7016
+ const indexEntries = [];
7017
+ if (session) {
7018
+ const sessionResult = await loadIndexEntries(projectPath, skill, stream, session);
7019
+ if (sessionResult.ok) indexEntries.push(...sessionResult.value);
7020
+ }
7021
+ const globalResult2 = await loadIndexEntries(projectPath, skill, stream);
7022
+ if (globalResult2.ok) {
7023
+ const sessionHashes = new Set(indexEntries.map((e) => e.hash));
7024
+ const uniqueGlobal = globalResult2.value.filter((e) => !sessionHashes.has(e.hash));
7025
+ indexEntries.push(...uniqueGlobal);
7026
+ }
7027
+ const budgeted2 = [];
7028
+ let totalTokens2 = 0;
7029
+ for (const entry of indexEntries) {
7030
+ const separator = budgeted2.length > 0 ? "\n" : "";
7031
+ const entryCost = estimateTokens(entry.summary + separator);
7032
+ if (totalTokens2 + entryCost > tokenBudget) break;
7033
+ budgeted2.push(entry.summary);
7034
+ totalTokens2 += entryCost;
7035
+ }
7036
+ return Ok(budgeted2);
7037
+ }
6901
7038
  const sortByRecencyAndRelevance = (entries) => {
6902
7039
  return [...entries].sort((a, b) => {
6903
7040
  const dateA = parseDateFromEntry(a) ?? "0000-00-00";
@@ -6916,7 +7053,9 @@ async function loadBudgetedLearnings(projectPath, options) {
6916
7053
  }
6917
7054
  const globalResult = await loadRelevantLearnings(projectPath, skill, stream);
6918
7055
  if (globalResult.ok) {
6919
- allEntries.push(...sortByRecencyAndRelevance(globalResult.value));
7056
+ const sessionSet = new Set(allEntries.map((e) => e.trim()));
7057
+ const uniqueGlobal = globalResult.value.filter((e) => !sessionSet.has(e.trim()));
7058
+ allEntries.push(...sortByRecencyAndRelevance(uniqueGlobal));
6920
7059
  }
6921
7060
  const budgeted = [];
6922
7061
  let totalTokens = 0;
@@ -6929,6 +7068,68 @@ async function loadBudgetedLearnings(projectPath, options) {
6929
7068
  }
6930
7069
  return Ok(budgeted);
6931
7070
  }
7071
+ async function loadIndexEntries(projectPath, skillName, stream, session) {
7072
+ try {
7073
+ const dirResult = await getStateDir(projectPath, stream, session);
7074
+ if (!dirResult.ok) return dirResult;
7075
+ const stateDir = dirResult.value;
7076
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
7077
+ if (!fs9.existsSync(learningsPath)) {
7078
+ return Ok([]);
7079
+ }
7080
+ const content = fs9.readFileSync(learningsPath, "utf-8");
7081
+ const lines = content.split("\n");
7082
+ const indexEntries = [];
7083
+ let pendingFrontmatter = null;
7084
+ let currentBlock = [];
7085
+ for (const line of lines) {
7086
+ if (line.startsWith("# ")) continue;
7087
+ const fm = parseFrontmatter(line);
7088
+ if (fm) {
7089
+ pendingFrontmatter = fm;
7090
+ continue;
7091
+ }
7092
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
7093
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
7094
+ if (isDatedBullet || isHeading) {
7095
+ if (pendingFrontmatter) {
7096
+ indexEntries.push({
7097
+ hash: pendingFrontmatter.hash,
7098
+ tags: pendingFrontmatter.tags,
7099
+ summary: line,
7100
+ fullText: ""
7101
+ // Placeholder — full text not loaded in index mode
7102
+ });
7103
+ pendingFrontmatter = null;
7104
+ } else {
7105
+ const idx = extractIndexEntry(line);
7106
+ indexEntries.push({
7107
+ hash: idx.hash,
7108
+ tags: idx.tags,
7109
+ summary: line,
7110
+ fullText: ""
7111
+ });
7112
+ }
7113
+ currentBlock = [line];
7114
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
7115
+ currentBlock.push(line);
7116
+ }
7117
+ }
7118
+ if (skillName) {
7119
+ const filtered = indexEntries.filter(
7120
+ (e) => e.tags.includes(skillName) || e.summary.includes(`[skill:${skillName}]`)
7121
+ );
7122
+ return Ok(filtered);
7123
+ }
7124
+ return Ok(indexEntries);
7125
+ } catch (error) {
7126
+ return Err(
7127
+ new Error(
7128
+ `Failed to load index entries: ${error instanceof Error ? error.message : String(error)}`
7129
+ )
7130
+ );
7131
+ }
7132
+ }
6932
7133
  async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6933
7134
  try {
6934
7135
  const dirResult = await getStateDir(projectPath, stream, session);
@@ -6951,6 +7152,7 @@ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6951
7152
  let currentBlock = [];
6952
7153
  for (const line of lines) {
6953
7154
  if (line.startsWith("# ")) continue;
7155
+ if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
6954
7156
  const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6955
7157
  const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
6956
7158
  if (isDatedBullet || isHeading) {
@@ -7060,6 +7262,68 @@ async function pruneLearnings(projectPath, stream) {
7060
7262
  );
7061
7263
  }
7062
7264
  }
7265
+ var PROMOTABLE_OUTCOMES = ["gotcha", "decision", "observation"];
7266
+ function isGeneralizable(entry) {
7267
+ for (const outcome of PROMOTABLE_OUTCOMES) {
7268
+ if (entry.includes(`[outcome:${outcome}]`)) return true;
7269
+ }
7270
+ return false;
7271
+ }
7272
+ async function promoteSessionLearnings(projectPath, sessionSlug, stream) {
7273
+ try {
7274
+ const sessionResult = await loadRelevantLearnings(projectPath, void 0, stream, sessionSlug);
7275
+ if (!sessionResult.ok) return sessionResult;
7276
+ const sessionEntries = sessionResult.value;
7277
+ if (sessionEntries.length === 0) {
7278
+ return Ok({ promoted: 0, skipped: 0 });
7279
+ }
7280
+ const toPromote = [];
7281
+ let skipped = 0;
7282
+ for (const entry of sessionEntries) {
7283
+ if (isGeneralizable(entry)) {
7284
+ toPromote.push(entry);
7285
+ } else {
7286
+ skipped++;
7287
+ }
7288
+ }
7289
+ if (toPromote.length === 0) {
7290
+ return Ok({ promoted: 0, skipped });
7291
+ }
7292
+ const dirResult = await getStateDir(projectPath, stream);
7293
+ if (!dirResult.ok) return dirResult;
7294
+ const stateDir = dirResult.value;
7295
+ const globalPath = path6.join(stateDir, LEARNINGS_FILE);
7296
+ const existingGlobal = fs9.existsSync(globalPath) ? fs9.readFileSync(globalPath, "utf-8") : "";
7297
+ const newEntries = toPromote.filter((entry) => !existingGlobal.includes(entry.trim()));
7298
+ if (newEntries.length === 0) {
7299
+ return Ok({ promoted: 0, skipped: skipped + toPromote.length });
7300
+ }
7301
+ const promotedContent = newEntries.join("\n\n") + "\n";
7302
+ if (!existingGlobal) {
7303
+ fs9.writeFileSync(globalPath, `# Learnings
7304
+
7305
+ ${promotedContent}`);
7306
+ } else {
7307
+ fs9.appendFileSync(globalPath, "\n\n" + promotedContent);
7308
+ }
7309
+ learningsCacheMap.delete(globalPath);
7310
+ return Ok({
7311
+ promoted: newEntries.length,
7312
+ skipped: skipped + (toPromote.length - newEntries.length)
7313
+ });
7314
+ } catch (error) {
7315
+ return Err(
7316
+ new Error(
7317
+ `Failed to promote session learnings: ${error instanceof Error ? error.message : String(error)}`
7318
+ )
7319
+ );
7320
+ }
7321
+ }
7322
+ async function countLearningEntries(projectPath, stream) {
7323
+ const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
7324
+ if (!loadResult.ok) return 0;
7325
+ return loadResult.value.length;
7326
+ }
7063
7327
  var failuresCacheMap = /* @__PURE__ */ new Map();
7064
7328
  function clearFailuresCache() {
7065
7329
  failuresCacheMap.clear();
@@ -7494,6 +7758,146 @@ async function archiveSession(projectPath, sessionSlug) {
7494
7758
  );
7495
7759
  }
7496
7760
  }
7761
+ var SkillEventSchema = z5.object({
7762
+ timestamp: z5.string(),
7763
+ skill: z5.string(),
7764
+ session: z5.string().optional(),
7765
+ type: z5.enum(["phase_transition", "decision", "gate_result", "handoff", "error", "checkpoint"]),
7766
+ summary: z5.string(),
7767
+ data: z5.record(z5.unknown()).optional(),
7768
+ refs: z5.array(z5.string()).optional(),
7769
+ contentHash: z5.string().optional()
7770
+ });
7771
+ function computeEventHash(event, session) {
7772
+ const identity = `${event.skill}|${event.type}|${event.summary}|${session ?? ""}`;
7773
+ return computeContentHash(identity);
7774
+ }
7775
+ var knownHashesCache = /* @__PURE__ */ new Map();
7776
+ function loadKnownHashes(eventsPath) {
7777
+ const cached = knownHashesCache.get(eventsPath);
7778
+ if (cached) return cached;
7779
+ const hashes = /* @__PURE__ */ new Set();
7780
+ if (fs16.existsSync(eventsPath)) {
7781
+ const content = fs16.readFileSync(eventsPath, "utf-8");
7782
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
7783
+ for (const line of lines) {
7784
+ try {
7785
+ const existing = JSON.parse(line);
7786
+ if (existing.contentHash) {
7787
+ hashes.add(existing.contentHash);
7788
+ }
7789
+ } catch {
7790
+ }
7791
+ }
7792
+ }
7793
+ knownHashesCache.set(eventsPath, hashes);
7794
+ return hashes;
7795
+ }
7796
+ function clearEventHashCache() {
7797
+ knownHashesCache.clear();
7798
+ }
7799
+ async function emitEvent(projectPath, event, options) {
7800
+ try {
7801
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7802
+ if (!dirResult.ok) return dirResult;
7803
+ const stateDir = dirResult.value;
7804
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
7805
+ fs16.mkdirSync(stateDir, { recursive: true });
7806
+ const contentHash = computeEventHash(event, options?.session);
7807
+ const knownHashes = loadKnownHashes(eventsPath);
7808
+ if (knownHashes.has(contentHash)) {
7809
+ return Ok({ written: false, reason: "duplicate" });
7810
+ }
7811
+ const fullEvent = {
7812
+ ...event,
7813
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7814
+ contentHash
7815
+ };
7816
+ if (options?.session) {
7817
+ fullEvent.session = options.session;
7818
+ }
7819
+ fs16.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
7820
+ knownHashes.add(contentHash);
7821
+ return Ok({ written: true });
7822
+ } catch (error) {
7823
+ return Err(
7824
+ new Error(`Failed to emit event: ${error instanceof Error ? error.message : String(error)}`)
7825
+ );
7826
+ }
7827
+ }
7828
+ async function loadEvents(projectPath, options) {
7829
+ try {
7830
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7831
+ if (!dirResult.ok) return dirResult;
7832
+ const stateDir = dirResult.value;
7833
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
7834
+ if (!fs16.existsSync(eventsPath)) {
7835
+ return Ok([]);
7836
+ }
7837
+ const content = fs16.readFileSync(eventsPath, "utf-8");
7838
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
7839
+ const events = [];
7840
+ for (const line of lines) {
7841
+ try {
7842
+ const parsed = JSON.parse(line);
7843
+ const result = SkillEventSchema.safeParse(parsed);
7844
+ if (result.success) {
7845
+ events.push(result.data);
7846
+ }
7847
+ } catch {
7848
+ }
7849
+ }
7850
+ return Ok(events);
7851
+ } catch (error) {
7852
+ return Err(
7853
+ new Error(`Failed to load events: ${error instanceof Error ? error.message : String(error)}`)
7854
+ );
7855
+ }
7856
+ }
7857
+ function formatPhaseTransition(event) {
7858
+ const data = event.data;
7859
+ const suffix = data?.taskCount ? ` (${data.taskCount} tasks)` : "";
7860
+ return `phase: ${data?.from ?? "?"} -> ${data?.to ?? "?"}${suffix}`;
7861
+ }
7862
+ function formatGateResult(event) {
7863
+ const data = event.data;
7864
+ const status = data?.passed ? "passed" : "failed";
7865
+ const checks = data?.checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
7866
+ return checks ? `gate: ${status} (${checks})` : `gate: ${status}`;
7867
+ }
7868
+ function formatHandoffDetail(event) {
7869
+ const data = event.data;
7870
+ const direction = data?.toSkill ? ` -> ${data.toSkill}` : "";
7871
+ return `handoff: ${event.summary}${direction}`;
7872
+ }
7873
+ var EVENT_FORMATTERS = {
7874
+ phase_transition: formatPhaseTransition,
7875
+ gate_result: formatGateResult,
7876
+ decision: (event) => `decision: ${event.summary}`,
7877
+ handoff: formatHandoffDetail,
7878
+ error: (event) => `error: ${event.summary}`,
7879
+ checkpoint: (event) => `checkpoint: ${event.summary}`
7880
+ };
7881
+ function formatEventTimeline(events, limit = 20) {
7882
+ if (events.length === 0) return "";
7883
+ const recent = events.slice(-limit);
7884
+ return recent.map((event) => {
7885
+ const time = formatTime(event.timestamp);
7886
+ const formatter = EVENT_FORMATTERS[event.type];
7887
+ const detail = formatter ? formatter(event) : event.summary;
7888
+ return `- ${time} [${event.skill}] ${detail}`;
7889
+ }).join("\n");
7890
+ }
7891
+ function formatTime(timestamp) {
7892
+ try {
7893
+ const date = new Date(timestamp);
7894
+ const hours = String(date.getHours()).padStart(2, "0");
7895
+ const minutes = String(date.getMinutes()).padStart(2, "0");
7896
+ return `${hours}:${minutes}`;
7897
+ } catch {
7898
+ return "??:??";
7899
+ }
7900
+ }
7497
7901
  async function executeWorkflow(workflow, executor) {
7498
7902
  const stepResults = [];
7499
7903
  const startTime = Date.now();
@@ -7670,19 +8074,19 @@ var DEFAULT_SECURITY_CONFIG = {
7670
8074
  rules: {},
7671
8075
  exclude: ["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]
7672
8076
  };
7673
- var RuleOverrideSchema = z5.enum(["off", "error", "warning", "info"]);
7674
- var SecurityConfigSchema = z5.object({
7675
- enabled: z5.boolean().default(true),
7676
- strict: z5.boolean().default(false),
7677
- rules: z5.record(z5.string(), RuleOverrideSchema).optional().default({}),
7678
- exclude: z5.array(z5.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
7679
- external: z5.object({
7680
- semgrep: z5.object({
7681
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto"),
7682
- rulesets: z5.array(z5.string()).optional()
8077
+ var RuleOverrideSchema = z6.enum(["off", "error", "warning", "info"]);
8078
+ var SecurityConfigSchema = z6.object({
8079
+ enabled: z6.boolean().default(true),
8080
+ strict: z6.boolean().default(false),
8081
+ rules: z6.record(z6.string(), RuleOverrideSchema).optional().default({}),
8082
+ exclude: z6.array(z6.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
8083
+ external: z6.object({
8084
+ semgrep: z6.object({
8085
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto"),
8086
+ rulesets: z6.array(z6.string()).optional()
7683
8087
  }).optional(),
7684
- gitleaks: z5.object({
7685
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto")
8088
+ gitleaks: z6.object({
8089
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto")
7686
8090
  }).optional()
7687
8091
  }).optional()
7688
8092
  });
@@ -7715,11 +8119,11 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7715
8119
  }
7716
8120
  function detectStack(projectRoot) {
7717
8121
  const stacks = [];
7718
- const pkgJsonPath = path13.join(projectRoot, "package.json");
7719
- if (fs16.existsSync(pkgJsonPath)) {
8122
+ const pkgJsonPath = path14.join(projectRoot, "package.json");
8123
+ if (fs17.existsSync(pkgJsonPath)) {
7720
8124
  stacks.push("node");
7721
8125
  try {
7722
- const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
8126
+ const pkgJson = JSON.parse(fs17.readFileSync(pkgJsonPath, "utf-8"));
7723
8127
  const allDeps = {
7724
8128
  ...pkgJson.dependencies,
7725
8129
  ...pkgJson.devDependencies
@@ -7734,13 +8138,13 @@ function detectStack(projectRoot) {
7734
8138
  } catch {
7735
8139
  }
7736
8140
  }
7737
- const goModPath = path13.join(projectRoot, "go.mod");
7738
- if (fs16.existsSync(goModPath)) {
8141
+ const goModPath = path14.join(projectRoot, "go.mod");
8142
+ if (fs17.existsSync(goModPath)) {
7739
8143
  stacks.push("go");
7740
8144
  }
7741
- const requirementsPath = path13.join(projectRoot, "requirements.txt");
7742
- const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
7743
- if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
8145
+ const requirementsPath = path14.join(projectRoot, "requirements.txt");
8146
+ const pyprojectPath = path14.join(projectRoot, "pyproject.toml");
8147
+ if (fs17.existsSync(requirementsPath) || fs17.existsSync(pyprojectPath)) {
7744
8148
  stacks.push("python");
7745
8149
  }
7746
8150
  return stacks;
@@ -7802,6 +8206,72 @@ var secretRules = [
7802
8206
  message: "Hardcoded JWT token detected",
7803
8207
  remediation: "Tokens should be fetched at runtime, not embedded in source",
7804
8208
  references: ["CWE-798"]
8209
+ },
8210
+ {
8211
+ id: "SEC-SEC-006",
8212
+ name: "Anthropic API Key",
8213
+ category: "secrets",
8214
+ severity: "error",
8215
+ confidence: "high",
8216
+ patterns: [/sk-ant-api\d{2}-[A-Za-z0-9_-]{20,}/],
8217
+ message: "Hardcoded Anthropic API key detected",
8218
+ remediation: "Use environment variables: process.env.ANTHROPIC_API_KEY",
8219
+ references: ["CWE-798"]
8220
+ },
8221
+ {
8222
+ id: "SEC-SEC-007",
8223
+ name: "OpenAI API Key",
8224
+ category: "secrets",
8225
+ severity: "error",
8226
+ confidence: "high",
8227
+ patterns: [/sk-proj-[A-Za-z0-9_-]{20,}/],
8228
+ message: "Hardcoded OpenAI API key detected",
8229
+ remediation: "Use environment variables: process.env.OPENAI_API_KEY",
8230
+ references: ["CWE-798"]
8231
+ },
8232
+ {
8233
+ id: "SEC-SEC-008",
8234
+ name: "Google API Key",
8235
+ category: "secrets",
8236
+ severity: "error",
8237
+ confidence: "high",
8238
+ patterns: [/AIza[A-Za-z0-9_-]{35}/],
8239
+ message: "Hardcoded Google API key detected",
8240
+ remediation: "Use environment variables or a secrets manager for Google API keys",
8241
+ references: ["CWE-798"]
8242
+ },
8243
+ {
8244
+ id: "SEC-SEC-009",
8245
+ name: "GitHub Personal Access Token",
8246
+ category: "secrets",
8247
+ severity: "error",
8248
+ confidence: "high",
8249
+ patterns: [/gh[pous]_[A-Za-z0-9_]{36,}/],
8250
+ message: "Hardcoded GitHub personal access token detected",
8251
+ remediation: "Use environment variables: process.env.GITHUB_TOKEN",
8252
+ references: ["CWE-798"]
8253
+ },
8254
+ {
8255
+ id: "SEC-SEC-010",
8256
+ name: "Stripe Live Key",
8257
+ category: "secrets",
8258
+ severity: "error",
8259
+ confidence: "high",
8260
+ patterns: [/\b[spr]k_live_[A-Za-z0-9]{24,}/],
8261
+ message: "Hardcoded Stripe live key detected",
8262
+ remediation: "Use environment variables for Stripe keys; never commit live keys",
8263
+ references: ["CWE-798"]
8264
+ },
8265
+ {
8266
+ id: "SEC-SEC-011",
8267
+ name: "Database Connection String with Credentials",
8268
+ category: "secrets",
8269
+ severity: "error",
8270
+ confidence: "high",
8271
+ patterns: [/(?:postgres|mysql|mongodb|redis|amqp|mssql)(?:\+\w+)?:\/\/[^/\s:]+:[^@/\s]+@/i],
8272
+ message: "Database connection string with embedded credentials detected",
8273
+ remediation: "Use environment variables for connection strings; separate credentials from URIs",
8274
+ references: ["CWE-798"]
7805
8275
  }
7806
8276
  ];
7807
8277
  var injectionRules = [
@@ -7975,6 +8445,154 @@ var deserializationRules = [
7975
8445
  references: ["CWE-502"]
7976
8446
  }
7977
8447
  ];
8448
+ var agentConfigRules = [
8449
+ {
8450
+ id: "SEC-AGT-001",
8451
+ name: "Hidden Unicode Characters",
8452
+ category: "agent-config",
8453
+ severity: "error",
8454
+ confidence: "high",
8455
+ patterns: [/\u200B|\u200C|\u200D|\uFEFF|\u2060/],
8456
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/*.yaml",
8457
+ message: "Hidden zero-width Unicode characters detected in agent configuration",
8458
+ remediation: "Remove invisible Unicode characters; they may hide malicious instructions",
8459
+ references: ["CWE-116"]
8460
+ },
8461
+ {
8462
+ id: "SEC-AGT-002",
8463
+ name: "URL Execution Directives",
8464
+ category: "agent-config",
8465
+ severity: "warning",
8466
+ confidence: "medium",
8467
+ patterns: [/\b(?:curl|wget)\s+\S+/i, /\bfetch\s*\(/i],
8468
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md",
8469
+ message: "URL execution directive found in agent configuration",
8470
+ remediation: "Avoid instructing agents to download and execute remote content",
8471
+ references: ["CWE-94"]
8472
+ },
8473
+ {
8474
+ id: "SEC-AGT-003",
8475
+ name: "Wildcard Tool Permissions",
8476
+ category: "agent-config",
8477
+ severity: "warning",
8478
+ confidence: "high",
8479
+ patterns: [/(?:Bash|Write|Edit)\s*\(\s*\*\s*\)/],
8480
+ fileGlob: "**/.claude/**,**/settings*.json",
8481
+ message: "Wildcard tool permissions grant unrestricted access",
8482
+ remediation: "Scope tool permissions to specific patterns instead of wildcards",
8483
+ references: ["CWE-250"]
8484
+ },
8485
+ {
8486
+ id: "SEC-AGT-004",
8487
+ name: "Auto-approve Patterns",
8488
+ category: "agent-config",
8489
+ severity: "warning",
8490
+ confidence: "high",
8491
+ patterns: [/\bautoApprove\b/i, /\bauto_approve\b/i],
8492
+ fileGlob: "**/.claude/**,**/.mcp.json",
8493
+ message: "Auto-approve configuration bypasses human review of tool calls",
8494
+ remediation: "Review auto-approved tools carefully; prefer explicit approval for destructive operations",
8495
+ references: ["CWE-862"]
8496
+ },
8497
+ {
8498
+ id: "SEC-AGT-005",
8499
+ name: "Prompt Injection Surface",
8500
+ category: "agent-config",
8501
+ severity: "warning",
8502
+ confidence: "medium",
8503
+ patterns: [/\$\{[^}]*\}/, /\{\{[^}]*\}\}/],
8504
+ fileGlob: "**/skill.yaml",
8505
+ message: "Template interpolation syntax in skill YAML may enable prompt injection",
8506
+ remediation: "Avoid dynamic interpolation in skill descriptions; use static text",
8507
+ references: ["CWE-94"]
8508
+ },
8509
+ {
8510
+ id: "SEC-AGT-006",
8511
+ name: "Permission Bypass Flags",
8512
+ category: "agent-config",
8513
+ severity: "error",
8514
+ confidence: "high",
8515
+ patterns: [/--dangerously-skip-permissions/, /--no-verify/],
8516
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/.claude/**",
8517
+ message: "Permission bypass flag detected in agent configuration",
8518
+ remediation: "Remove flags that bypass safety checks; they undermine enforcement",
8519
+ references: ["CWE-863"]
8520
+ },
8521
+ {
8522
+ id: "SEC-AGT-007",
8523
+ name: "Hook Injection Surface",
8524
+ category: "agent-config",
8525
+ severity: "error",
8526
+ confidence: "low",
8527
+ patterns: [/\$\(/, /`[^`]+`/, /\s&&\s/, /\s\|\|\s/],
8528
+ fileGlob: "**/settings*.json,**/hooks.json",
8529
+ message: "Shell metacharacters in hook commands may enable command injection",
8530
+ remediation: "Use simple, single-command hooks without shell operators; chain logic inside the script",
8531
+ references: ["CWE-78"]
8532
+ }
8533
+ ];
8534
+ var mcpRules = [
8535
+ {
8536
+ id: "SEC-MCP-001",
8537
+ name: "Hardcoded MCP Secrets",
8538
+ category: "mcp",
8539
+ severity: "error",
8540
+ confidence: "medium",
8541
+ patterns: [/(?:API_KEY|SECRET|TOKEN|PASSWORD|CREDENTIAL)\s*["']?\s*:\s*["'][^"']{8,}["']/i],
8542
+ fileGlob: "**/.mcp.json",
8543
+ message: "Hardcoded secret detected in MCP server configuration",
8544
+ remediation: "Use environment variable references instead of inline secrets in .mcp.json",
8545
+ references: ["CWE-798"]
8546
+ },
8547
+ {
8548
+ id: "SEC-MCP-002",
8549
+ name: "Shell Injection in MCP Args",
8550
+ category: "mcp",
8551
+ severity: "error",
8552
+ confidence: "medium",
8553
+ patterns: [/\$\(/, /`[^`]+`/],
8554
+ fileGlob: "**/.mcp.json",
8555
+ message: "Shell metacharacters detected in MCP server arguments",
8556
+ remediation: "Use literal argument values; avoid shell interpolation in MCP args",
8557
+ references: ["CWE-78"]
8558
+ },
8559
+ {
8560
+ id: "SEC-MCP-003",
8561
+ name: "Network Exposure",
8562
+ category: "mcp",
8563
+ severity: "warning",
8564
+ confidence: "high",
8565
+ patterns: [/0\.0\.0\.0/, /["']\*["']\s*:\s*\d/, /host["']?\s*:\s*["']\*["']/i],
8566
+ fileGlob: "**/.mcp.json",
8567
+ message: "MCP server binding to all network interfaces (0.0.0.0 or wildcard *)",
8568
+ remediation: "Bind to 127.0.0.1 or localhost to restrict access to local machine",
8569
+ references: ["CWE-668"]
8570
+ },
8571
+ {
8572
+ id: "SEC-MCP-004",
8573
+ name: "Typosquatting Vector",
8574
+ category: "mcp",
8575
+ severity: "warning",
8576
+ confidence: "medium",
8577
+ patterns: [/\bnpx\s+(?:-y|--yes)\b/],
8578
+ fileGlob: "**/.mcp.json",
8579
+ message: "npx -y auto-installs packages without confirmation, enabling typosquatting",
8580
+ remediation: "Pin exact package versions or install packages explicitly before use",
8581
+ references: ["CWE-427"]
8582
+ },
8583
+ {
8584
+ id: "SEC-MCP-005",
8585
+ name: "Unencrypted Transport",
8586
+ category: "mcp",
8587
+ severity: "warning",
8588
+ confidence: "medium",
8589
+ patterns: [/http:\/\/(?!localhost\b|127\.0\.0\.1\b)/],
8590
+ fileGlob: "**/.mcp.json",
8591
+ message: "Unencrypted HTTP transport detected for MCP server connection",
8592
+ remediation: "Use https:// for all non-localhost MCP server connections",
8593
+ references: ["CWE-319"]
8594
+ }
8595
+ ];
7978
8596
  var nodeRules = [
7979
8597
  {
7980
8598
  id: "SEC-NODE-001",
@@ -8093,7 +8711,9 @@ var SecurityScanner = class {
8093
8711
  ...cryptoRules,
8094
8712
  ...pathTraversalRules,
8095
8713
  ...networkRules,
8096
- ...deserializationRules
8714
+ ...deserializationRules,
8715
+ ...agentConfigRules,
8716
+ ...mcpRules
8097
8717
  ]);
8098
8718
  this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
8099
8719
  this.activeRules = this.registry.getAll();
@@ -8102,6 +8722,12 @@ var SecurityScanner = class {
8102
8722
  const stacks = detectStack(projectRoot);
8103
8723
  this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
8104
8724
  }
8725
+ /**
8726
+ * Scan raw content against all active rules. Note: this method does NOT apply
8727
+ * fileGlob filtering — every active rule is evaluated regardless of filePath.
8728
+ * If you are scanning a specific file and want fileGlob-based rule filtering,
8729
+ * use {@link scanFile} instead.
8730
+ */
8105
8731
  scanContent(content, filePath, startLine = 1) {
8106
8732
  if (!this.config.enabled) return [];
8107
8733
  const findings = [];
@@ -8143,8 +8769,52 @@ var SecurityScanner = class {
8143
8769
  }
8144
8770
  async scanFile(filePath) {
8145
8771
  if (!this.config.enabled) return [];
8146
- const content = await fs17.readFile(filePath, "utf-8");
8147
- return this.scanContent(content, filePath, 1);
8772
+ const content = await fs18.readFile(filePath, "utf-8");
8773
+ return this.scanContentForFile(content, filePath, 1);
8774
+ }
8775
+ scanContentForFile(content, filePath, startLine = 1) {
8776
+ if (!this.config.enabled) return [];
8777
+ const findings = [];
8778
+ const lines = content.split("\n");
8779
+ const applicableRules = this.activeRules.filter((rule) => {
8780
+ if (!rule.fileGlob) return true;
8781
+ const globs = rule.fileGlob.split(",").map((g) => g.trim());
8782
+ return globs.some((glob2) => minimatch4(filePath, glob2, { dot: true }));
8783
+ });
8784
+ for (const rule of applicableRules) {
8785
+ const resolved = resolveRuleSeverity(
8786
+ rule.id,
8787
+ rule.severity,
8788
+ this.config.rules ?? {},
8789
+ this.config.strict
8790
+ );
8791
+ if (resolved === "off") continue;
8792
+ for (let i = 0; i < lines.length; i++) {
8793
+ const line = lines[i] ?? "";
8794
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
8795
+ for (const pattern of rule.patterns) {
8796
+ pattern.lastIndex = 0;
8797
+ if (pattern.test(line)) {
8798
+ findings.push({
8799
+ ruleId: rule.id,
8800
+ ruleName: rule.name,
8801
+ category: rule.category,
8802
+ severity: resolved,
8803
+ confidence: rule.confidence,
8804
+ file: filePath,
8805
+ line: startLine + i,
8806
+ match: line.trim(),
8807
+ context: line,
8808
+ message: rule.message,
8809
+ remediation: rule.remediation,
8810
+ ...rule.references ? { references: rule.references } : {}
8811
+ });
8812
+ break;
8813
+ }
8814
+ }
8815
+ }
8816
+ }
8817
+ return findings;
8148
8818
  }
8149
8819
  async scanFiles(filePaths) {
8150
8820
  const allFindings = [];
@@ -8178,7 +8848,7 @@ var ALL_CHECKS = [
8178
8848
  ];
8179
8849
  async function runValidateCheck(projectRoot, config) {
8180
8850
  const issues = [];
8181
- const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8851
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8182
8852
  const result = await validateAgentsMap(agentsPath);
8183
8853
  if (!result.ok) {
8184
8854
  issues.push({ severity: "error", message: result.error.message });
@@ -8235,7 +8905,7 @@ async function runDepsCheck(projectRoot, config) {
8235
8905
  }
8236
8906
  async function runDocsCheck(projectRoot, config) {
8237
8907
  const issues = [];
8238
- const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
8908
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
8239
8909
  const entropyConfig = config.entropy || {};
8240
8910
  const result = await checkDocCoverage("project", {
8241
8911
  docsDir,
@@ -8522,7 +9192,7 @@ async function runMechanicalChecks(options) {
8522
9192
  };
8523
9193
  if (!skip.includes("validate")) {
8524
9194
  try {
8525
- const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
9195
+ const agentsPath = path16.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8526
9196
  const result = await validateAgentsMap(agentsPath);
8527
9197
  if (!result.ok) {
8528
9198
  statuses.validate = "fail";
@@ -8559,7 +9229,7 @@ async function runMechanicalChecks(options) {
8559
9229
  statuses.validate = "fail";
8560
9230
  findings.push({
8561
9231
  tool: "validate",
8562
- file: path15.join(projectRoot, "AGENTS.md"),
9232
+ file: path16.join(projectRoot, "AGENTS.md"),
8563
9233
  message: err instanceof Error ? err.message : String(err),
8564
9234
  severity: "error"
8565
9235
  });
@@ -8623,7 +9293,7 @@ async function runMechanicalChecks(options) {
8623
9293
  (async () => {
8624
9294
  const localFindings = [];
8625
9295
  try {
8626
- const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
9296
+ const docsDir = path16.join(projectRoot, config.docsDir ?? "docs");
8627
9297
  const result = await checkDocCoverage("project", { docsDir });
8628
9298
  if (!result.ok) {
8629
9299
  statuses["check-docs"] = "warn";
@@ -8650,7 +9320,7 @@ async function runMechanicalChecks(options) {
8650
9320
  statuses["check-docs"] = "warn";
8651
9321
  localFindings.push({
8652
9322
  tool: "check-docs",
8653
- file: path15.join(projectRoot, "docs"),
9323
+ file: path16.join(projectRoot, "docs"),
8654
9324
  message: err instanceof Error ? err.message : String(err),
8655
9325
  severity: "warning"
8656
9326
  });
@@ -8799,18 +9469,18 @@ function computeContextBudget(diffLines) {
8799
9469
  return diffLines;
8800
9470
  }
8801
9471
  function isWithinProject(absPath, projectRoot) {
8802
- const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
8803
- const resolvedPath = path16.resolve(absPath);
8804
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
9472
+ const resolvedRoot = path17.resolve(projectRoot) + path17.sep;
9473
+ const resolvedPath = path17.resolve(absPath);
9474
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path17.resolve(projectRoot);
8805
9475
  }
8806
9476
  async function readContextFile(projectRoot, filePath, reason) {
8807
- const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
9477
+ const absPath = path17.isAbsolute(filePath) ? filePath : path17.join(projectRoot, filePath);
8808
9478
  if (!isWithinProject(absPath, projectRoot)) return null;
8809
9479
  const result = await readFileContent(absPath);
8810
9480
  if (!result.ok) return null;
8811
9481
  const content = result.value;
8812
9482
  const lines = content.split("\n").length;
8813
- const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9483
+ const relPath = path17.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
8814
9484
  return { path: relPath, content, reason, lines };
8815
9485
  }
8816
9486
  function extractImportSources2(content) {
@@ -8825,18 +9495,18 @@ function extractImportSources2(content) {
8825
9495
  }
8826
9496
  async function resolveImportPath2(projectRoot, fromFile, importSource) {
8827
9497
  if (!importSource.startsWith(".")) return null;
8828
- const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
8829
- const basePath = path16.resolve(fromDir, importSource);
9498
+ const fromDir = path17.dirname(path17.join(projectRoot, fromFile));
9499
+ const basePath = path17.resolve(fromDir, importSource);
8830
9500
  if (!isWithinProject(basePath, projectRoot)) return null;
8831
9501
  const relBase = relativePosix(projectRoot, basePath);
8832
9502
  const candidates = [
8833
9503
  relBase + ".ts",
8834
9504
  relBase + ".tsx",
8835
9505
  relBase + ".mts",
8836
- path16.join(relBase, "index.ts")
9506
+ path17.join(relBase, "index.ts")
8837
9507
  ];
8838
9508
  for (const candidate of candidates) {
8839
- const absCandidate = path16.join(projectRoot, candidate);
9509
+ const absCandidate = path17.join(projectRoot, candidate);
8840
9510
  if (await fileExists(absCandidate)) {
8841
9511
  return candidate;
8842
9512
  }
@@ -8844,7 +9514,7 @@ async function resolveImportPath2(projectRoot, fromFile, importSource) {
8844
9514
  return null;
8845
9515
  }
8846
9516
  async function findTestFiles(projectRoot, sourceFile) {
8847
- const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
9517
+ const baseName = path17.basename(sourceFile, path17.extname(sourceFile));
8848
9518
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
8849
9519
  const results = await findFiles(pattern, projectRoot);
8850
9520
  return results.map((f) => relativePosix(projectRoot, f));
@@ -9657,7 +10327,7 @@ function normalizePath(filePath, projectRoot) {
9657
10327
  let normalized = filePath;
9658
10328
  normalized = normalized.replace(/\\/g, "/");
9659
10329
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
9660
- if (path17.isAbsolute(normalized)) {
10330
+ if (path18.isAbsolute(normalized)) {
9661
10331
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
9662
10332
  if (normalized.startsWith(root)) {
9663
10333
  normalized = normalized.slice(root.length);
@@ -9682,12 +10352,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
9682
10352
  while ((match = importRegex.exec(content)) !== null) {
9683
10353
  const importPath = match[1];
9684
10354
  if (!importPath.startsWith(".")) continue;
9685
- const dir = path17.dirname(current.file);
9686
- let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
10355
+ const dir = path18.dirname(current.file);
10356
+ let resolved = path18.join(dir, importPath).replace(/\\/g, "/");
9687
10357
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
9688
10358
  resolved += ".ts";
9689
10359
  }
9690
- resolved = path17.normalize(resolved).replace(/\\/g, "/");
10360
+ resolved = path18.normalize(resolved).replace(/\\/g, "/");
9691
10361
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
9692
10362
  queue.push({ file: resolved, depth: current.depth + 1 });
9693
10363
  }
@@ -9704,7 +10374,7 @@ async function validateFindings(options) {
9704
10374
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
9705
10375
  continue;
9706
10376
  }
9707
- const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
10377
+ const absoluteFile = path18.isAbsolute(finding.file) ? finding.file : path18.join(projectRoot, finding.file).replace(/\\/g, "/");
9708
10378
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
9709
10379
  continue;
9710
10380
  }
@@ -10313,7 +10983,7 @@ function parseRoadmap(markdown) {
10313
10983
  if (!fmMatch) {
10314
10984
  return Err(new Error("Missing or malformed YAML frontmatter"));
10315
10985
  }
10316
- const fmResult = parseFrontmatter(fmMatch[1]);
10986
+ const fmResult = parseFrontmatter2(fmMatch[1]);
10317
10987
  if (!fmResult.ok) return fmResult;
10318
10988
  const body = markdown.slice(fmMatch[0].length);
10319
10989
  const milestonesResult = parseMilestones(body);
@@ -10323,7 +10993,7 @@ function parseRoadmap(markdown) {
10323
10993
  milestones: milestonesResult.value
10324
10994
  });
10325
10995
  }
10326
- function parseFrontmatter(raw) {
10996
+ function parseFrontmatter2(raw) {
10327
10997
  const lines = raw.split("\n");
10328
10998
  const map = /* @__PURE__ */ new Map();
10329
10999
  for (const line of lines) {
@@ -10498,10 +11168,10 @@ function inferStatus(feature, projectPath, allFeatures) {
10498
11168
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
10499
11169
  const useRootState = featuresWithPlans.length <= 1;
10500
11170
  if (useRootState) {
10501
- const rootStatePath = path18.join(projectPath, ".harness", "state.json");
10502
- if (fs18.existsSync(rootStatePath)) {
11171
+ const rootStatePath = path19.join(projectPath, ".harness", "state.json");
11172
+ if (fs19.existsSync(rootStatePath)) {
10503
11173
  try {
10504
- const raw = fs18.readFileSync(rootStatePath, "utf-8");
11174
+ const raw = fs19.readFileSync(rootStatePath, "utf-8");
10505
11175
  const state = JSON.parse(raw);
10506
11176
  if (state.progress) {
10507
11177
  for (const status of Object.values(state.progress)) {
@@ -10512,16 +11182,16 @@ function inferStatus(feature, projectPath, allFeatures) {
10512
11182
  }
10513
11183
  }
10514
11184
  }
10515
- const sessionsDir = path18.join(projectPath, ".harness", "sessions");
10516
- if (fs18.existsSync(sessionsDir)) {
11185
+ const sessionsDir = path19.join(projectPath, ".harness", "sessions");
11186
+ if (fs19.existsSync(sessionsDir)) {
10517
11187
  try {
10518
- const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
11188
+ const sessionDirs = fs19.readdirSync(sessionsDir, { withFileTypes: true });
10519
11189
  for (const entry of sessionDirs) {
10520
11190
  if (!entry.isDirectory()) continue;
10521
- const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
10522
- if (!fs18.existsSync(autopilotPath)) continue;
11191
+ const autopilotPath = path19.join(sessionsDir, entry.name, "autopilot-state.json");
11192
+ if (!fs19.existsSync(autopilotPath)) continue;
10523
11193
  try {
10524
- const raw = fs18.readFileSync(autopilotPath, "utf-8");
11194
+ const raw = fs19.readFileSync(autopilotPath, "utf-8");
10525
11195
  const autopilot = JSON.parse(raw);
10526
11196
  if (!autopilot.phases) continue;
10527
11197
  const linkedPhases = autopilot.phases.filter(
@@ -10551,17 +11221,26 @@ function inferStatus(feature, projectPath, allFeatures) {
10551
11221
  if (anyStarted) return "in-progress";
10552
11222
  return null;
10553
11223
  }
11224
+ var STATUS_RANK = {
11225
+ backlog: 0,
11226
+ planned: 1,
11227
+ blocked: 1,
11228
+ // lateral to planned — sync can move to/from blocked freely
11229
+ "in-progress": 2,
11230
+ done: 3
11231
+ };
11232
+ function isRegression(from, to) {
11233
+ return STATUS_RANK[to] < STATUS_RANK[from];
11234
+ }
10554
11235
  function syncRoadmap(options) {
10555
11236
  const { projectPath, roadmap, forceSync } = options;
10556
- const isManuallyEdited = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
10557
- const skipOverride = isManuallyEdited && !forceSync;
10558
11237
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
10559
11238
  const changes = [];
10560
11239
  for (const feature of allFeatures) {
10561
- if (skipOverride) continue;
10562
11240
  const inferred = inferStatus(feature, projectPath, allFeatures);
10563
11241
  if (inferred === null) continue;
10564
11242
  if (inferred === feature.status) continue;
11243
+ if (!forceSync && isRegression(feature.status, inferred)) continue;
10565
11244
  changes.push({
10566
11245
  feature: feature.name,
10567
11246
  from: feature.status,
@@ -10570,28 +11249,40 @@ function syncRoadmap(options) {
10570
11249
  }
10571
11250
  return Ok(changes);
10572
11251
  }
10573
- var InteractionTypeSchema = z6.enum(["question", "confirmation", "transition"]);
10574
- var QuestionSchema = z6.object({
10575
- text: z6.string(),
10576
- options: z6.array(z6.string()).optional(),
10577
- default: z6.string().optional()
11252
+ function applySyncChanges(roadmap, changes) {
11253
+ for (const change of changes) {
11254
+ for (const m of roadmap.milestones) {
11255
+ const feature = m.features.find((f) => f.name.toLowerCase() === change.feature.toLowerCase());
11256
+ if (feature) {
11257
+ feature.status = change.to;
11258
+ break;
11259
+ }
11260
+ }
11261
+ }
11262
+ roadmap.frontmatter.lastSynced = (/* @__PURE__ */ new Date()).toISOString();
11263
+ }
11264
+ var InteractionTypeSchema = z7.enum(["question", "confirmation", "transition"]);
11265
+ var QuestionSchema = z7.object({
11266
+ text: z7.string(),
11267
+ options: z7.array(z7.string()).optional(),
11268
+ default: z7.string().optional()
10578
11269
  });
10579
- var ConfirmationSchema = z6.object({
10580
- text: z6.string(),
10581
- context: z6.string()
11270
+ var ConfirmationSchema = z7.object({
11271
+ text: z7.string(),
11272
+ context: z7.string()
10582
11273
  });
10583
- var TransitionSchema = z6.object({
10584
- completedPhase: z6.string(),
10585
- suggestedNext: z6.string(),
10586
- reason: z6.string(),
10587
- artifacts: z6.array(z6.string()),
10588
- requiresConfirmation: z6.boolean(),
10589
- summary: z6.string()
11274
+ var TransitionSchema = z7.object({
11275
+ completedPhase: z7.string(),
11276
+ suggestedNext: z7.string(),
11277
+ reason: z7.string(),
11278
+ artifacts: z7.array(z7.string()),
11279
+ requiresConfirmation: z7.boolean(),
11280
+ summary: z7.string()
10590
11281
  });
10591
- var EmitInteractionInputSchema = z6.object({
10592
- path: z6.string(),
11282
+ var EmitInteractionInputSchema = z7.object({
11283
+ path: z7.string(),
10593
11284
  type: InteractionTypeSchema,
10594
- stream: z6.string().optional(),
11285
+ stream: z7.string().optional(),
10595
11286
  question: QuestionSchema.optional(),
10596
11287
  confirmation: ConfirmationSchema.optional(),
10597
11288
  transition: TransitionSchema.optional()
@@ -10601,10 +11292,10 @@ var ProjectScanner = class {
10601
11292
  this.rootDir = rootDir;
10602
11293
  }
10603
11294
  async scan() {
10604
- let projectName = path19.basename(this.rootDir);
11295
+ let projectName = path20.basename(this.rootDir);
10605
11296
  try {
10606
- const pkgPath = path19.join(this.rootDir, "package.json");
10607
- const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
11297
+ const pkgPath = path20.join(this.rootDir, "package.json");
11298
+ const pkgRaw = await fs20.readFile(pkgPath, "utf-8");
10608
11299
  const pkg = JSON.parse(pkgRaw);
10609
11300
  if (pkg.name) projectName = pkg.name;
10610
11301
  } catch {
@@ -10717,13 +11408,13 @@ var BlueprintGenerator = class {
10717
11408
  styles: STYLES,
10718
11409
  scripts: SCRIPTS
10719
11410
  });
10720
- await fs20.mkdir(options.outputDir, { recursive: true });
10721
- await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
11411
+ await fs21.mkdir(options.outputDir, { recursive: true });
11412
+ await fs21.writeFile(path21.join(options.outputDir, "index.html"), html);
10722
11413
  }
10723
11414
  };
10724
11415
  function getStatePath() {
10725
11416
  const home = process.env["HOME"] || os.homedir();
10726
- return path21.join(home, ".harness", "update-check.json");
11417
+ return path22.join(home, ".harness", "update-check.json");
10727
11418
  }
10728
11419
  function isUpdateCheckEnabled(configInterval) {
10729
11420
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -10736,7 +11427,7 @@ function shouldRunCheck(state, intervalMs) {
10736
11427
  }
10737
11428
  function readCheckState() {
10738
11429
  try {
10739
- const raw = fs21.readFileSync(getStatePath(), "utf-8");
11430
+ const raw = fs22.readFileSync(getStatePath(), "utf-8");
10740
11431
  const parsed = JSON.parse(raw);
10741
11432
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
10742
11433
  const state = parsed;
@@ -10753,7 +11444,7 @@ function readCheckState() {
10753
11444
  }
10754
11445
  function spawnBackgroundCheck(currentVersion) {
10755
11446
  const statePath = getStatePath();
10756
- const stateDir = path21.dirname(statePath);
11447
+ const stateDir = path22.dirname(statePath);
10757
11448
  const script = `
10758
11449
  const { execSync } = require('child_process');
10759
11450
  const fs = require('fs');
@@ -10805,7 +11496,398 @@ function getUpdateNotification(currentVersion) {
10805
11496
  return `Update available: v${currentVersion} -> v${state.latestVersion}
10806
11497
  Run "harness update" to upgrade.`;
10807
11498
  }
10808
- var VERSION = "0.14.0";
11499
// Maps file extensions (lowercase, with leading dot) to the language id
// understood by the parser layer.
var EXTENSION_MAP = {
  ".ts": "typescript",
  ".tsx": "typescript",
  ".mts": "typescript",
  ".cts": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".py": "python"
};
/**
 * Detect the source language of a file from its extension.
 *
 * Fixes: the original sliced from `lastIndexOf(".")`, so an extension-less
 * path (e.g. "Makefile") produced a slice of its final character before the
 * (always-null) map lookup, and matching was case-sensitive. Extension-less
 * paths now short-circuit to null, and extensions are matched
 * case-insensitively (".TS" -> "typescript").
 *
 * @param {string} filePath - Path or bare file name to classify.
 * @returns {string|null} "typescript" | "javascript" | "python", or null
 *   when the extension is unknown or absent.
 */
function detectLanguage(filePath) {
  const dot = filePath.lastIndexOf(".");
  if (dot === -1) return null;
  const ext = filePath.slice(dot).toLowerCase();
  return EXTENSION_MAP[ext] ?? null;
}
11514
// Cache of fully-initialized Parser instances keyed by language id, so each
// grammar is loaded and wired to a parser at most once per process.
var parserCache = /* @__PURE__ */ new Map();
// Whether the tree-sitter runtime (Parser.init) has completed; reset by
// resetParserCache.
var initialized = false;
// Maps a language id to the grammar name whose .wasm artifact is loaded
// from the tree-sitter-wasms package.
var GRAMMAR_MAP = {
  typescript: "tree-sitter-typescript",
  javascript: "tree-sitter-javascript",
  python: "tree-sitter-python"
};
// Initialize the tree-sitter runtime exactly once. Parser.init() is async
// and must resolve before any `new Parser()` is constructed.
async function ensureInit() {
  if (!initialized) {
    await Parser.init();
    initialized = true;
  }
}
11527
/**
 * Locate the compiled grammar .wasm file shipped by the tree-sitter-wasms
 * package for the given grammar name (e.g. "tree-sitter-python").
 * Resolution is anchored to this module so it works from any CWD.
 */
async function resolveWasmPath(grammarName) {
  const moduleApi = await import("module");
  const localRequire = moduleApi.createRequire(import.meta.url ?? __filename);
  const manifestPath = localRequire.resolve("tree-sitter-wasms/package.json");
  const pathApi = await import("path");
  const wasmDir = pathApi.join(pathApi.dirname(manifestPath), "out");
  return pathApi.join(wasmDir, `${grammarName}.wasm`);
}
11535
// Load the tree-sitter Language object for a supported language id by
// resolving and reading its grammar .wasm file.
async function loadLanguage(lang) {
  const wasmFile = await resolveWasmPath(GRAMMAR_MAP[lang]);
  return Parser.Language.load(wasmFile);
}
// Return a ready-to-use Parser for `lang`, creating and caching it on first
// use. The runtime is initialized lazily before the first Parser exists.
async function getParser(lang) {
  const existing = parserCache.get(lang);
  if (existing) {
    return existing;
  }
  await ensureInit();
  const freshParser = new Parser();
  freshParser.setLanguage(await loadLanguage(lang));
  parserCache.set(lang, freshParser);
  return freshParser;
}
11550
/**
 * Parse a source file with tree-sitter.
 *
 * @param {string} filePath - File to read and parse.
 * @returns Ok({ tree, language, source, filePath }) on success, or Err with
 *   code "UNSUPPORTED_LANGUAGE" (unknown extension), "FILE_NOT_FOUND"
 *   (unreadable file), or "PARSE_FAILED" (tree-sitter threw).
 */
async function parseFile(filePath) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    return Err({
      code: "UNSUPPORTED_LANGUAGE",
      message: `Unsupported file extension: ${filePath}`
    });
  }
  const contentResult = await readFileContent(filePath);
  if (!contentResult.ok) {
    return Err({
      code: "FILE_NOT_FOUND",
      message: `Cannot read file: ${filePath}`
    });
  }
  const source = contentResult.value;
  try {
    const parser = await getParser(lang);
    const tree = parser.parse(source);
    return Ok({ tree, language: lang, source, filePath });
  } catch (e) {
    return Err({
      code: "PARSE_FAILED",
      message: `Tree-sitter parse failed for ${filePath}: ${e.message}`
    });
  }
}
11576
// Drop every cached parser and force the tree-sitter runtime to
// re-initialize on next use (primarily a test hook).
function resetParserCache() {
  initialized = false;
  parserCache.clear();
}
11580
// Per-language map from a top-level tree-sitter node type to the symbol
// kind it is reported as. Node types absent from a language's map are
// ignored by extractSymbols; "import" entries are recognized but skipped.
var TOP_LEVEL_TYPES = {
  typescript: {
    function_declaration: "function",
    class_declaration: "class",
    interface_declaration: "interface",
    type_alias_declaration: "type",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import",
    enum_declaration: "type"
  },
  javascript: {
    function_declaration: "function",
    class_declaration: "class",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import"
  },
  python: {
    function_definition: "function",
    class_definition: "class",
    assignment: "variable",
    import_statement: "import",
    import_from_statement: "import"
  }
};
// Per-language node types that count as methods when found directly inside
// a class body (see extractMethods).
var METHOD_TYPES = {
  typescript: ["method_definition", "public_field_definition"],
  javascript: ["method_definition"],
  python: ["function_definition"]
};
11613
// Node types whose text is directly usable as a symbol name.
var IDENTIFIER_TYPES = /* @__PURE__ */ new Set(["identifier", "property_identifier", "type_identifier"]);
// Find the identifier node naming `node`: the "name" field when present,
// otherwise the first child whose type is an identifier kind, else null.
function findIdentifier(node) {
  const named = node.childForFieldName("name");
  if (named) {
    return named;
  }
  for (const child of node.children) {
    if (IDENTIFIER_TYPES.has(child.type)) {
      return child;
    }
  }
  return null;
}
// Name of the first declarator in a variable/lexical declaration, or null
// when no declarator (or no identifier inside it) is found.
function getVariableDeclarationName(node) {
  const declarator = node.children.find((c) => c.type === "variable_declarator");
  if (!declarator) {
    return null;
  }
  return findIdentifier(declarator)?.text ?? null;
}
// Name of the declaration wrapped by an export statement, skipping the
// `export`/`default` keywords and comments.
function getExportName(node, source) {
  const skip = new Set(["export", "default", "comment"]);
  const decl = node.children.find((c) => !skip.has(c.type));
  return decl ? getNodeName(decl, source) : "<anonymous>";
}
// Raw text of the left-hand side of an assignment node.
function getAssignmentName(node) {
  const target = node.childForFieldName("left") ?? node.children[0];
  return target?.text ?? "<anonymous>";
}
// Best-effort human-readable name for any top-level node; falls back to
// "<anonymous>" when no identifier can be located.
function getNodeName(node, source) {
  const direct = findIdentifier(node);
  if (direct) {
    return direct.text;
  }
  switch (node.type) {
    case "lexical_declaration":
    case "variable_declaration":
      return getVariableDeclarationName(node) ?? "<anonymous>";
    case "export_statement":
      return getExportName(node, source);
    case "assignment":
      return getAssignmentName(node);
    default:
      return "<anonymous>";
  }
}
11642
// First source line of `node`, trimmed of surrounding whitespace — used as
// the symbol's one-line display signature.
function getSignature(node, source) {
  const row = node.startPosition.row;
  const line = source.split("\n")[row];
  return line === undefined ? "" : line.trim();
}
11647
// Collect symbol records for the methods declared directly in a class body.
// The body is found via the "body" field or, failing that, the first
// class_body/block child; classes without a body yield [].
function extractMethods(classNode, language, source, filePath) {
  const methodTypes = METHOD_TYPES[language] ?? [];
  const body = classNode.childForFieldName("body") ??
    classNode.children.find((c) => c.type === "class_body" || c.type === "block");
  if (!body) {
    return [];
  }
  const methods = [];
  for (const member of body.children) {
    if (!methodTypes.includes(member.type)) {
      continue;
    }
    methods.push({
      name: getNodeName(member, source),
      kind: "method",
      file: filePath,
      line: member.startPosition.row + 1,
      endLine: member.endPosition.row + 1,
      signature: getSignature(member, source)
    });
  }
  return methods;
}
// Build a symbol record for a top-level node (1-indexed line numbers).
function nodeToSymbol(node, kind, source, filePath) {
  const { startPosition, endPosition } = node;
  return {
    name: getNodeName(node, source),
    kind,
    file: filePath,
    line: startPosition.row + 1,
    endLine: endPosition.row + 1,
    signature: getSignature(node, source)
  };
}
11670
// Turn an export statement into a symbol. When it wraps a recognized
// declaration, report that declaration's kind and name (plus child methods
// for classes); otherwise fall back to a generic "export" symbol.
function processExportStatement(child, topLevelTypes, lang, source, filePath) {
  const keywordTypes = new Set(["export", "default", ";", "comment"]);
  const declaration = child.children.find((c) => !keywordTypes.has(c.type));
  const kind = declaration ? topLevelTypes[declaration.type] : void 0;
  if (!declaration || !kind) {
    return nodeToSymbol(child, "export", source, filePath);
  }
  const sym = nodeToSymbol(child, kind, source, filePath);
  sym.name = getNodeName(declaration, source);
  if (kind === "class") {
    sym.children = extractMethods(declaration, lang, source, filePath);
  }
  return sym;
}
11685
// Walk the root node's direct children and produce the file's symbol list.
// Imports are skipped, export statements get dedicated handling, and class
// symbols carry their methods in `children`.
function extractSymbols(rootNode, lang, source, filePath) {
  const topLevelTypes = TOP_LEVEL_TYPES[lang] ?? {};
  const symbols = [];
  for (const node of rootNode.children) {
    if (node.type === "export_statement") {
      symbols.push(processExportStatement(node, topLevelTypes, lang, source, filePath));
      continue;
    }
    const kind = topLevelTypes[node.type];
    if (!kind || kind === "import") {
      continue;
    }
    const sym = nodeToSymbol(node, kind, source, filePath);
    if (kind === "class") {
      sym.children = extractMethods(node, lang, source, filePath);
    }
    symbols.push(sym);
  }
  return symbols;
}
11703
// Outline placeholder returned when a file cannot be parsed at all.
function buildFailedResult(filePath, lang) {
  return {
    file: filePath,
    language: lang,
    totalLines: 0,
    symbols: [],
    error: "[parse-failed]"
  };
}
11706
/**
 * Produce a structural outline (top-level symbols + total line count) for a
 * file. Unknown extensions and parse failures both yield a failed-result
 * placeholder instead of throwing.
 */
async function getOutline(filePath) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    return buildFailedResult(filePath, "unknown");
  }
  const parsed = await parseFile(filePath);
  if (!parsed.ok) {
    return buildFailedResult(filePath, lang);
  }
  const { tree, source } = parsed.value;
  return {
    file: filePath,
    language: lang,
    totalLines: source.split("\n").length,
    symbols: extractSymbols(tree.rootNode, lang, source, filePath)
  };
}
11716
// Render an outline as a box-drawing tree, e.g.
//   src/foo.ts (120 lines)
//   ├── export function bar() :10
//   └── class Baz :20
// Failed outlines render as "<file> [parse-failed]".
function formatOutline(outline) {
  if (outline.error) {
    return `${outline.file} ${outline.error}`;
  }
  const lines = [`${outline.file} (${outline.totalLines} lines)`];
  const last = outline.symbols.length - 1;
  outline.symbols.forEach((sym, i) => {
    // "└──" for the final top-level symbol, "├──" otherwise.
    const prefix = i === last ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
    lines.push(`${prefix} ${sym.signature} :${sym.line}`);
    if (sym.children) {
      const childLast = sym.children.length - 1;
      sym.children.forEach((child, j) => {
        // Continue the vertical rule ("│") under non-final parents only.
        const childConnector = i === last ? " " : "\u2502 ";
        const childPrefix = j === childLast ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
        lines.push(`${childConnector}${childPrefix} ${child.signature} :${child.line}`);
      });
    }
  });
  return lines.join("\n");
}
11736
// Build the glob used to enumerate candidate files under `directory`
// (backslashes normalized to forward slashes). With an explicit fileGlob,
// match it at any depth; otherwise match every supported source extension.
function buildGlob(directory, fileGlob) {
  const normalized = directory.replaceAll("\\", "/");
  if (fileGlob) {
    return `${normalized}/**/${fileGlob}`;
  }
  const extList = Object.keys(EXTENSION_MAP)
    .map((ext) => ext.slice(1))
    .join(",");
  return `${normalized}/**/*.{${extList}}`;
}
// Case-insensitive substring match of `query` against a symbol name.
function matchesQuery(name, query) {
  const haystack = name.toLowerCase();
  return haystack.includes(query.toLowerCase());
}
11747
// Flatten one level of the symbol tree: each top-level symbol immediately
// followed by its methods (if any).
function flattenSymbols(symbols) {
  return symbols.flatMap((sym) => (sym.children ? [sym, ...sym.children] : [sym]));
}
11757
/**
 * Search for symbols whose name contains `query` (case-insensitive) across
 * all supported files under `directory`, optionally narrowed by `fileGlob`.
 * Files that cannot be classified or parsed are reported in `skipped`.
 * Enumeration failures degrade to an empty result set.
 */
async function searchSymbols(query, directory, fileGlob) {
  let files = [];
  try {
    files = await findFiles(buildGlob(directory, fileGlob), directory);
  } catch {
    // Best-effort: an unreadable directory simply produces no candidates.
  }
  const matches = [];
  const skipped = [];
  for (const file of files) {
    if (!detectLanguage(file)) {
      skipped.push(file);
      continue;
    }
    const outline = await getOutline(file);
    if (outline.error) {
      skipped.push(file);
      continue;
    }
    for (const sym of flattenSymbols(outline.symbols)) {
      if (matchesQuery(sym.name, query)) {
        matches.push({ symbol: sym, context: sym.signature });
      }
    }
  }
  return { query, matches, skipped };
}
11790
// Depth-first (pre-order) search for the first symbol with an exact name
// match, descending into method children; null when absent.
function findSymbolInList(symbols, name) {
  for (const sym of symbols) {
    if (sym.name === name) {
      return sym;
    }
    const nested = sym.children ? findSymbolInList(sym.children, name) : null;
    if (nested) {
      return nested;
    }
  }
  return null;
}
11800
// Return lines [startLine, endLine] of `source` (1-indexed, inclusive),
// clamped to the file's bounds; out-of-range requests yield "".
function extractLines(source, startLine, endLine) {
  const allLines = source.split("\n");
  const from = startLine > 1 ? startLine - 1 : 0;
  const to = endLine < allLines.length ? endLine : allLines.length;
  return allLines.slice(from, to).join("\n");
}
11806
// Result returned when a symbol cannot be unfolded precisely: the whole
// (possibly empty) raw content, flagged as a fallback. Empty content maps
// to a 0..0 line range.
function buildFallbackResult(filePath, symbolName, content, language) {
  const hasContent = Boolean(content);
  return {
    file: filePath,
    symbolName,
    startLine: hasContent ? 1 : 0,
    endLine: hasContent ? content.split("\n").length : 0,
    content,
    language,
    fallback: true,
    warning: "[fallback: raw content]"
  };
}
11819
// Read a file's content, returning "" instead of an error on failure.
async function readContentSafe(filePath) {
  const result = await readFileContent(filePath);
  if (!result.ok) {
    return "";
  }
  return result.value;
}
11823
// Extract the full source span of a named symbol from a file.
// Degrades gracefully through four fallback paths, each returning the raw
// file content flagged `fallback: true`:
//   1. unsupported extension (language "unknown"),
//   2. outline/parse failure,
//   3. symbol not found in the outline,
//   4. re-parse failure after the symbol was located — this one still
//      narrows content to the symbol's recorded line range.
// On success, returns the exact symbol span with `fallback: false`.
async function unfoldSymbol(filePath, symbolName) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, "unknown");
  }
  const outline = await getOutline(filePath);
  if (outline.error) {
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, lang);
  }
  const symbol = findSymbolInList(outline.symbols, symbolName);
  if (!symbol) {
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, lang);
  }
  // NOTE(review): the file is parsed a second time here (getOutline already
  // parsed it) — presumably to get fresh source; confirm before changing.
  const parseResult = await parseFile(filePath);
  if (!parseResult.ok) {
    const content2 = await readContentSafe(filePath);
    // Fallback 4: keep the symbol's known line range but serve raw content.
    return {
      ...buildFallbackResult(
        filePath,
        symbolName,
        extractLines(content2, symbol.line, symbol.endLine),
        lang
      ),
      startLine: symbol.line,
      endLine: symbol.endLine
    };
  }
  const content = extractLines(parseResult.value.source, symbol.line, symbol.endLine);
  return {
    file: filePath,
    symbolName,
    startLine: symbol.line,
    endLine: symbol.endLine,
    content,
    language: lang,
    fallback: false
  };
}
11864
/**
 * Extract an explicit 1-indexed line range from a file. The end line is
 * clamped to the file's length; an unreadable file yields an empty
 * fallback result with a 0..0 range.
 */
async function unfoldRange(filePath, startLine, endLine) {
  const lang = detectLanguage(filePath) ?? "unknown";
  const contentResult = await readFileContent(filePath);
  if (!contentResult.ok) {
    return {
      file: filePath,
      startLine: 0,
      endLine: 0,
      content: "",
      language: lang,
      fallback: true,
      warning: "[fallback: raw content]"
    };
  }
  const source = contentResult.value;
  const clampedEnd = Math.min(endLine, source.split("\n").length);
  return {
    file: filePath,
    startLine,
    endLine: clampedEnd,
    content: extractLines(source, startLine, clampedEnd),
    language: lang,
    fallback: false
  };
}
11890
// Library version string exported to consumers (bumped each release).
var VERSION = "0.15.0";
10809
11891
 
10810
11892
  export {
10811
11893
  ArchMetricCategorySchema,
@@ -10947,14 +12029,19 @@ export {
10947
12029
  updateSessionIndex,
10948
12030
  loadState,
10949
12031
  saveState,
12032
+ parseFrontmatter,
12033
+ extractIndexEntry,
10950
12034
  clearLearningsCache,
10951
12035
  appendLearning,
10952
12036
  parseDateFromEntry,
10953
12037
  analyzeLearningPatterns,
10954
12038
  loadBudgetedLearnings,
12039
+ loadIndexEntries,
10955
12040
  loadRelevantLearnings,
10956
12041
  archiveLearnings,
10957
12042
  pruneLearnings,
12043
+ promoteSessionLearnings,
12044
+ countLearningEntries,
10958
12045
  clearFailuresCache,
10959
12046
  appendFailure,
10960
12047
  loadFailures,
@@ -10970,6 +12057,11 @@ export {
10970
12057
  appendSessionEntry,
10971
12058
  updateSessionEntryStatus,
10972
12059
  archiveSession,
12060
+ SkillEventSchema,
12061
+ clearEventHashCache,
12062
+ emitEvent,
12063
+ loadEvents,
12064
+ formatEventTimeline,
10973
12065
  executeWorkflow,
10974
12066
  runPipeline,
10975
12067
  runMultiTurnPipeline,
@@ -10986,6 +12078,8 @@ export {
10986
12078
  pathTraversalRules,
10987
12079
  networkRules,
10988
12080
  deserializationRules,
12081
+ agentConfigRules,
12082
+ mcpRules,
10989
12083
  nodeRules,
10990
12084
  expressRules,
10991
12085
  reactRules,
@@ -11025,6 +12119,7 @@ export {
11025
12119
  parseRoadmap,
11026
12120
  serializeRoadmap,
11027
12121
  syncRoadmap,
12122
+ applySyncChanges,
11028
12123
  InteractionTypeSchema,
11029
12124
  QuestionSchema,
11030
12125
  ConfirmationSchema,
@@ -11038,5 +12133,15 @@ export {
11038
12133
  readCheckState,
11039
12134
  spawnBackgroundCheck,
11040
12135
  getUpdateNotification,
12136
+ EXTENSION_MAP,
12137
+ detectLanguage,
12138
+ getParser,
12139
+ parseFile,
12140
+ resetParserCache,
12141
+ getOutline,
12142
+ formatOutline,
12143
+ searchSymbols,
12144
+ unfoldSymbol,
12145
+ unfoldRange,
11041
12146
  VERSION
11042
12147
  };