@harness-engineering/core 0.21.0 → 0.21.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -41,7 +41,7 @@ import {
41
41
  runAll,
42
42
  validateDependencies,
43
43
  violationId
44
- } from "./chunk-BQUWXBGR.mjs";
44
+ } from "./chunk-4W4FRAA6.mjs";
45
45
 
46
46
  // src/index.ts
47
47
  export * from "@harness-engineering/types";
@@ -84,15 +84,15 @@ function validateConfig(data, schema) {
84
84
  let message = "Configuration validation failed";
85
85
  const suggestions = [];
86
86
  if (firstError) {
87
- const path28 = firstError.path.join(".");
88
- const pathDisplay = path28 ? ` at "${path28}"` : "";
87
+ const path31 = firstError.path.join(".");
88
+ const pathDisplay = path31 ? ` at "${path31}"` : "";
89
89
  if (firstError.code === "invalid_type") {
90
90
  const received = firstError.received;
91
91
  const expected = firstError.expected;
92
92
  if (received === "undefined") {
93
93
  code = "MISSING_FIELD";
94
94
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
95
- suggestions.push(`Field "${path28}" is required and must be of type "${expected}"`);
95
+ suggestions.push(`Field "${path31}" is required and must be of type "${expected}"`);
96
96
  } else {
97
97
  code = "INVALID_TYPE";
98
98
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -149,21 +149,11 @@ function validateCommitMessage(message, format = "conventional") {
149
149
  issues: []
150
150
  });
151
151
  }
152
- function validateConventionalCommit(message) {
153
- const lines = message.split("\n");
154
- const headerLine = lines[0];
155
- if (!headerLine) {
156
- const error = createError(
157
- "VALIDATION_FAILED",
158
- "Commit message header cannot be empty",
159
- { message },
160
- ["Provide a commit message with at least a header line"]
161
- );
162
- return Err(error);
163
- }
152
+ function parseConventionalHeader(message, headerLine) {
164
153
  const match = headerLine.match(CONVENTIONAL_PATTERN);
165
- if (!match) {
166
- const error = createError(
154
+ if (match) return Ok(match);
155
+ return Err(
156
+ createError(
167
157
  "VALIDATION_FAILED",
168
158
  "Commit message does not follow conventional format",
169
159
  { message, header: headerLine },
@@ -172,13 +162,10 @@ function validateConventionalCommit(message) {
172
162
  "Valid types: " + VALID_TYPES.join(", "),
173
163
  "Example: feat(core): add new feature"
174
164
  ]
175
- );
176
- return Err(error);
177
- }
178
- const type = match[1];
179
- const scope = match[3];
180
- const breaking = match[4] === "!";
181
- const description = match[5];
165
+ )
166
+ );
167
+ }
168
+ function collectCommitIssues(type, description) {
182
169
  const issues = [];
183
170
  if (!VALID_TYPES.includes(type)) {
184
171
  issues.push(`Invalid commit type "${type}". Valid types: ${VALID_TYPES.join(", ")}`);
@@ -186,34 +173,50 @@ function validateConventionalCommit(message) {
186
173
  if (!description || description.trim() === "") {
187
174
  issues.push("Commit description cannot be empty");
188
175
  }
189
- let hasBreakingChange = breaking;
190
- if (lines.length > 1) {
191
- const body = lines.slice(1).join("\n");
192
- if (body.includes("BREAKING CHANGE:")) {
193
- hasBreakingChange = true;
194
- }
176
+ return issues;
177
+ }
178
+ function hasBreakingChangeInBody(lines) {
179
+ if (lines.length <= 1) return false;
180
+ return lines.slice(1).join("\n").includes("BREAKING CHANGE:");
181
+ }
182
+ function validateConventionalCommit(message) {
183
+ const lines = message.split("\n");
184
+ const headerLine = lines[0];
185
+ if (!headerLine) {
186
+ return Err(
187
+ createError(
188
+ "VALIDATION_FAILED",
189
+ "Commit message header cannot be empty",
190
+ { message },
191
+ ["Provide a commit message with at least a header line"]
192
+ )
193
+ );
195
194
  }
195
+ const matchResult = parseConventionalHeader(message, headerLine);
196
+ if (!matchResult.ok) return matchResult;
197
+ const match = matchResult.value;
198
+ const type = match[1];
199
+ const scope = match[3];
200
+ const breaking = match[4] === "!";
201
+ const description = match[5];
202
+ const issues = collectCommitIssues(type, description);
196
203
  if (issues.length > 0) {
197
- let errorMessage = `Commit message validation failed: ${issues.join("; ")}`;
198
- if (issues.some((issue) => issue.includes("description cannot be empty"))) {
199
- errorMessage = `Commit message validation failed: ${issues.join("; ")}`;
200
- }
201
- const error = createError(
202
- "VALIDATION_FAILED",
203
- errorMessage,
204
- { message, issues, type, scope },
205
- ["Review and fix the validation issues above"]
204
+ return Err(
205
+ createError(
206
+ "VALIDATION_FAILED",
207
+ `Commit message validation failed: ${issues.join("; ")}`,
208
+ { message, issues, type, scope },
209
+ ["Review and fix the validation issues above"]
210
+ )
206
211
  );
207
- return Err(error);
208
212
  }
209
- const result = {
213
+ return Ok({
210
214
  valid: true,
211
215
  type,
212
216
  ...scope && { scope },
213
- breaking: hasBreakingChange,
217
+ breaking: breaking || hasBreakingChangeInBody(lines),
214
218
  issues: []
215
- };
216
- return Ok(result);
219
+ });
217
220
  }
218
221
 
219
222
  // src/context/types.ts
@@ -308,27 +311,27 @@ function extractSections(content) {
308
311
  }
309
312
  return sections.map((section) => buildAgentMapSection(section, lines));
310
313
  }
311
- function isExternalLink(path28) {
312
- return path28.startsWith("http://") || path28.startsWith("https://") || path28.startsWith("#") || path28.startsWith("mailto:");
314
+ function isExternalLink(path31) {
315
+ return path31.startsWith("http://") || path31.startsWith("https://") || path31.startsWith("#") || path31.startsWith("mailto:");
313
316
  }
314
317
  function resolveLinkPath(linkPath, baseDir) {
315
318
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
316
319
  }
317
- async function validateAgentsMap(path28 = "./AGENTS.md") {
318
- const contentResult = await readFileContent(path28);
320
+ async function validateAgentsMap(path31 = "./AGENTS.md") {
321
+ const contentResult = await readFileContent(path31);
319
322
  if (!contentResult.ok) {
320
323
  return Err(
321
324
  createError(
322
325
  "PARSE_ERROR",
323
326
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
324
- { path: path28 },
327
+ { path: path31 },
325
328
  ["Ensure the file exists", "Check file permissions"]
326
329
  )
327
330
  );
328
331
  }
329
332
  const content = contentResult.value;
330
333
  const sections = extractSections(content);
331
- const baseDir = dirname(path28);
334
+ const baseDir = dirname(path31);
332
335
  const sectionTitles = sections.map((s) => s.title);
333
336
  const missingSections = REQUIRED_SECTIONS.filter(
334
337
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -469,8 +472,8 @@ async function checkDocCoverage(domain, options = {}) {
469
472
 
470
473
  // src/context/knowledge-map.ts
471
474
  import { join as join2, basename as basename2 } from "path";
472
- function suggestFix(path28, existingFiles) {
473
- const targetName = basename2(path28).toLowerCase();
475
+ function suggestFix(path31, existingFiles) {
476
+ const targetName = basename2(path31).toLowerCase();
474
477
  const similar = existingFiles.find((file) => {
475
478
  const fileName = basename2(file).toLowerCase();
476
479
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -478,7 +481,7 @@ function suggestFix(path28, existingFiles) {
478
481
  if (similar) {
479
482
  return `Did you mean "${similar}"?`;
480
483
  }
481
- return `Create the file "${path28}" or remove the link`;
484
+ return `Create the file "${path31}" or remove the link`;
482
485
  }
483
486
  async function validateKnowledgeMap(rootDir = process.cwd()) {
484
487
  const agentsPath = join2(rootDir, "AGENTS.md");
@@ -671,6 +674,47 @@ var NODE_TYPE_TO_CATEGORY = {
671
674
  prompt: "systemPrompt",
672
675
  system: "systemPrompt"
673
676
  };
677
+ function makeZeroWeights() {
678
+ return {
679
+ systemPrompt: 0,
680
+ projectManifest: 0,
681
+ taskSpec: 0,
682
+ activeCode: 0,
683
+ interfaces: 0,
684
+ reserve: 0
685
+ };
686
+ }
687
+ function normalizeRatios(ratios) {
688
+ const sum = Object.values(ratios).reduce((s, r) => s + r, 0);
689
+ if (sum === 0) return;
690
+ for (const key of Object.keys(ratios)) {
691
+ ratios[key] = ratios[key] / sum;
692
+ }
693
+ }
694
+ function enforceMinimumRatios(ratios, min) {
695
+ for (const key of Object.keys(ratios)) {
696
+ if (ratios[key] < min) ratios[key] = min;
697
+ }
698
+ }
699
+ function applyGraphDensity(ratios, graphDensity) {
700
+ const weights = makeZeroWeights();
701
+ for (const [nodeType, count] of Object.entries(graphDensity)) {
702
+ const category = NODE_TYPE_TO_CATEGORY[nodeType];
703
+ if (category) weights[category] += count;
704
+ }
705
+ const totalWeight = Object.values(weights).reduce((s, w) => s + w, 0);
706
+ if (totalWeight === 0) return;
707
+ const MIN = 0.01;
708
+ for (const key of Object.keys(ratios)) {
709
+ ratios[key] = weights[key] > 0 ? weights[key] / totalWeight : MIN;
710
+ }
711
+ if (ratios.reserve < DEFAULT_RATIOS.reserve) ratios.reserve = DEFAULT_RATIOS.reserve;
712
+ if (ratios.systemPrompt < DEFAULT_RATIOS.systemPrompt)
713
+ ratios.systemPrompt = DEFAULT_RATIOS.systemPrompt;
714
+ normalizeRatios(ratios);
715
+ enforceMinimumRatios(ratios, MIN);
716
+ normalizeRatios(ratios);
717
+ }
674
718
  function contextBudget(totalTokens, overrides, graphDensity) {
675
719
  const ratios = {
676
720
  systemPrompt: DEFAULT_RATIOS.systemPrompt,
@@ -681,50 +725,7 @@ function contextBudget(totalTokens, overrides, graphDensity) {
681
725
  reserve: DEFAULT_RATIOS.reserve
682
726
  };
683
727
  if (graphDensity) {
684
- const categoryWeights = {
685
- systemPrompt: 0,
686
- projectManifest: 0,
687
- taskSpec: 0,
688
- activeCode: 0,
689
- interfaces: 0,
690
- reserve: 0
691
- };
692
- for (const [nodeType, count] of Object.entries(graphDensity)) {
693
- const category = NODE_TYPE_TO_CATEGORY[nodeType];
694
- if (category) {
695
- categoryWeights[category] += count;
696
- }
697
- }
698
- const totalWeight = Object.values(categoryWeights).reduce((sum, w) => sum + w, 0);
699
- if (totalWeight > 0) {
700
- const MIN_ALLOCATION = 0.01;
701
- for (const key of Object.keys(ratios)) {
702
- if (categoryWeights[key] > 0) {
703
- ratios[key] = categoryWeights[key] / totalWeight;
704
- } else {
705
- ratios[key] = MIN_ALLOCATION;
706
- }
707
- }
708
- if (ratios.reserve < DEFAULT_RATIOS.reserve) {
709
- ratios.reserve = DEFAULT_RATIOS.reserve;
710
- }
711
- if (ratios.systemPrompt < DEFAULT_RATIOS.systemPrompt) {
712
- ratios.systemPrompt = DEFAULT_RATIOS.systemPrompt;
713
- }
714
- const ratioSum = Object.values(ratios).reduce((sum, r) => sum + r, 0);
715
- for (const key of Object.keys(ratios)) {
716
- ratios[key] = ratios[key] / ratioSum;
717
- }
718
- for (const key of Object.keys(ratios)) {
719
- if (ratios[key] < MIN_ALLOCATION) {
720
- ratios[key] = MIN_ALLOCATION;
721
- }
722
- }
723
- const finalSum = Object.values(ratios).reduce((sum, r) => sum + r, 0);
724
- for (const key of Object.keys(ratios)) {
725
- ratios[key] = ratios[key] / finalSum;
726
- }
727
- }
728
+ applyGraphDensity(ratios, graphDensity);
728
729
  }
729
730
  if (overrides) {
730
731
  let overrideSum = 0;
@@ -830,8 +831,8 @@ function createBoundaryValidator(schema, name) {
830
831
  return Ok(result.data);
831
832
  }
832
833
  const suggestions = result.error.issues.map((issue) => {
833
- const path28 = issue.path.join(".");
834
- return path28 ? `${path28}: ${issue.message}` : issue.message;
834
+ const path31 = issue.path.join(".");
835
+ return path31 ? `${path31}: ${issue.message}` : issue.message;
835
836
  });
836
837
  return Err(
837
838
  createError(
@@ -1031,21 +1032,23 @@ function extractBundle(manifest, config) {
1031
1032
  }
1032
1033
 
1033
1034
  // src/constraints/sharing/merge.ts
1035
+ function arraysEqual(a, b) {
1036
+ if (a.length !== b.length) return false;
1037
+ return a.every((val, i) => deepEqual(val, b[i]));
1038
+ }
1039
+ function objectsEqual(a, b) {
1040
+ const keysA = Object.keys(a);
1041
+ const keysB = Object.keys(b);
1042
+ if (keysA.length !== keysB.length) return false;
1043
+ return keysA.every((key) => deepEqual(a[key], b[key]));
1044
+ }
1034
1045
  function deepEqual(a, b) {
1035
1046
  if (a === b) return true;
1036
1047
  if (typeof a !== typeof b) return false;
1037
1048
  if (typeof a !== "object" || a === null || b === null) return false;
1038
- if (Array.isArray(a) && Array.isArray(b)) {
1039
- if (a.length !== b.length) return false;
1040
- return a.every((val, i) => deepEqual(val, b[i]));
1041
- }
1049
+ if (Array.isArray(a) && Array.isArray(b)) return arraysEqual(a, b);
1042
1050
  if (Array.isArray(a) !== Array.isArray(b)) return false;
1043
- const keysA = Object.keys(a);
1044
- const keysB = Object.keys(b);
1045
- if (keysA.length !== keysB.length) return false;
1046
- return keysA.every(
1047
- (key) => deepEqual(a[key], b[key])
1048
- );
1051
+ return objectsEqual(a, b);
1049
1052
  }
1050
1053
  function stringArraysEqual(a, b) {
1051
1054
  if (a.length !== b.length) return false;
@@ -1463,11 +1466,11 @@ function processExportListSpecifiers(exportDecl, exports) {
1463
1466
  var TypeScriptParser = class {
1464
1467
  name = "typescript";
1465
1468
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1466
- async parseFile(path28) {
1467
- const contentResult = await readFileContent(path28);
1469
+ async parseFile(path31) {
1470
+ const contentResult = await readFileContent(path31);
1468
1471
  if (!contentResult.ok) {
1469
1472
  return Err(
1470
- createParseError("NOT_FOUND", `File not found: ${path28}`, { path: path28 }, [
1473
+ createParseError("NOT_FOUND", `File not found: ${path31}`, { path: path31 }, [
1471
1474
  "Check that the file exists",
1472
1475
  "Verify the path is correct"
1473
1476
  ])
@@ -1477,7 +1480,7 @@ var TypeScriptParser = class {
1477
1480
  const ast = parse(contentResult.value, {
1478
1481
  loc: true,
1479
1482
  range: true,
1480
- jsx: path28.endsWith(".tsx"),
1483
+ jsx: path31.endsWith(".tsx"),
1481
1484
  errorOnUnknownASTType: false
1482
1485
  });
1483
1486
  return Ok({
@@ -1488,7 +1491,7 @@ var TypeScriptParser = class {
1488
1491
  } catch (e) {
1489
1492
  const error = e;
1490
1493
  return Err(
1491
- createParseError("SYNTAX_ERROR", `Failed to parse ${path28}: ${error.message}`, { path: path28 }, [
1494
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path31}: ${error.message}`, { path: path31 }, [
1492
1495
  "Check for syntax errors in the file",
1493
1496
  "Ensure valid TypeScript syntax"
1494
1497
  ])
@@ -1673,22 +1676,22 @@ function extractInlineRefs(content) {
1673
1676
  }
1674
1677
  return refs;
1675
1678
  }
1676
- async function parseDocumentationFile(path28) {
1677
- const contentResult = await readFileContent(path28);
1679
+ async function parseDocumentationFile(path31) {
1680
+ const contentResult = await readFileContent(path31);
1678
1681
  if (!contentResult.ok) {
1679
1682
  return Err(
1680
1683
  createEntropyError(
1681
1684
  "PARSE_ERROR",
1682
- `Failed to read documentation file: ${path28}`,
1683
- { file: path28 },
1685
+ `Failed to read documentation file: ${path31}`,
1686
+ { file: path31 },
1684
1687
  ["Check that the file exists"]
1685
1688
  )
1686
1689
  );
1687
1690
  }
1688
1691
  const content = contentResult.value;
1689
- const type = path28.endsWith(".md") ? "markdown" : "text";
1692
+ const type = path31.endsWith(".md") ? "markdown" : "text";
1690
1693
  return Ok({
1691
- path: path28,
1694
+ path: path31,
1692
1695
  type,
1693
1696
  content,
1694
1697
  codeBlocks: extractCodeBlocks(content),
@@ -1698,17 +1701,22 @@ async function parseDocumentationFile(path28) {
1698
1701
  function makeInternalSymbol(name, type, line) {
1699
1702
  return { name, type, line, references: 0, calledBy: [] };
1700
1703
  }
1704
+ function extractFunctionSymbol(node, line) {
1705
+ if (node.id?.name) return [makeInternalSymbol(node.id.name, "function", line)];
1706
+ return [];
1707
+ }
1708
+ function extractVariableSymbols(node, line) {
1709
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
1710
+ }
1711
+ function extractClassSymbol(node, line) {
1712
+ if (node.id?.name) return [makeInternalSymbol(node.id.name, "class", line)];
1713
+ return [];
1714
+ }
1701
1715
  function extractSymbolsFromNode(node) {
1702
1716
  const line = node.loc?.start?.line || 0;
1703
- if (node.type === "FunctionDeclaration" && node.id?.name) {
1704
- return [makeInternalSymbol(node.id.name, "function", line)];
1705
- }
1706
- if (node.type === "VariableDeclaration") {
1707
- return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
1708
- }
1709
- if (node.type === "ClassDeclaration" && node.id?.name) {
1710
- return [makeInternalSymbol(node.id.name, "class", line)];
1711
- }
1717
+ if (node.type === "FunctionDeclaration") return extractFunctionSymbol(node, line);
1718
+ if (node.type === "VariableDeclaration") return extractVariableSymbols(node, line);
1719
+ if (node.type === "ClassDeclaration") return extractClassSymbol(node, line);
1712
1720
  return [];
1713
1721
  }
1714
1722
  function extractInternalSymbols(ast) {
@@ -1717,21 +1725,17 @@ function extractInternalSymbols(ast) {
1717
1725
  const nodes = body.body;
1718
1726
  return nodes.flatMap(extractSymbolsFromNode);
1719
1727
  }
1728
+ function toJSDocComment(comment) {
1729
+ if (comment.type !== "Block" || !comment.value?.startsWith("*")) return null;
1730
+ return { content: comment.value, line: comment.loc?.start?.line || 0 };
1731
+ }
1720
1732
  function extractJSDocComments(ast) {
1721
- const comments = [];
1722
1733
  const body = ast.body;
1723
- if (body?.comments) {
1724
- for (const comment of body.comments) {
1725
- if (comment.type === "Block" && comment.value?.startsWith("*")) {
1726
- const jsDocComment = {
1727
- content: comment.value,
1728
- line: comment.loc?.start?.line || 0
1729
- };
1730
- comments.push(jsDocComment);
1731
- }
1732
- }
1733
- }
1734
- return comments;
1734
+ if (!body?.comments) return [];
1735
+ return body.comments.flatMap((c) => {
1736
+ const doc = toJSDocComment(c);
1737
+ return doc ? [doc] : [];
1738
+ });
1735
1739
  }
1736
1740
  function buildExportMap(files) {
1737
1741
  const byFile = /* @__PURE__ */ new Map();
@@ -1746,41 +1750,42 @@ function buildExportMap(files) {
1746
1750
  }
1747
1751
  return { byFile, byName };
1748
1752
  }
1749
- function extractAllCodeReferences(docs) {
1753
+ var CODE_BLOCK_LANGUAGES = /* @__PURE__ */ new Set(["typescript", "ts", "javascript", "js"]);
1754
+ function refsFromInlineRefs(doc) {
1755
+ return doc.inlineRefs.map((inlineRef) => ({
1756
+ docFile: doc.path,
1757
+ line: inlineRef.line,
1758
+ column: inlineRef.column,
1759
+ reference: inlineRef.reference,
1760
+ context: "inline"
1761
+ }));
1762
+ }
1763
+ function refsFromCodeBlock(docPath, block) {
1764
+ if (!CODE_BLOCK_LANGUAGES.has(block.language)) return [];
1750
1765
  const refs = [];
1751
- for (const doc of docs) {
1752
- for (const inlineRef of doc.inlineRefs) {
1766
+ const importRegex = /import\s+\{([^}]+)\}\s+from/g;
1767
+ let match;
1768
+ while ((match = importRegex.exec(block.content)) !== null) {
1769
+ const group = match[1];
1770
+ if (group === void 0) continue;
1771
+ for (const name of group.split(",").map((n) => n.trim())) {
1753
1772
  refs.push({
1754
- docFile: doc.path,
1755
- line: inlineRef.line,
1756
- column: inlineRef.column,
1757
- reference: inlineRef.reference,
1758
- context: "inline"
1773
+ docFile: docPath,
1774
+ line: block.line,
1775
+ column: 0,
1776
+ reference: name,
1777
+ context: "code-block"
1759
1778
  });
1760
1779
  }
1761
- for (const block of doc.codeBlocks) {
1762
- if (block.language === "typescript" || block.language === "ts" || block.language === "javascript" || block.language === "js") {
1763
- const importRegex = /import\s+\{([^}]+)\}\s+from/g;
1764
- let match;
1765
- while ((match = importRegex.exec(block.content)) !== null) {
1766
- const matchedGroup = match[1];
1767
- if (matchedGroup === void 0) continue;
1768
- const names = matchedGroup.split(",").map((n) => n.trim());
1769
- for (const name of names) {
1770
- refs.push({
1771
- docFile: doc.path,
1772
- line: block.line,
1773
- column: 0,
1774
- reference: name,
1775
- context: "code-block"
1776
- });
1777
- }
1778
- }
1779
- }
1780
- }
1781
1780
  }
1782
1781
  return refs;
1783
1782
  }
1783
+ function refsFromCodeBlocks(doc) {
1784
+ return doc.codeBlocks.flatMap((block) => refsFromCodeBlock(doc.path, block));
1785
+ }
1786
+ function extractAllCodeReferences(docs) {
1787
+ return docs.flatMap((doc) => [...refsFromInlineRefs(doc), ...refsFromCodeBlocks(doc)]);
1788
+ }
1784
1789
  async function buildSnapshot(config) {
1785
1790
  const startTime = Date.now();
1786
1791
  const parser = config.parser || new TypeScriptParser();
@@ -1986,44 +1991,52 @@ async function checkStructureDrift(snapshot, _config) {
1986
1991
  }
1987
1992
  return drifts;
1988
1993
  }
1994
+ function computeDriftSeverity(driftCount) {
1995
+ if (driftCount === 0) return "none";
1996
+ if (driftCount <= 3) return "low";
1997
+ if (driftCount <= 10) return "medium";
1998
+ return "high";
1999
+ }
2000
+ function buildGraphDriftReport(graphDriftData) {
2001
+ const drifts = [];
2002
+ for (const target of graphDriftData.missingTargets) {
2003
+ drifts.push({
2004
+ type: "api-signature",
2005
+ docFile: target,
2006
+ line: 0,
2007
+ reference: target,
2008
+ context: "graph-missing-target",
2009
+ issue: "NOT_FOUND",
2010
+ details: `Graph node "${target}" has no matching code target`,
2011
+ confidence: "high"
2012
+ });
2013
+ }
2014
+ for (const edge of graphDriftData.staleEdges) {
2015
+ drifts.push({
2016
+ type: "api-signature",
2017
+ docFile: edge.docNodeId,
2018
+ line: 0,
2019
+ reference: edge.codeNodeId,
2020
+ context: `graph-stale-edge:${edge.edgeType}`,
2021
+ issue: "NOT_FOUND",
2022
+ details: `Stale edge from doc "${edge.docNodeId}" to code "${edge.codeNodeId}" (${edge.edgeType})`,
2023
+ confidence: "medium"
2024
+ });
2025
+ }
2026
+ return Ok({
2027
+ drifts,
2028
+ stats: {
2029
+ docsScanned: graphDriftData.staleEdges.length,
2030
+ referencesChecked: graphDriftData.staleEdges.length + graphDriftData.missingTargets.length,
2031
+ driftsFound: drifts.length,
2032
+ byType: { api: drifts.length, example: 0, structure: 0 }
2033
+ },
2034
+ severity: computeDriftSeverity(drifts.length)
2035
+ });
2036
+ }
1989
2037
  async function detectDocDrift(snapshot, config, graphDriftData) {
1990
2038
  if (graphDriftData) {
1991
- const drifts2 = [];
1992
- for (const target of graphDriftData.missingTargets) {
1993
- drifts2.push({
1994
- type: "api-signature",
1995
- docFile: target,
1996
- line: 0,
1997
- reference: target,
1998
- context: "graph-missing-target",
1999
- issue: "NOT_FOUND",
2000
- details: `Graph node "${target}" has no matching code target`,
2001
- confidence: "high"
2002
- });
2003
- }
2004
- for (const edge of graphDriftData.staleEdges) {
2005
- drifts2.push({
2006
- type: "api-signature",
2007
- docFile: edge.docNodeId,
2008
- line: 0,
2009
- reference: edge.codeNodeId,
2010
- context: `graph-stale-edge:${edge.edgeType}`,
2011
- issue: "NOT_FOUND",
2012
- details: `Stale edge from doc "${edge.docNodeId}" to code "${edge.codeNodeId}" (${edge.edgeType})`,
2013
- confidence: "medium"
2014
- });
2015
- }
2016
- const severity2 = drifts2.length === 0 ? "none" : drifts2.length <= 3 ? "low" : drifts2.length <= 10 ? "medium" : "high";
2017
- return Ok({
2018
- drifts: drifts2,
2019
- stats: {
2020
- docsScanned: graphDriftData.staleEdges.length,
2021
- referencesChecked: graphDriftData.staleEdges.length + graphDriftData.missingTargets.length,
2022
- driftsFound: drifts2.length,
2023
- byType: { api: drifts2.length, example: 0, structure: 0 }
2024
- },
2025
- severity: severity2
2026
- });
2039
+ return buildGraphDriftReport(graphDriftData);
2027
2040
  }
2028
2041
  const fullConfig = { ...DEFAULT_DRIFT_CONFIG, ...config };
2029
2042
  const drifts = [];
@@ -2072,6 +2085,23 @@ function resolveImportToFile(importSource, fromFile, snapshot) {
2072
2085
  }
2073
2086
  return null;
2074
2087
  }
2088
+ function enqueueResolved(sources, current, snapshot, visited, queue) {
2089
+ for (const item of sources) {
2090
+ if (!item.source) continue;
2091
+ const resolved = resolveImportToFile(item.source, current, snapshot);
2092
+ if (resolved && !visited.has(resolved)) {
2093
+ queue.push(resolved);
2094
+ }
2095
+ }
2096
+ }
2097
+ function processReachabilityNode(current, snapshot, reachability, visited, queue) {
2098
+ reachability.set(current, true);
2099
+ const sourceFile = snapshot.files.find((f) => f.path === current);
2100
+ if (!sourceFile) return;
2101
+ enqueueResolved(sourceFile.imports, current, snapshot, visited, queue);
2102
+ const reExports = sourceFile.exports.filter((e) => e.isReExport);
2103
+ enqueueResolved(reExports, current, snapshot, visited, queue);
2104
+ }
2075
2105
  function buildReachabilityMap(snapshot) {
2076
2106
  const reachability = /* @__PURE__ */ new Map();
2077
2107
  for (const file of snapshot.files) {
@@ -2083,23 +2113,7 @@ function buildReachabilityMap(snapshot) {
2083
2113
  const current = queue.shift();
2084
2114
  if (visited.has(current)) continue;
2085
2115
  visited.add(current);
2086
- reachability.set(current, true);
2087
- const sourceFile = snapshot.files.find((f) => f.path === current);
2088
- if (!sourceFile) continue;
2089
- for (const imp of sourceFile.imports) {
2090
- const resolved = resolveImportToFile(imp.source, current, snapshot);
2091
- if (resolved && !visited.has(resolved)) {
2092
- queue.push(resolved);
2093
- }
2094
- }
2095
- for (const exp of sourceFile.exports) {
2096
- if (exp.isReExport && exp.source) {
2097
- const resolved = resolveImportToFile(exp.source, current, snapshot);
2098
- if (resolved && !visited.has(resolved)) {
2099
- queue.push(resolved);
2100
- }
2101
- }
2102
- }
2116
+ processReachabilityNode(current, snapshot, reachability, visited, queue);
2103
2117
  }
2104
2118
  return reachability;
2105
2119
  }
@@ -2169,21 +2183,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
2169
2183
  }
2170
2184
  return deadExports;
2171
2185
  }
2186
+ function maxLineOfValue(value) {
2187
+ if (Array.isArray(value)) {
2188
+ return value.reduce((m, item) => Math.max(m, findMaxLineInNode(item)), 0);
2189
+ }
2190
+ if (value && typeof value === "object") {
2191
+ return findMaxLineInNode(value);
2192
+ }
2193
+ return 0;
2194
+ }
2195
+ function maxLineOfNodeKeys(node) {
2196
+ let max = 0;
2197
+ for (const key of Object.keys(node)) {
2198
+ max = Math.max(max, maxLineOfValue(node[key]));
2199
+ }
2200
+ return max;
2201
+ }
2172
2202
  function findMaxLineInNode(node) {
2173
2203
  if (!node || typeof node !== "object") return 0;
2174
2204
  const n = node;
2175
- let maxLine = n.loc?.end?.line ?? 0;
2176
- for (const key of Object.keys(node)) {
2177
- const value = node[key];
2178
- if (Array.isArray(value)) {
2179
- for (const item of value) {
2180
- maxLine = Math.max(maxLine, findMaxLineInNode(item));
2181
- }
2182
- } else if (value && typeof value === "object") {
2183
- maxLine = Math.max(maxLine, findMaxLineInNode(value));
2184
- }
2185
- }
2186
- return maxLine;
2205
+ const locLine = n.loc?.end?.line ?? 0;
2206
+ return Math.max(locLine, maxLineOfNodeKeys(node));
2187
2207
  }
2188
2208
  function countLinesFromAST(ast) {
2189
2209
  if (!ast.body || !Array.isArray(ast.body)) return 1;
@@ -2257,54 +2277,59 @@ function findDeadInternals(snapshot, _reachability) {
2257
2277
  }
2258
2278
  return deadInternals;
2259
2279
  }
2260
- async function detectDeadCode(snapshot, graphDeadCodeData) {
2261
- if (graphDeadCodeData) {
2262
- const deadFiles2 = [];
2263
- const deadExports2 = [];
2264
- const fileTypes = /* @__PURE__ */ new Set(["file", "module"]);
2265
- const exportTypes = /* @__PURE__ */ new Set(["function", "class", "method", "interface", "variable"]);
2266
- for (const node of graphDeadCodeData.unreachableNodes) {
2267
- if (fileTypes.has(node.type)) {
2268
- deadFiles2.push({
2269
- path: node.path || node.id,
2270
- reason: "NO_IMPORTERS",
2271
- exportCount: 0,
2272
- lineCount: 0
2273
- });
2274
- } else if (exportTypes.has(node.type)) {
2275
- const exportType = node.type === "method" ? "function" : node.type;
2276
- deadExports2.push({
2277
- file: node.path || node.id,
2278
- name: node.name,
2279
- line: 0,
2280
- type: exportType,
2281
- isDefault: false,
2282
- reason: "NO_IMPORTERS"
2283
- });
2284
- }
2285
- }
2286
- const reachableCount = graphDeadCodeData.reachableNodeIds instanceof Set ? graphDeadCodeData.reachableNodeIds.size : graphDeadCodeData.reachableNodeIds.length;
2287
- const fileNodes = graphDeadCodeData.unreachableNodes.filter((n) => fileTypes.has(n.type));
2288
- const exportNodes = graphDeadCodeData.unreachableNodes.filter((n) => exportTypes.has(n.type));
2289
- const totalFiles = reachableCount + fileNodes.length;
2290
- const totalExports2 = exportNodes.length + (reachableCount > 0 ? reachableCount : 0);
2291
- const report2 = {
2292
- deadExports: deadExports2,
2293
- deadFiles: deadFiles2,
2294
- deadInternals: [],
2295
- unusedImports: [],
2296
- stats: {
2297
- filesAnalyzed: totalFiles,
2298
- entryPointsUsed: [],
2299
- totalExports: totalExports2,
2300
- deadExportCount: deadExports2.length,
2301
- totalFiles,
2302
- deadFileCount: deadFiles2.length,
2303
- estimatedDeadLines: 0
2304
- }
2305
- };
2306
- return Ok(report2);
2280
+ var FILE_TYPES = /* @__PURE__ */ new Set(["file", "module"]);
2281
+ var EXPORT_TYPES = /* @__PURE__ */ new Set(["function", "class", "method", "interface", "variable"]);
2282
+ function classifyUnreachableNode(node, deadFiles, deadExports) {
2283
+ if (FILE_TYPES.has(node.type)) {
2284
+ deadFiles.push({
2285
+ path: node.path || node.id,
2286
+ reason: "NO_IMPORTERS",
2287
+ exportCount: 0,
2288
+ lineCount: 0
2289
+ });
2290
+ } else if (EXPORT_TYPES.has(node.type)) {
2291
+ const exportType = node.type === "method" ? "function" : node.type;
2292
+ deadExports.push({
2293
+ file: node.path || node.id,
2294
+ name: node.name,
2295
+ line: 0,
2296
+ type: exportType,
2297
+ isDefault: false,
2298
+ reason: "NO_IMPORTERS"
2299
+ });
2307
2300
  }
2301
+ }
2302
+ function computeGraphReportStats(data, deadFiles, deadExports) {
2303
+ const reachableCount = data.reachableNodeIds instanceof Set ? data.reachableNodeIds.size : data.reachableNodeIds.length;
2304
+ const fileNodes = data.unreachableNodes.filter((n) => FILE_TYPES.has(n.type));
2305
+ const exportNodes = data.unreachableNodes.filter((n) => EXPORT_TYPES.has(n.type));
2306
+ const totalFiles = reachableCount + fileNodes.length;
2307
+ const totalExports = exportNodes.length + (reachableCount > 0 ? reachableCount : 0);
2308
+ return {
2309
+ filesAnalyzed: totalFiles,
2310
+ entryPointsUsed: [],
2311
+ totalExports,
2312
+ deadExportCount: deadExports.length,
2313
+ totalFiles,
2314
+ deadFileCount: deadFiles.length,
2315
+ estimatedDeadLines: 0
2316
+ };
2317
+ }
2318
+ function buildReportFromGraph(data) {
2319
+ const deadFiles = [];
2320
+ const deadExports = [];
2321
+ for (const node of data.unreachableNodes) {
2322
+ classifyUnreachableNode(node, deadFiles, deadExports);
2323
+ }
2324
+ return {
2325
+ deadExports,
2326
+ deadFiles,
2327
+ deadInternals: [],
2328
+ unusedImports: [],
2329
+ stats: computeGraphReportStats(data, deadFiles, deadExports)
2330
+ };
2331
+ }
2332
+ function buildReportFromSnapshot(snapshot) {
2308
2333
  const reachability = buildReachabilityMap(snapshot);
2309
2334
  const usageMap = buildExportUsageMap(snapshot);
2310
2335
  const deadExports = findDeadExports(snapshot, usageMap, reachability);
@@ -2316,7 +2341,7 @@ async function detectDeadCode(snapshot, graphDeadCodeData) {
2316
2341
  0
2317
2342
  );
2318
2343
  const estimatedDeadLines = deadFiles.reduce((acc, file) => acc + file.lineCount, 0);
2319
- const report = {
2344
+ return {
2320
2345
  deadExports,
2321
2346
  deadFiles,
2322
2347
  deadInternals,
@@ -2331,6 +2356,9 @@ async function detectDeadCode(snapshot, graphDeadCodeData) {
2331
2356
  estimatedDeadLines
2332
2357
  }
2333
2358
  };
2359
+ }
2360
/**
 * Detect dead code, preferring precomputed graph reachability data when
 * supplied and falling back to snapshot analysis otherwise.
 * @param {object} snapshot - Project snapshot for the fallback path.
 * @param {object} [graphDeadCodeData] - Optional graph reachability data.
 * @returns {Promise<object>} Ok-wrapped dead-code report.
 */
async function detectDeadCode(snapshot, graphDeadCodeData) {
  if (graphDeadCodeData) {
    return Ok(buildReportFromGraph(graphDeadCodeData));
  }
  return Ok(buildReportFromSnapshot(snapshot));
}
2336
2364
 
@@ -2614,48 +2642,52 @@ async function detectSizeBudgetViolations(rootDir, config) {
2614
2642
  }
2615
2643
 
2616
2644
  // src/entropy/fixers/suggestions.ts
2645
/**
 * Build a manual-cleanup suggestion for a file that nothing imports.
 * @param {object} file - Dead-file record with a `path` property.
 * @returns {object} Suggestion describing how to remove the file.
 */
function deadFileSuggestion(file) {
  const baseName = file.path.split("/").pop();
  return {
    type: "delete",
    priority: "high",
    source: "dead-code",
    relatedIssues: [`dead-file:${file.path}`],
    title: `Remove dead file: ${baseName}`,
    description: `This file is not imported by any other file and can be safely removed.`,
    files: [file.path],
    steps: [`Delete ${file.path}`, "Run tests to verify no regressions"],
    whyManual: "File deletion requires verification that no dynamic imports exist"
  };
}
2658
/**
 * Build a manual-cleanup suggestion for an export with no usages.
 * @param {object} exp - Dead-export record with `file` and `name`.
 * @returns {object} Suggestion describing how to remove the export.
 */
function deadExportSuggestion(exp) {
  const { file, name } = exp;
  return {
    type: "refactor",
    priority: "medium",
    source: "dead-code",
    relatedIssues: [`dead-export:${file}:${name}`],
    title: `Remove unused export: ${name}`,
    description: `The export "${name}" is not used anywhere. Consider removing it.`,
    files: [file],
    steps: [`Remove export "${name}" from ${file}`, "Run tests to verify no regressions"],
    whyManual: "Export removal may affect external consumers not in scope"
  };
}
2671
/**
 * Build a cleanup suggestion for one or more unused import specifiers.
 * Wording is pluralized when several specifiers are unused; the steps differ
 * depending on whether the whole import statement is dead.
 * @param {object} imp - Unused-import record (file, source, specifiers, isFullyUnused).
 * @returns {object} Suggestion describing how to remove the import(s).
 */
function unusedImportSuggestion(imp) {
  const many = imp.specifiers.length > 1;
  const list = imp.specifiers.join(", ");
  const steps = imp.isFullyUnused
    ? [`Remove entire import line from ${imp.file}`]
    : [`Remove unused specifiers (${list}) from import statement`];
  return {
    type: "delete",
    priority: "medium",
    source: "dead-code",
    relatedIssues: [`unused-import:${imp.file}:${imp.specifiers.join(",")}`],
    title: `Remove unused import${many ? "s" : ""}: ${list}`,
    description: `The import${many ? "s" : ""} from "${imp.source}" ${many ? "are" : "is"} not used.`,
    files: [imp.file],
    steps,
    whyManual: "Import removal can be auto-fixed"
  };
}
2617
2685
/**
 * Collect every dead-code cleanup suggestion from a dead-code report:
 * dead files, then dead exports, then unused imports.
 * @param {object} report - Report with deadFiles, deadExports, unusedImports.
 * @returns {Array<object>} Flat list of suggestions.
 */
function generateDeadCodeSuggestions(report) {
  const suggestions = [];
  for (const file of report.deadFiles) {
    suggestions.push(deadFileSuggestion(file));
  }
  for (const exp of report.deadExports) {
    suggestions.push(deadExportSuggestion(exp));
  }
  for (const imp of report.unusedImports) {
    suggestions.push(unusedImportSuggestion(imp));
  }
  return suggestions;
}
2660
2692
  function generateDriftSuggestions(report) {
2661
2693
  const suggestions = [];
@@ -3098,43 +3130,55 @@ async function createBackup(filePath, backupDir) {
3098
3130
  );
3099
3131
  }
3100
3132
  }
3133
/**
 * Delete the file targeted by a fix, taking a backup first when configured.
 * @param {object} fix - Fix whose `file` is the path to delete.
 * @param {object} config - May enable `createBackup` together with `backupDir`.
 * @returns {Promise<object>} Ok on success; Err carrying the fix and the
 *   backup error message when the backup step fails (the file is then kept).
 */
async function applyDeleteFile(fix, config) {
  const wantsBackup = Boolean(config.createBackup && config.backupDir);
  if (wantsBackup) {
    const backup = await createBackup(fix.file, config.backupDir);
    if (!backup.ok) {
      return Err({ fix, error: backup.error.message });
    }
  }
  await unlink2(fix.file);
  return Ok(void 0);
}
3141
/**
 * Remove the single line referenced by `fix.line` (1-based) from `fix.file`.
 * A fix without a line number is a no-op.
 * @param {object} fix - Fix with `file` and optional 1-based `line`.
 */
async function applyDeleteLines(fix) {
  if (fix.line === void 0) return;
  const source = await readFile3(fix.file, "utf-8");
  const rows = source.split("\n");
  rows.splice(fix.line - 1, 1);
  await writeFile3(fix.file, rows.join("\n"));
}
3149
/**
 * Replace the first occurrence of `fix.oldContent` with `fix.newContent`
 * in `fix.file`. A no-op unless both contents are provided.
 * @param {object} fix - Fix with `file`, `oldContent` and `newContent`.
 */
async function applyReplace(fix) {
  if (!fix.oldContent || fix.newContent === void 0) return;
  const source = await readFile3(fix.file, "utf-8");
  const updated = source.replace(fix.oldContent, fix.newContent);
  await writeFile3(fix.file, updated);
}
3155
/**
 * Insert `fix.newContent` as a new line before 1-based `fix.line` in
 * `fix.file`. A no-op unless both a line number and content are provided.
 * @param {object} fix - Fix with `file`, `line` and `newContent`.
 */
async function applyInsert(fix) {
  if (fix.line === void 0 || !fix.newContent) return;
  const source = await readFile3(fix.file, "utf-8");
  const rows = source.split("\n");
  rows.splice(fix.line - 1, 0, fix.newContent);
  await writeFile3(fix.file, rows.join("\n"));
}
3101
3163
  async function applySingleFix(fix, config) {
3102
3164
  if (config.dryRun) {
3103
3165
  return Ok(fix);
3104
3166
  }
3105
3167
  try {
3106
3168
  switch (fix.action) {
3107
- case "delete-file":
3108
- if (config.createBackup && config.backupDir) {
3109
- const backupResult = await createBackup(fix.file, config.backupDir);
3110
- if (!backupResult.ok) {
3111
- return Err({ fix, error: backupResult.error.message });
3112
- }
3113
- }
3114
- await unlink2(fix.file);
3169
+ case "delete-file": {
3170
+ const result = await applyDeleteFile(fix, config);
3171
+ if (!result.ok) return result;
3115
3172
  break;
3173
+ }
3116
3174
  case "delete-lines":
3117
- if (fix.line !== void 0) {
3118
- const content = await readFile3(fix.file, "utf-8");
3119
- const lines = content.split("\n");
3120
- lines.splice(fix.line - 1, 1);
3121
- await writeFile3(fix.file, lines.join("\n"));
3122
- }
3175
+ await applyDeleteLines(fix);
3123
3176
  break;
3124
3177
  case "replace":
3125
- if (fix.oldContent && fix.newContent !== void 0) {
3126
- const content = await readFile3(fix.file, "utf-8");
3127
- const newContent = content.replace(fix.oldContent, fix.newContent);
3128
- await writeFile3(fix.file, newContent);
3129
- }
3178
+ await applyReplace(fix);
3130
3179
  break;
3131
3180
  case "insert":
3132
- if (fix.line !== void 0 && fix.newContent) {
3133
- const content = await readFile3(fix.file, "utf-8");
3134
- const lines = content.split("\n");
3135
- lines.splice(fix.line - 1, 0, fix.newContent);
3136
- await writeFile3(fix.file, lines.join("\n"));
3137
- }
3181
+ await applyInsert(fix);
3138
3182
  break;
3139
3183
  }
3140
3184
  return Ok(fix);
@@ -3307,6 +3351,21 @@ function applyHotspotDowngrade(finding, hotspot) {
3307
3351
  }
3308
3352
  return finding;
3309
3353
  }
3354
/**
 * Collapse a group of findings that share a file/line. When a dead-code
 * finding coincides with an architecture finding, merge them into a single
 * enriched dead-code finding; otherwise the group passes through unchanged.
 * @param {Array<object>} group - Findings at the same location.
 * @returns {Array<object>} Deduplicated findings for this location.
 */
function mergeGroup(group) {
  if (group.length === 1) return [group[0]];
  const deadCode = group.find((f) => f.concern === "dead-code");
  const arch = group.find((f) => f.concern === "architecture");
  if (!deadCode || !arch) return group;
  const suggestion = deadCode.fixAction
    ? `${deadCode.fixAction} (resolves both dead code and architecture violation)`
    : deadCode.suggestion;
  return [
    {
      ...deadCode,
      description: `${deadCode.description} (also violates architecture: ${arch.type})`,
      suggestion
    }
  ];
}
3310
3369
  function deduplicateCleanupFindings(findings) {
3311
3370
  const byFileAndLine = /* @__PURE__ */ new Map();
3312
3371
  for (const f of findings) {
@@ -3317,21 +3376,7 @@ function deduplicateCleanupFindings(findings) {
3317
3376
  }
3318
3377
  const result = [];
3319
3378
  for (const group of byFileAndLine.values()) {
3320
- if (group.length === 1) {
3321
- result.push(group[0]);
3322
- continue;
3323
- }
3324
- const deadCode = group.find((f) => f.concern === "dead-code");
3325
- const arch = group.find((f) => f.concern === "architecture");
3326
- if (deadCode && arch) {
3327
- result.push({
3328
- ...deadCode,
3329
- description: `${deadCode.description} (also violates architecture: ${arch.type})`,
3330
- suggestion: deadCode.fixAction ? `${deadCode.fixAction} (resolves both dead code and architecture violation)` : deadCode.suggestion
3331
- });
3332
- } else {
3333
- result.push(...group);
3334
- }
3379
+ result.push(...mergeGroup(group));
3335
3380
  }
3336
3381
  return result;
3337
3382
  }
@@ -3704,6 +3749,32 @@ var SKIP_DIRS = /* @__PURE__ */ new Set(["node_modules", "dist", ".git"]);
3704
3749
  var SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx"]);
3705
3750
  var FUNCTION_DECL_RE = /(?:export\s+)?(?:async\s+)?function\s+(\w+)/;
3706
3751
  var CONST_DECL_RE = /(?:export\s+)?(?:const|let)\s+(\w+)\s*=/;
3752
/**
 * Fold graph-derived high fan-in functions into the `seen` map, skipping
 * entries already present (annotations win), and report how many were added.
 * @param {Array<object>} highFanInFunctions - Items with file/function/fanIn.
 * @param {Map<string, object>} seen - Keyed by "file::function" (mutated).
 * @returns {number} Count of newly added graph-inferred entries.
 */
function mergeGraphInferred(highFanInFunctions, seen) {
  let added = 0;
  for (const { file, function: fn, fanIn } of highFanInFunctions) {
    const key = `${file}::${fn}`;
    if (seen.has(key)) continue;
    seen.set(key, { file, function: fn, source: "graph-inferred", fanIn });
    added += 1;
  }
  return added;
}
3768
/**
 * True when a trimmed line carries no declaration: empty, part of a block
 * comment (starts with "*"), or a line comment ("//").
 * Fix: the original also compared against "*" and "*​/" explicitly; both are
 * already covered by `startsWith("*")`, so the redundant checks were dropped.
 * @param {string} line - A line already trimmed of surrounding whitespace.
 * @returns {boolean}
 */
function isCommentOrBlank(line) {
  return line === "" || line.startsWith("*") || line.startsWith("//");
}
3771
/**
 * Extract the declared identifier from a source line, trying function
 * declarations first, then `const`/`let` assignments.
 * @param {string} line - Trimmed source line.
 * @returns {string|null} Declared name, or null when neither pattern matches.
 */
function matchDeclarationName(line) {
  for (const re of [FUNCTION_DECL_RE, CONST_DECL_RE]) {
    const name = line.match(re)?.[1];
    if (name) return name;
  }
  return null;
}
3707
3778
  var CriticalPathResolver = class {
3708
3779
  projectRoot;
3709
3780
  constructor(projectRoot) {
@@ -3716,27 +3787,12 @@ var CriticalPathResolver = class {
3716
3787
  const key = `${entry.file}::${entry.function}`;
3717
3788
  seen.set(key, entry);
3718
3789
  }
3719
- let graphInferred = 0;
3720
- if (graphData) {
3721
- for (const item of graphData.highFanInFunctions) {
3722
- const key = `${item.file}::${item.function}`;
3723
- if (!seen.has(key)) {
3724
- seen.set(key, {
3725
- file: item.file,
3726
- function: item.function,
3727
- source: "graph-inferred",
3728
- fanIn: item.fanIn
3729
- });
3730
- graphInferred++;
3731
- }
3732
- }
3733
- }
3790
+ const graphInferred = graphData ? mergeGraphInferred(graphData.highFanInFunctions, seen) : 0;
3734
3791
  const entries = Array.from(seen.values());
3735
- const annotatedCount = annotated.length;
3736
3792
  return {
3737
3793
  entries,
3738
3794
  stats: {
3739
- annotated: annotatedCount,
3795
+ annotated: annotated.length,
3740
3796
  graphInferred,
3741
3797
  total: entries.length
3742
3798
  }
@@ -3763,6 +3819,14 @@ var CriticalPathResolver = class {
3763
3819
  }
3764
3820
  }
3765
3821
  }
3822
+ resolveFunctionName(lines, fromIndex) {
3823
+ for (let j = fromIndex; j < lines.length; j++) {
3824
+ const nextLine = lines[j].trim();
3825
+ if (isCommentOrBlank(nextLine)) continue;
3826
+ return matchDeclarationName(nextLine);
3827
+ }
3828
+ return null;
3829
+ }
3766
3830
  scanFile(filePath, entries) {
3767
3831
  let content;
3768
3832
  try {
@@ -3773,30 +3837,10 @@ var CriticalPathResolver = class {
3773
3837
  const lines = content.split("\n");
3774
3838
  const relativePath = path.relative(this.projectRoot, filePath).replace(/\\/g, "/");
3775
3839
  for (let i = 0; i < lines.length; i++) {
3776
- const line = lines[i];
3777
- if (!line.includes("@perf-critical")) continue;
3778
- for (let j = i + 1; j < lines.length; j++) {
3779
- const nextLine = lines[j].trim();
3780
- if (nextLine === "" || nextLine === "*/" || nextLine === "*") continue;
3781
- if (nextLine.startsWith("*") || nextLine.startsWith("//")) continue;
3782
- const funcMatch = nextLine.match(FUNCTION_DECL_RE);
3783
- if (funcMatch && funcMatch[1]) {
3784
- entries.push({
3785
- file: relativePath,
3786
- function: funcMatch[1],
3787
- source: "annotation"
3788
- });
3789
- } else {
3790
- const constMatch = nextLine.match(CONST_DECL_RE);
3791
- if (constMatch && constMatch[1]) {
3792
- entries.push({
3793
- file: relativePath,
3794
- function: constMatch[1],
3795
- source: "annotation"
3796
- });
3797
- }
3798
- }
3799
- break;
3840
+ if (!lines[i].includes("@perf-critical")) continue;
3841
+ const fnName = this.resolveFunctionName(lines, i + 1);
3842
+ if (fnName) {
3843
+ entries.push({ file: relativePath, function: fnName, source: "annotation" });
3800
3844
  }
3801
3845
  }
3802
3846
  }
@@ -3951,14 +3995,19 @@ function detectFileStatus(part) {
3951
3995
  if (part.includes("rename from")) return "renamed";
3952
3996
  return "modified";
3953
3997
  }
3954
- function parseDiffPart(part) {
3998
/**
 * Extract the post-change ("b/") path from a `diff --git` header.
 * @param {string} part - One file's section of a unified diff.
 * @returns {string|null} The b-side path, or null for blank or headerless parts.
 */
function parseDiffHeader(part) {
  if (!part.trim()) return null;
  const header = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
  if (header === null || !header[2]) return null;
  return header[2];
}
4004
+ function parseDiffPart(part) {
4005
+ const path31 = parseDiffHeader(part);
4006
+ if (!path31) return null;
4007
+ const additionRegex = /^\+(?!\+\+)/gm;
3959
4008
  const deletionRegex = /^-(?!--)/gm;
3960
4009
  return {
3961
- path: headerMatch[2],
4010
+ path: path31,
3962
4011
  status: detectFileStatus(part),
3963
4012
  additions: (part.match(additionRegex) || []).length,
3964
4013
  deletions: (part.match(deletionRegex) || []).length
@@ -3980,100 +4029,136 @@ function parseDiff(diff2) {
3980
4029
  });
3981
4030
  }
3982
4031
  }
3983
- async function analyzeDiff(changes, options, graphImpactData) {
3984
- if (!options?.enabled) {
3985
- return Ok([]);
4032
/**
 * Scan the raw diff text for configured forbidden patterns and emit one
 * failed checklist item per matching pattern.
 *
 * Fix: the original called `.test()` directly on caller-supplied RegExp
 * objects; with a `/g` or `/y` flag `.test()` is stateful (`lastIndex`
 * advances), so repeated runs with the same options object could silently
 * miss matches. `String.prototype.search` ignores and restores `lastIndex`,
 * making the check deterministic.
 *
 * @param {string} diff2 - Full diff text.
 * @param {Array<{pattern: string|RegExp, severity: string, message: string}>|undefined} forbiddenPatterns
 * @param {() => string} nextId - Generator for checklist item ids.
 * @returns {Array<object>} Failed checklist items (empty when nothing matches).
 */
function checkForbiddenPatterns(diff2, forbiddenPatterns, nextId) {
  const items = [];
  if (!forbiddenPatterns) return items;
  for (const forbidden of forbiddenPatterns) {
    const pattern = typeof forbidden.pattern === "string" ? new RegExp(forbidden.pattern, "g") : forbidden.pattern;
    if (diff2.search(pattern) === -1) continue;
    items.push({
      id: nextId(),
      category: "diff",
      check: `Forbidden pattern: ${forbidden.pattern}`,
      passed: false,
      severity: forbidden.severity,
      details: forbidden.message,
      suggestion: `Remove occurrences of ${forbidden.pattern}`
    });
  }
  return items;
}
4050
/**
 * Emit a single warning checklist item when the number of changed files
 * exceeds the configured maximum; otherwise an empty list.
 * @param {Array<object>} files - Parsed changed files.
 * @param {number|undefined} maxChangedFiles - Limit; a falsy value disables the check.
 * @param {() => string} nextId - Generator for checklist item ids.
 * @returns {Array<object>}
 */
function checkMaxChangedFiles(files, maxChangedFiles, nextId) {
  const withinLimit = !maxChangedFiles || files.length <= maxChangedFiles;
  if (withinLimit) return [];
  const count = files.length;
  return [
    {
      id: nextId(),
      category: "diff",
      check: `PR size: ${count} files changed`,
      passed: false,
      severity: "warning",
      details: `This PR changes ${count} files, which exceeds the recommended maximum of ${maxChangedFiles}`,
      suggestion: "Consider breaking this into smaller PRs"
    }
  ];
}
4064
/**
 * Flag each changed file whose total changed lines (additions + deletions)
 * exceed `maxFileSize`. Returns an empty list when the limit is unset.
 * @param {Array<object>} files - Parsed changed files with additions/deletions.
 * @param {number|undefined} maxFileSize - Per-file changed-line limit.
 * @param {() => string} nextId - Generator for checklist item ids.
 * @returns {Array<object>}
 */
function checkFileSizes(files, maxFileSize, nextId) {
  if (!maxFileSize) return [];
  const oversized = files
    .map((file) => ({ file, totalLines: file.additions + file.deletions }))
    .filter(({ totalLines }) => totalLines > maxFileSize);
  return oversized.map(({ file, totalLines }) => ({
    id: nextId(),
    category: "diff",
    check: `File size: ${file.path}`,
    passed: false,
    severity: "warning",
    details: `File has ${totalLines} lines changed, exceeding limit of ${maxFileSize}`,
    file: file.path,
    suggestion: "Consider splitting this file into smaller modules"
  }));
}
4083
/**
 * Using graph impact data, warn about newly added TypeScript source files
 * that have no test file linked in the graph.
 * @param {Array<object>} files - Parsed changed files.
 * @param {object} graphImpactData - Carries `affectedTests` with coversFile links.
 * @returns {Array<object>} Warning checklist items for uncovered new files.
 */
function checkTestCoverageGraph(files, graphImpactData) {
  const isNewSource = (f) => f.status === "added" && f.path.endsWith(".ts") && !f.path.includes(".test.");
  const hasLinkedTest = (f) => graphImpactData.affectedTests.some((t) => t.coversFile === f.path);
  return files
    .filter((f) => isNewSource(f) && !hasLinkedTest(f))
    .map((f) => ({
      id: `test-coverage-${f.path}`,
      category: "diff",
      check: "Test coverage (graph)",
      passed: false,
      severity: "warning",
      details: `New file ${f.path} has no test file linked in the graph`,
      file: f.path
    }));
}
4103
/**
 * Filename-convention fallback for test coverage: every newly added `.ts`
 * source file should have a matching `.test.` file among the changed files.
 * @param {Array<object>} files - Parsed changed files.
 * @param {() => string} nextId - Generator for checklist item ids.
 * @returns {Array<object>} Warning items for sources lacking a matching test.
 */
function checkTestCoverageFilename(files, nextId) {
  const testPaths = files.filter((f) => f.path.includes(".test.")).map((f) => f.path);
  const items = [];
  for (const f of files) {
    if (f.status !== "added" || !f.path.endsWith(".ts") || f.path.includes(".test.")) continue;
    const expectedTestPath = f.path.replace(".ts", ".test.ts");
    const stem = f.path.replace(".ts", "");
    const covered = testPaths.some((p) => p.includes(expectedTestPath) || p.includes(stem));
    if (covered) continue;
    items.push({
      id: nextId(),
      category: "diff",
      check: `Test coverage: ${f.path}`,
      passed: false,
      severity: "warning",
      details: "New source file added without corresponding test file",
      file: f.path,
      suggestion: `Add tests in ${expectedTestPath}`
    });
  }
  return items;
}
4128
/**
 * Using graph impact data, note modified TypeScript source files with no
 * documentation linked in the graph. Items are informational only
 * (`passed: true`, severity "info"), not failures.
 * @param {Array<object>} files - Parsed changed files.
 * @param {object} graphImpactData - Carries `affectedDocs` with documentsFile links.
 * @returns {Array<object>}
 */
function checkDocCoverage2(files, graphImpactData) {
  const isModifiedSource = (f) => f.status === "modified" && f.path.endsWith(".ts") && !f.path.includes(".test.");
  const hasLinkedDoc = (f) => graphImpactData.affectedDocs.some((d) => d.documentsFile === f.path);
  return files
    .filter((f) => isModifiedSource(f) && !hasLinkedDoc(f))
    .map((f) => ({
      id: `doc-coverage-${f.path}`,
      category: "diff",
      check: "Documentation coverage (graph)",
      passed: true,
      severity: "info",
      details: `Modified file ${f.path} has no documentation linked in the graph`,
      file: f.path
    }));
}
4148
+ async function analyzeDiff(changes, options, graphImpactData) {
4149
+ if (!options?.enabled) {
4150
+ return Ok([]);
4151
+ }
4152
+ let itemId = 0;
4153
+ const nextId = () => `diff-${++itemId}`;
4154
+ const items = [
4155
+ ...checkForbiddenPatterns(changes.diff, options.forbiddenPatterns, nextId),
4156
+ ...checkMaxChangedFiles(changes.files, options.maxChangedFiles, nextId),
4157
+ ...checkFileSizes(changes.files, options.maxFileSize, nextId)
4158
+ ];
4033
4159
  if (options.checkTestCoverage) {
4034
- if (graphImpactData) {
4035
- for (const file of changes.files) {
4036
- if (file.status === "added" && file.path.endsWith(".ts") && !file.path.includes(".test.")) {
4037
- const hasGraphTest = graphImpactData.affectedTests.some(
4038
- (t) => t.coversFile === file.path
4039
- );
4040
- if (!hasGraphTest) {
4041
- items.push({
4042
- id: `test-coverage-${file.path}`,
4043
- category: "diff",
4044
- check: "Test coverage (graph)",
4045
- passed: false,
4046
- severity: "warning",
4047
- details: `New file ${file.path} has no test file linked in the graph`,
4048
- file: file.path
4049
- });
4050
- }
4051
- }
4052
- }
4053
- } else {
4054
- const addedSourceFiles = changes.files.filter(
4055
- (f) => f.status === "added" && f.path.endsWith(".ts") && !f.path.includes(".test.")
4056
- );
4057
- const testFiles = changes.files.filter((f) => f.path.includes(".test."));
4058
- for (const sourceFile of addedSourceFiles) {
4059
- const expectedTestPath = sourceFile.path.replace(".ts", ".test.ts");
4060
- const hasTest = testFiles.some(
4061
- (t) => t.path.includes(expectedTestPath) || t.path.includes(sourceFile.path.replace(".ts", ""))
4062
- );
4063
- if (!hasTest) {
4064
- items.push({
4065
- id: `diff-${++itemId}`,
4066
- category: "diff",
4067
- check: `Test coverage: ${sourceFile.path}`,
4068
- passed: false,
4069
- severity: "warning",
4070
- details: "New source file added without corresponding test file",
4071
- file: sourceFile.path,
4072
- suggestion: `Add tests in ${expectedTestPath}`
4073
- });
4074
- }
4075
- }
4076
- }
4160
+ const coverageItems = graphImpactData ? checkTestCoverageGraph(changes.files, graphImpactData) : checkTestCoverageFilename(changes.files, nextId);
4161
+ items.push(...coverageItems);
4077
4162
  }
4078
4163
  if (graphImpactData && graphImpactData.impactScope > 20) {
4079
4164
  items.push({
@@ -4086,22 +4171,7 @@ async function analyzeDiff(changes, options, graphImpactData) {
4086
4171
  });
4087
4172
  }
4088
4173
  if (graphImpactData) {
4089
- for (const file of changes.files) {
4090
- if (file.status === "modified" && file.path.endsWith(".ts") && !file.path.includes(".test.")) {
4091
- const hasDoc = graphImpactData.affectedDocs.some((d) => d.documentsFile === file.path);
4092
- if (!hasDoc) {
4093
- items.push({
4094
- id: `doc-coverage-${file.path}`,
4095
- category: "diff",
4096
- check: "Documentation coverage (graph)",
4097
- passed: true,
4098
- severity: "info",
4099
- details: `Modified file ${file.path} has no documentation linked in the graph`,
4100
- file: file.path
4101
- });
4102
- }
4103
- }
4104
- }
4174
+ items.push(...checkDocCoverage2(changes.files, graphImpactData));
4105
4175
  }
4106
4176
  return Ok(items);
4107
4177
  }
@@ -4634,10 +4704,26 @@ function hasMatchingViolation(rule, violationsByCategory) {
4634
4704
  }
4635
4705
 
4636
4706
  // src/architecture/detect-stale.ts
4707
/**
 * Decide whether a constraint node is stale relative to `cutoff`.
 * A node is stale when its last violation (falling back to its creation
 * time) is older than the cutoff. Returns null for fresh nodes, nodes with
 * no usable timestamp, or unparseable timestamps.
 *
 * Fix: the original let an unparseable timestamp (NaN from `Date#getTime`)
 * flow through the comparison, producing a stale record with
 * `daysSinceLastViolation: NaN`; such nodes are now skipped.
 *
 * @param {object} node - Constraint node (id, category, name?, scope?,
 *   lastViolatedAt?, createdAt?).
 * @param {number} now - Current epoch ms.
 * @param {number} cutoff - Epoch ms threshold for staleness.
 * @returns {object|null} Stale-constraint record, or null.
 */
function evaluateStaleNode(node, now, cutoff) {
  const lastViolatedAt = node.lastViolatedAt ?? null;
  const comparisonTimestamp = lastViolatedAt ?? node.createdAt;
  if (!comparisonTimestamp) return null;
  const timestampMs = new Date(comparisonTimestamp).getTime();
  // Number.isFinite also rejects NaN from invalid date strings.
  if (!Number.isFinite(timestampMs) || timestampMs >= cutoff) return null;
  const daysSince = Math.floor((now - timestampMs) / (24 * 60 * 60 * 1e3));
  return {
    id: node.id,
    category: node.category,
    description: node.name ?? "",
    scope: node.scope ?? "project",
    lastViolatedAt,
    daysSinceLastViolation: daysSince
  };
}
4637
4724
  function detectStaleConstraints(store, windowDays = 30, category) {
4638
4725
  const now = Date.now();
4639
- const windowMs = windowDays * 24 * 60 * 60 * 1e3;
4640
- const cutoff = now - windowMs;
4726
+ const cutoff = now - windowDays * 24 * 60 * 60 * 1e3;
4641
4727
  let constraints = store.findNodes({ type: "constraint" });
4642
4728
  if (category) {
4643
4729
  constraints = constraints.filter((n) => n.category === category);
@@ -4645,28 +4731,23 @@ function detectStaleConstraints(store, windowDays = 30, category) {
4645
4731
  const totalConstraints = constraints.length;
4646
4732
  const staleConstraints = [];
4647
4733
  for (const node of constraints) {
4648
- const lastViolatedAt = node.lastViolatedAt ?? null;
4649
- const createdAt = node.createdAt;
4650
- const comparisonTimestamp = lastViolatedAt ?? createdAt;
4651
- if (!comparisonTimestamp) continue;
4652
- const timestampMs = new Date(comparisonTimestamp).getTime();
4653
- if (timestampMs < cutoff) {
4654
- const daysSince = Math.floor((now - timestampMs) / (24 * 60 * 60 * 1e3));
4655
- staleConstraints.push({
4656
- id: node.id,
4657
- category: node.category,
4658
- description: node.name ?? "",
4659
- scope: node.scope ?? "project",
4660
- lastViolatedAt,
4661
- daysSinceLastViolation: daysSince
4662
- });
4663
- }
4734
+ const entry = evaluateStaleNode(node, now, cutoff);
4735
+ if (entry) staleConstraints.push(entry);
4664
4736
  }
4665
4737
  staleConstraints.sort((a, b) => b.daysSinceLastViolation - a.daysSinceLastViolation);
4666
4738
  return { staleConstraints, totalConstraints, windowDays };
4667
4739
  }
4668
4740
 
4669
4741
  // src/architecture/config.ts
4742
/**
 * Merge one threshold category's module-level override onto the project
 * value. When both sides are non-array objects they are shallow-merged
 * (module keys win); in every other combination the module value replaces
 * the project value outright.
 * Note: `typeof null === "object"`, so nulls follow the merge path exactly
 * as in the original implementation.
 * @param {*} projectValue2 - Project-level threshold value (may be undefined).
 * @param {*} moduleValue - Module-level override.
 * @returns {*} Effective threshold value for the category.
 */
function mergeThresholdCategory(projectValue2, moduleValue) {
  const mergeable = (v) => typeof v === "object" && !Array.isArray(v);
  const bothMergeable = projectValue2 !== void 0 && mergeable(projectValue2) && mergeable(moduleValue);
  return bothMergeable ? { ...projectValue2, ...moduleValue } : moduleValue;
}
4670
4751
  function resolveThresholds(scope, config) {
4671
4752
  const projectThresholds = {};
4672
4753
  for (const [key, val] of Object.entries(config.thresholds)) {
@@ -4682,14 +4763,7 @@ function resolveThresholds(scope, config) {
4682
4763
  const merged = { ...projectThresholds };
4683
4764
  for (const [category, moduleValue] of Object.entries(moduleOverrides)) {
4684
4765
  const projectValue2 = projectThresholds[category];
4685
- if (projectValue2 !== void 0 && typeof projectValue2 === "object" && !Array.isArray(projectValue2) && typeof moduleValue === "object" && !Array.isArray(moduleValue)) {
4686
- merged[category] = {
4687
- ...projectValue2,
4688
- ...moduleValue
4689
- };
4690
- } else {
4691
- merged[category] = moduleValue;
4692
- }
4766
+ merged[category] = mergeThresholdCategory(projectValue2, moduleValue);
4693
4767
  }
4694
4768
  return merged;
4695
4769
  }
@@ -5218,8 +5292,7 @@ function parseListField(fieldMap, ...keys) {
5218
5292
  if (raw === EM_DASH || raw === "none") return [];
5219
5293
  return raw.split(",").map((s) => s.trim());
5220
5294
  }
5221
- function parseFeatureFields(name, body) {
5222
- const fieldMap = extractFieldMap(body);
5295
+ function validateStatus(name, fieldMap) {
5223
5296
  const statusRaw = fieldMap.get("Status");
5224
5297
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
5225
5298
  return Err2(
@@ -5228,12 +5301,10 @@ function parseFeatureFields(name, body) {
5228
5301
  )
5229
5302
  );
5230
5303
  }
5231
- const specRaw = fieldMap.get("Spec") ?? EM_DASH;
5232
- const plans = parseListField(fieldMap, "Plans", "Plan");
5233
- const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
5234
- const assigneeRaw = fieldMap.get("Assignee") ?? EM_DASH;
5304
+ return Ok2(statusRaw);
5305
+ }
5306
+ function validatePriority(name, fieldMap) {
5235
5307
  const priorityRaw = fieldMap.get("Priority") ?? EM_DASH;
5236
- const externalIdRaw = fieldMap.get("External-ID") ?? EM_DASH;
5237
5308
  if (priorityRaw !== EM_DASH && !VALID_PRIORITIES.has(priorityRaw)) {
5238
5309
  return Err2(
5239
5310
  new Error(
@@ -5241,16 +5312,28 @@ function parseFeatureFields(name, body) {
5241
5312
  )
5242
5313
  );
5243
5314
  }
5315
+ return Ok2(priorityRaw === EM_DASH ? null : priorityRaw);
5316
+ }
5317
/**
 * Read an optional field from the parsed field map, normalizing the
 * "absent" markers (missing key, or an em-dash value) to null.
 * @param {Map<string, string>} fieldMap - Parsed feature fields.
 * @param {string} key - Field name to read.
 * @returns {string|null}
 */
function optionalField(fieldMap, key) {
  const value = fieldMap.get(key) ?? EM_DASH;
  if (value === EM_DASH) return null;
  return value;
}
5321
/**
 * Parse and validate a feature's field block into a structured record.
 * Status and priority are validated first (returning the failing Err result
 * immediately); list and optional fields are then normalized.
 * @param {string} name - Feature name.
 * @param {string} body - Raw body containing the field table.
 * @returns {object} Ok2-wrapped feature record, or the failing Err2 result.
 */
function parseFeatureFields(name, body) {
  const fieldMap = extractFieldMap(body);
  const statusResult = validateStatus(name, fieldMap);
  if (!statusResult.ok) return statusResult;
  const priorityResult = validatePriority(name, fieldMap);
  if (!priorityResult.ok) return priorityResult;
  const feature = {
    name,
    status: statusResult.value,
    spec: optionalField(fieldMap, "Spec"),
    plans: parseListField(fieldMap, "Plans", "Plan"),
    blockedBy: parseListField(fieldMap, "Blocked by", "Blockers"),
    summary: fieldMap.get("Summary") ?? "",
    assignee: optionalField(fieldMap, "Assignee"),
    priority: priorityResult.value,
    externalId: optionalField(fieldMap, "External-ID")
  };
  return Ok2(feature);
}
5256
5339
  function parseAssignmentHistory(body) {
@@ -5304,6 +5387,31 @@ var PredictionEngine = class {
5304
5387
  */
5305
5388
  predict(options) {
5306
5389
  const opts = this.resolveOptions(options);
5390
+ const snapshots = this.loadValidatedSnapshots();
5391
+ const thresholds = this.resolveThresholds(opts);
5392
+ const categoriesToProcess = opts.categories ?? [...ALL_CATEGORIES2];
5393
+ const { firstDate, lastSnapshot, currentT } = this.computeTimeOffsets(snapshots);
5394
+ const baselines = this.computeBaselines(
5395
+ categoriesToProcess,
5396
+ thresholds,
5397
+ snapshots,
5398
+ firstDate,
5399
+ currentT,
5400
+ opts.horizon
5401
+ );
5402
+ const specImpacts = this.computeSpecImpacts(opts);
5403
+ const categories = this.computeAdjustedForecasts(baselines, thresholds, specImpacts, currentT);
5404
+ const adjustedCategories = categories;
5405
+ return {
5406
+ generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
5407
+ snapshotsUsed: snapshots.length,
5408
+ timelineRange: { from: snapshots[0].capturedAt, to: lastSnapshot.capturedAt },
5409
+ stabilityForecast: this.computeStabilityForecast(adjustedCategories, thresholds, snapshots),
5410
+ categories: adjustedCategories,
5411
+ warnings: this.generateWarnings(adjustedCategories, opts.horizon)
5412
+ };
5413
+ }
5414
+ loadValidatedSnapshots() {
5307
5415
  const timeline = this.timelineManager.load();
5308
5416
  const snapshots = timeline.snapshots;
5309
5417
  if (snapshots.length < 3) {
@@ -5311,16 +5419,39 @@ var PredictionEngine = class {
5311
5419
  `PredictionEngine requires at least 3 snapshots, got ${snapshots.length}. Run "harness snapshot" to capture more data points.`
5312
5420
  );
5313
5421
  }
5314
- const thresholds = this.resolveThresholds(opts);
5315
- const categoriesToProcess = opts.categories ?? [...ALL_CATEGORIES2];
5422
+ return snapshots;
5423
+ }
5424
+ computeTimeOffsets(snapshots) {
5316
5425
  const firstDate = new Date(snapshots[0].capturedAt).getTime();
5317
5426
  const lastSnapshot = snapshots[snapshots.length - 1];
5318
5427
  const currentT = (new Date(lastSnapshot.capturedAt).getTime() - firstDate) / (7 * 24 * 60 * 60 * 1e3);
5428
+ return { firstDate, lastSnapshot, currentT };
5429
+ }
5430
+ // --- Private helpers ---
5431
+ resolveOptions(options) {
5432
+ return {
5433
+ horizon: options?.horizon ?? 12,
5434
+ includeRoadmap: options?.includeRoadmap ?? true,
5435
+ categories: options?.categories,
5436
+ thresholds: options?.thresholds
5437
+ };
5438
+ }
5439
+ resolveThresholds(opts) {
5440
+ const base = { ...DEFAULT_STABILITY_THRESHOLDS };
5441
+ if (opts.thresholds) {
5442
+ for (const [key, value] of Object.entries(opts.thresholds)) {
5443
+ if (value !== void 0) {
5444
+ base[key] = value;
5445
+ }
5446
+ }
5447
+ }
5448
+ return base;
5449
+ }
5450
+ computeBaselines(categoriesToProcess, thresholds, snapshots, firstDate, currentT, horizon) {
5319
5451
  const baselines = {};
5320
5452
  for (const category of ALL_CATEGORIES2) {
5321
5453
  const threshold = thresholds[category];
5322
- const shouldProcess = categoriesToProcess.includes(category);
5323
- if (!shouldProcess) {
5454
+ if (!categoriesToProcess.includes(category)) {
5324
5455
  baselines[category] = this.zeroForecast(category, threshold);
5325
5456
  continue;
5326
5457
  }
@@ -5330,108 +5461,61 @@ var PredictionEngine = class {
5330
5461
  timeSeries,
5331
5462
  currentT,
5332
5463
  threshold,
5333
- opts.horizon
5464
+ horizon
5334
5465
  );
5335
5466
  }
5336
- const specImpacts = this.computeSpecImpacts(opts);
5467
+ return baselines;
5468
+ }
5469
+ computeAdjustedForecasts(baselines, thresholds, specImpacts, currentT) {
5337
5470
  const categories = {};
5338
5471
  for (const category of ALL_CATEGORIES2) {
5339
5472
  const baseline = baselines[category];
5340
- const threshold = thresholds[category];
5341
- if (!specImpacts || specImpacts.length === 0) {
5342
- categories[category] = {
5343
- baseline,
5344
- adjusted: baseline,
5345
- contributingFeatures: []
5346
- };
5347
- continue;
5348
- }
5349
- let totalDelta = 0;
5350
- const contributing = [];
5351
- for (const impact of specImpacts) {
5352
- const delta = impact.deltas?.[category] ?? 0;
5353
- if (delta !== 0) {
5354
- totalDelta += delta;
5355
- contributing.push({
5356
- name: impact.featureName,
5357
- specPath: impact.specPath,
5358
- delta
5359
- });
5360
- }
5361
- }
5362
- if (totalDelta === 0) {
5363
- categories[category] = {
5364
- baseline,
5365
- adjusted: baseline,
5366
- contributingFeatures: []
5367
- };
5368
- continue;
5369
- }
5370
- const adjusted = {
5371
- ...baseline,
5372
- projectedValue4w: baseline.projectedValue4w + totalDelta,
5373
- projectedValue8w: baseline.projectedValue8w + totalDelta,
5374
- projectedValue12w: baseline.projectedValue12w + totalDelta
5375
- };
5376
- const adjustedFit = {
5377
- slope: baseline.regression.slope,
5378
- intercept: baseline.regression.intercept + totalDelta,
5379
- rSquared: baseline.regression.rSquared,
5380
- dataPoints: baseline.regression.dataPoints
5381
- };
5382
- adjusted.thresholdCrossingWeeks = weeksUntilThreshold(adjustedFit, currentT, threshold);
5383
- adjusted.regression = {
5384
- slope: adjustedFit.slope,
5385
- intercept: adjustedFit.intercept,
5386
- rSquared: adjustedFit.rSquared,
5387
- dataPoints: adjustedFit.dataPoints
5388
- };
5389
- categories[category] = {
5473
+ categories[category] = this.adjustForecastForCategory(
5474
+ category,
5390
5475
  baseline,
5391
- adjusted,
5392
- contributingFeatures: contributing
5393
- };
5476
+ thresholds[category],
5477
+ specImpacts,
5478
+ currentT
5479
+ );
5394
5480
  }
5395
- const warnings = this.generateWarnings(
5396
- categories,
5397
- opts.horizon
5398
- );
5399
- const stabilityForecast = this.computeStabilityForecast(
5400
- categories,
5401
- thresholds,
5402
- snapshots
5403
- );
5404
- return {
5405
- generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
5406
- snapshotsUsed: snapshots.length,
5407
- timelineRange: {
5408
- from: snapshots[0].capturedAt,
5409
- to: lastSnapshot.capturedAt
5410
- },
5411
- stabilityForecast,
5412
- categories,
5413
- warnings
5414
- };
5415
- }
5416
- // --- Private helpers ---
5417
- resolveOptions(options) {
5418
- return {
5419
- horizon: options?.horizon ?? 12,
5420
- includeRoadmap: options?.includeRoadmap ?? true,
5421
- categories: options?.categories,
5422
- thresholds: options?.thresholds
5423
- };
5481
+ return categories;
5424
5482
  }
5425
- resolveThresholds(opts) {
5426
- const base = { ...DEFAULT_STABILITY_THRESHOLDS };
5427
- if (opts.thresholds) {
5428
- for (const [key, value] of Object.entries(opts.thresholds)) {
5429
- if (value !== void 0) {
5430
- base[key] = value;
5431
- }
5483
+ adjustForecastForCategory(category, baseline, threshold, specImpacts, currentT) {
5484
+ if (!specImpacts || specImpacts.length === 0) {
5485
+ return { baseline, adjusted: baseline, contributingFeatures: [] };
5486
+ }
5487
+ let totalDelta = 0;
5488
+ const contributing = [];
5489
+ for (const impact of specImpacts) {
5490
+ const delta = impact.deltas?.[category] ?? 0;
5491
+ if (delta !== 0) {
5492
+ totalDelta += delta;
5493
+ contributing.push({ name: impact.featureName, specPath: impact.specPath, delta });
5432
5494
  }
5433
5495
  }
5434
- return base;
5496
+ if (totalDelta === 0) {
5497
+ return { baseline, adjusted: baseline, contributingFeatures: [] };
5498
+ }
5499
+ const adjusted = {
5500
+ ...baseline,
5501
+ projectedValue4w: baseline.projectedValue4w + totalDelta,
5502
+ projectedValue8w: baseline.projectedValue8w + totalDelta,
5503
+ projectedValue12w: baseline.projectedValue12w + totalDelta
5504
+ };
5505
+ const adjustedFit = {
5506
+ slope: baseline.regression.slope,
5507
+ intercept: baseline.regression.intercept + totalDelta,
5508
+ rSquared: baseline.regression.rSquared,
5509
+ dataPoints: baseline.regression.dataPoints
5510
+ };
5511
+ adjusted.thresholdCrossingWeeks = weeksUntilThreshold(adjustedFit, currentT, threshold);
5512
+ adjusted.regression = {
5513
+ slope: adjustedFit.slope,
5514
+ intercept: adjustedFit.intercept,
5515
+ rSquared: adjustedFit.rSquared,
5516
+ dataPoints: adjustedFit.dataPoints
5517
+ };
5518
+ return { baseline, adjusted, contributingFeatures: contributing };
5435
5519
  }
5436
5520
  /**
5437
5521
  * Extract time series for a single category from snapshots.
@@ -5509,31 +5593,40 @@ var PredictionEngine = class {
5509
5593
  for (const category of ALL_CATEGORIES2) {
5510
5594
  const af = categories[category];
5511
5595
  if (!af) continue;
5512
- const forecast = af.adjusted;
5513
- const crossing = forecast.thresholdCrossingWeeks;
5514
- if (crossing === null || crossing <= 0) continue;
5515
- let severity = null;
5516
- if (crossing <= criticalWindow && (forecast.confidence === "high" || forecast.confidence === "medium")) {
5517
- severity = "critical";
5518
- } else if (crossing <= warningWindow && (forecast.confidence === "high" || forecast.confidence === "medium")) {
5519
- severity = "warning";
5520
- } else if (crossing <= horizon) {
5521
- severity = "info";
5522
- }
5523
- if (severity) {
5524
- const contributingNames = af.contributingFeatures.map((f) => f.name);
5525
- warnings.push({
5526
- severity,
5527
- category,
5528
- message: `${category} projected to exceed threshold (~${crossing}w, ${forecast.confidence} confidence)`,
5529
- weeksUntil: crossing,
5530
- confidence: forecast.confidence,
5531
- contributingFeatures: contributingNames
5532
- });
5533
- }
5596
+ const warning = this.buildCategoryWarning(
5597
+ category,
5598
+ af,
5599
+ criticalWindow,
5600
+ warningWindow,
5601
+ horizon
5602
+ );
5603
+ if (warning) warnings.push(warning);
5534
5604
  }
5535
5605
  return warnings;
5536
5606
  }
5607
+ buildCategoryWarning(category, af, criticalWindow, warningWindow, horizon) {
5608
+ const forecast = af.adjusted;
5609
+ const crossing = forecast.thresholdCrossingWeeks;
5610
+ if (crossing === null || crossing <= 0) return null;
5611
+ const isHighConfidence = forecast.confidence === "high" || forecast.confidence === "medium";
5612
+ let severity = null;
5613
+ if (crossing <= criticalWindow && isHighConfidence) {
5614
+ severity = "critical";
5615
+ } else if (crossing <= warningWindow && isHighConfidence) {
5616
+ severity = "warning";
5617
+ } else if (crossing <= horizon) {
5618
+ severity = "info";
5619
+ }
5620
+ if (!severity) return null;
5621
+ return {
5622
+ severity,
5623
+ category,
5624
+ message: `${category} projected to exceed threshold (~${crossing}w, ${forecast.confidence} confidence)`,
5625
+ weeksUntil: crossing,
5626
+ confidence: forecast.confidence,
5627
+ contributingFeatures: af.contributingFeatures.map((f) => f.name)
5628
+ };
5629
+ }
5537
5630
  /**
5538
5631
  * Compute composite stability forecast by projecting per-category values
5539
5632
  * forward and computing stability scores at each horizon.
@@ -5602,14 +5695,9 @@ var PredictionEngine = class {
5602
5695
  const raw = fs5.readFileSync(roadmapPath, "utf-8");
5603
5696
  const parseResult = parseRoadmap(raw);
5604
5697
  if (!parseResult.ok) return null;
5605
- const features = [];
5606
- for (const milestone of parseResult.value.milestones) {
5607
- for (const feature of milestone.features) {
5608
- if (feature.status === "planned" || feature.status === "in-progress") {
5609
- features.push({ name: feature.name, spec: feature.spec });
5610
- }
5611
- }
5612
- }
5698
+ const features = parseResult.value.milestones.flatMap(
5699
+ (m) => m.features.filter((f) => f.status === "planned" || f.status === "in-progress").map((f) => ({ name: f.name, spec: f.spec }))
5700
+ );
5613
5701
  if (features.length === 0) return null;
5614
5702
  return this.estimator.estimateAll(features);
5615
5703
  } catch {
@@ -6257,10 +6345,10 @@ async function saveState(projectPath, state, stream, session) {
6257
6345
  }
6258
6346
  }
6259
6347
 
6260
- // src/state/learnings.ts
6348
+ // src/state/learnings-content.ts
6261
6349
  import * as fs11 from "fs";
6262
6350
  import * as path8 from "path";
6263
- import * as crypto from "crypto";
6351
+ import * as crypto2 from "crypto";
6264
6352
  function parseFrontmatter2(line) {
6265
6353
  const match = line.match(/^<!--\s+hash:([a-f0-9]+)(?:\s+tags:([^\s]+))?\s+-->/);
6266
6354
  if (!match) return null;
@@ -6268,8 +6356,27 @@ function parseFrontmatter2(line) {
6268
6356
  const tags = match[2] ? match[2].split(",").filter(Boolean) : [];
6269
6357
  return { hash, tags };
6270
6358
  }
6359
+ function parseDateFromEntry(entry) {
6360
+ const match = entry.match(/(\d{4}-\d{2}-\d{2})/);
6361
+ return match ? match[1] ?? null : null;
6362
+ }
6363
+ function extractIndexEntry(entry) {
6364
+ const lines = entry.split("\n");
6365
+ const summary = lines[0] ?? entry;
6366
+ const tags = [];
6367
+ const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
6368
+ if (skillMatch?.[1]) tags.push(skillMatch[1]);
6369
+ const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
6370
+ if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
6371
+ return {
6372
+ hash: computeEntryHash(entry),
6373
+ tags,
6374
+ summary,
6375
+ fullText: entry
6376
+ };
6377
+ }
6271
6378
  function computeEntryHash(text) {
6272
- return crypto.createHash("sha256").update(text).digest("hex").slice(0, 8);
6379
+ return crypto2.createHash("sha256").update(text).digest("hex").slice(0, 8);
6273
6380
  }
6274
6381
  function normalizeLearningContent(text) {
6275
6382
  let normalized = text;
@@ -6284,7 +6391,7 @@ function normalizeLearningContent(text) {
6284
6391
  return normalized;
6285
6392
  }
6286
6393
  function computeContentHash(text) {
6287
- return crypto.createHash("sha256").update(text).digest("hex").slice(0, 16);
6394
+ return crypto2.createHash("sha256").update(text).digest("hex").slice(0, 16);
6288
6395
  }
6289
6396
  function loadContentHashes(stateDir) {
6290
6397
  const hashesPath = path8.join(stateDir, CONTENT_HASHES_FILE);
@@ -6302,8 +6409,8 @@ function saveContentHashes(stateDir, index) {
6302
6409
  const hashesPath = path8.join(stateDir, CONTENT_HASHES_FILE);
6303
6410
  fs11.writeFileSync(hashesPath, JSON.stringify(index, null, 2) + "\n");
6304
6411
  }
6305
- function rebuildContentHashes(stateDir) {
6306
- const learningsPath = path8.join(stateDir, LEARNINGS_FILE);
6412
+ function rebuildContentHashes(stateDir, learningsFile) {
6413
+ const learningsPath = path8.join(stateDir, learningsFile);
6307
6414
  if (!fs11.existsSync(learningsPath)) return {};
6308
6415
  const content = fs11.readFileSync(learningsPath, "utf-8");
6309
6416
  const lines = content.split("\n");
@@ -6324,43 +6431,125 @@ function rebuildContentHashes(stateDir) {
6324
6431
  saveContentHashes(stateDir, index);
6325
6432
  return index;
6326
6433
  }
6327
- function extractIndexEntry(entry) {
6328
- const lines = entry.split("\n");
6329
- const summary = lines[0] ?? entry;
6330
- const tags = [];
6331
- const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
6332
- if (skillMatch?.[1]) tags.push(skillMatch[1]);
6333
- const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
6334
- if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
6335
- return {
6336
- hash: computeEntryHash(entry),
6337
- tags,
6338
- summary,
6339
- fullText: entry
6340
- };
6434
+ function analyzeLearningPatterns(entries) {
6435
+ const tagGroups = /* @__PURE__ */ new Map();
6436
+ for (const entry of entries) {
6437
+ const tagMatches = entry.matchAll(/\[(skill:[^\]]+)\]|\[(outcome:[^\]]+)\]/g);
6438
+ for (const match of tagMatches) {
6439
+ const tag = match[1] ?? match[2];
6440
+ if (tag) {
6441
+ const group = tagGroups.get(tag) ?? [];
6442
+ group.push(entry);
6443
+ tagGroups.set(tag, group);
6444
+ }
6445
+ }
6446
+ }
6447
+ const patterns = [];
6448
+ for (const [tag, groupEntries] of tagGroups) {
6449
+ if (groupEntries.length >= 3) {
6450
+ patterns.push({ tag, count: groupEntries.length, entries: groupEntries });
6451
+ }
6452
+ }
6453
+ return patterns.sort((a, b) => b.count - a.count);
6454
+ }
6455
+ function estimateTokens(text) {
6456
+ return Math.ceil(text.length / 4);
6341
6457
  }
6458
+ function scoreRelevance(entry, intent) {
6459
+ if (!intent || intent.trim() === "") return 0;
6460
+ const intentWords = intent.toLowerCase().split(/\s+/).filter((w) => w.length > 2);
6461
+ if (intentWords.length === 0) return 0;
6462
+ const entryLower = entry.toLowerCase();
6463
+ const matches = intentWords.filter((word) => entryLower.includes(word));
6464
+ return matches.length / intentWords.length;
6465
+ }
6466
+
6467
+ // src/state/learnings-loader.ts
6468
+ import * as fs12 from "fs";
6469
+ import * as path9 from "path";
6342
6470
  var learningsCacheMap = /* @__PURE__ */ new Map();
6343
6471
  function clearLearningsCache() {
6344
6472
  learningsCacheMap.clear();
6345
6473
  }
6474
+ function invalidateLearningsCacheEntry(key) {
6475
+ learningsCacheMap.delete(key);
6476
+ }
6477
+ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6478
+ try {
6479
+ const dirResult = await getStateDir(projectPath, stream, session);
6480
+ if (!dirResult.ok) return dirResult;
6481
+ const stateDir = dirResult.value;
6482
+ const learningsPath = path9.join(stateDir, LEARNINGS_FILE);
6483
+ if (!fs12.existsSync(learningsPath)) {
6484
+ return Ok([]);
6485
+ }
6486
+ const stats = fs12.statSync(learningsPath);
6487
+ const cacheKey = learningsPath;
6488
+ const cached = learningsCacheMap.get(cacheKey);
6489
+ let entries;
6490
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
6491
+ entries = cached.entries;
6492
+ } else {
6493
+ const content = fs12.readFileSync(learningsPath, "utf-8");
6494
+ const lines = content.split("\n");
6495
+ entries = [];
6496
+ let currentBlock = [];
6497
+ for (const line of lines) {
6498
+ if (line.startsWith("# ")) continue;
6499
+ if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
6500
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6501
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
6502
+ if (isDatedBullet || isHeading) {
6503
+ if (currentBlock.length > 0) {
6504
+ entries.push(currentBlock.join("\n"));
6505
+ }
6506
+ currentBlock = [line];
6507
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
6508
+ currentBlock.push(line);
6509
+ }
6510
+ }
6511
+ if (currentBlock.length > 0) {
6512
+ entries.push(currentBlock.join("\n"));
6513
+ }
6514
+ learningsCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
6515
+ evictIfNeeded(learningsCacheMap);
6516
+ }
6517
+ if (!skillName) {
6518
+ return Ok(entries);
6519
+ }
6520
+ const filtered = entries.filter((entry) => entry.includes(`[skill:${skillName}]`));
6521
+ return Ok(filtered);
6522
+ } catch (error) {
6523
+ return Err(
6524
+ new Error(
6525
+ `Failed to load learnings: ${error instanceof Error ? error.message : String(error)}`
6526
+ )
6527
+ );
6528
+ }
6529
+ }
6530
+
6531
+ // src/state/learnings.ts
6532
+ import * as fs13 from "fs";
6533
+ import * as path10 from "path";
6534
+ import * as crypto3 from "crypto";
6346
6535
  async function appendLearning(projectPath, learning, skillName, outcome, stream, session) {
6347
6536
  try {
6348
6537
  const dirResult = await getStateDir(projectPath, stream, session);
6349
6538
  if (!dirResult.ok) return dirResult;
6350
6539
  const stateDir = dirResult.value;
6351
- const learningsPath = path8.join(stateDir, LEARNINGS_FILE);
6352
- fs11.mkdirSync(stateDir, { recursive: true });
6540
+ const learningsPath = path10.join(stateDir, LEARNINGS_FILE);
6541
+ fs13.mkdirSync(stateDir, { recursive: true });
6353
6542
  const normalizedContent = normalizeLearningContent(learning);
6354
6543
  const contentHash = computeContentHash(normalizedContent);
6355
- const hashesPath = path8.join(stateDir, CONTENT_HASHES_FILE);
6544
+ const hashesPath = path10.join(stateDir, CONTENT_HASHES_FILE);
6356
6545
  let contentHashes;
6357
- if (fs11.existsSync(hashesPath)) {
6546
+ if (fs13.existsSync(hashesPath)) {
6358
6547
  contentHashes = loadContentHashes(stateDir);
6359
- if (Object.keys(contentHashes).length === 0 && fs11.existsSync(learningsPath)) {
6360
- contentHashes = rebuildContentHashes(stateDir);
6548
+ if (Object.keys(contentHashes).length === 0 && fs13.existsSync(learningsPath)) {
6549
+ contentHashes = rebuildContentHashes(stateDir, LEARNINGS_FILE);
6361
6550
  }
6362
- } else if (fs11.existsSync(learningsPath)) {
6363
- contentHashes = rebuildContentHashes(stateDir);
6551
+ } else if (fs13.existsSync(learningsPath)) {
6552
+ contentHashes = rebuildContentHashes(stateDir, LEARNINGS_FILE);
6364
6553
  } else {
6365
6554
  contentHashes = {};
6366
6555
  }
@@ -6379,7 +6568,7 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream,
6379
6568
  } else {
6380
6569
  bulletLine = `- **${timestamp}:** ${learning}`;
6381
6570
  }
6382
- const hash = crypto.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
6571
+ const hash = crypto3.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
6383
6572
  const tagsStr = fmTags.length > 0 ? ` tags:${fmTags.join(",")}` : "";
6384
6573
  const frontmatter = `<!-- hash:${hash}${tagsStr} -->`;
6385
6574
  const entry = `
@@ -6387,19 +6576,19 @@ ${frontmatter}
6387
6576
  ${bulletLine}
6388
6577
  `;
6389
6578
  let existingLineCount;
6390
- if (!fs11.existsSync(learningsPath)) {
6391
- fs11.writeFileSync(learningsPath, `# Learnings
6579
+ if (!fs13.existsSync(learningsPath)) {
6580
+ fs13.writeFileSync(learningsPath, `# Learnings
6392
6581
  ${entry}`);
6393
6582
  existingLineCount = 1;
6394
6583
  } else {
6395
- const existingContent = fs11.readFileSync(learningsPath, "utf-8");
6584
+ const existingContent = fs13.readFileSync(learningsPath, "utf-8");
6396
6585
  existingLineCount = existingContent.split("\n").length;
6397
- fs11.appendFileSync(learningsPath, entry);
6586
+ fs13.appendFileSync(learningsPath, entry);
6398
6587
  }
6399
6588
  const bulletLine_lineNum = existingLineCount + 2;
6400
6589
  contentHashes[contentHash] = { date: timestamp ?? "", line: bulletLine_lineNum };
6401
6590
  saveContentHashes(stateDir, contentHashes);
6402
- learningsCacheMap.delete(learningsPath);
6591
+ invalidateLearningsCacheEntry(learningsPath);
6403
6592
  return Ok(void 0);
6404
6593
  } catch (error) {
6405
6594
  return Err(
@@ -6409,42 +6598,6 @@ ${entry}`);
6409
6598
  );
6410
6599
  }
6411
6600
  }
6412
- function estimateTokens(text) {
6413
- return Math.ceil(text.length / 4);
6414
- }
6415
- function scoreRelevance(entry, intent) {
6416
- if (!intent || intent.trim() === "") return 0;
6417
- const intentWords = intent.toLowerCase().split(/\s+/).filter((w) => w.length > 2);
6418
- if (intentWords.length === 0) return 0;
6419
- const entryLower = entry.toLowerCase();
6420
- const matches = intentWords.filter((word) => entryLower.includes(word));
6421
- return matches.length / intentWords.length;
6422
- }
6423
- function parseDateFromEntry(entry) {
6424
- const match = entry.match(/(\d{4}-\d{2}-\d{2})/);
6425
- return match ? match[1] ?? null : null;
6426
- }
6427
- function analyzeLearningPatterns(entries) {
6428
- const tagGroups = /* @__PURE__ */ new Map();
6429
- for (const entry of entries) {
6430
- const tagMatches = entry.matchAll(/\[(skill:[^\]]+)\]|\[(outcome:[^\]]+)\]/g);
6431
- for (const match of tagMatches) {
6432
- const tag = match[1] ?? match[2];
6433
- if (tag) {
6434
- const group = tagGroups.get(tag) ?? [];
6435
- group.push(entry);
6436
- tagGroups.set(tag, group);
6437
- }
6438
- }
6439
- }
6440
- const patterns = [];
6441
- for (const [tag, groupEntries] of tagGroups) {
6442
- if (groupEntries.length >= 3) {
6443
- patterns.push({ tag, count: groupEntries.length, entries: groupEntries });
6444
- }
6445
- }
6446
- return patterns.sort((a, b) => b.count - a.count);
6447
- }
6448
6601
  async function loadBudgetedLearnings(projectPath, options) {
6449
6602
  const { intent, tokenBudget = 1e3, skill, session, stream, depth = "summary" } = options;
6450
6603
  if (depth === "index") {
@@ -6508,11 +6661,11 @@ async function loadIndexEntries(projectPath, skillName, stream, session) {
6508
6661
  const dirResult = await getStateDir(projectPath, stream, session);
6509
6662
  if (!dirResult.ok) return dirResult;
6510
6663
  const stateDir = dirResult.value;
6511
- const learningsPath = path8.join(stateDir, LEARNINGS_FILE);
6512
- if (!fs11.existsSync(learningsPath)) {
6664
+ const learningsPath = path10.join(stateDir, LEARNINGS_FILE);
6665
+ if (!fs13.existsSync(learningsPath)) {
6513
6666
  return Ok([]);
6514
6667
  }
6515
- const content = fs11.readFileSync(learningsPath, "utf-8");
6668
+ const content = fs13.readFileSync(learningsPath, "utf-8");
6516
6669
  const lines = content.split("\n");
6517
6670
  const indexEntries = [];
6518
6671
  let pendingFrontmatter = null;
@@ -6565,74 +6718,25 @@ async function loadIndexEntries(projectPath, skillName, stream, session) {
6565
6718
  );
6566
6719
  }
6567
6720
  }
6568
- async function loadRelevantLearnings(projectPath, skillName, stream, session) {
6569
- try {
6570
- const dirResult = await getStateDir(projectPath, stream, session);
6571
- if (!dirResult.ok) return dirResult;
6572
- const stateDir = dirResult.value;
6573
- const learningsPath = path8.join(stateDir, LEARNINGS_FILE);
6574
- if (!fs11.existsSync(learningsPath)) {
6575
- return Ok([]);
6576
- }
6577
- const stats = fs11.statSync(learningsPath);
6578
- const cacheKey = learningsPath;
6579
- const cached = learningsCacheMap.get(cacheKey);
6580
- let entries;
6581
- if (cached && cached.mtimeMs === stats.mtimeMs) {
6582
- entries = cached.entries;
6583
- } else {
6584
- const content = fs11.readFileSync(learningsPath, "utf-8");
6585
- const lines = content.split("\n");
6586
- entries = [];
6587
- let currentBlock = [];
6588
- for (const line of lines) {
6589
- if (line.startsWith("# ")) continue;
6590
- if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
6591
- const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
6592
- const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
6593
- if (isDatedBullet || isHeading) {
6594
- if (currentBlock.length > 0) {
6595
- entries.push(currentBlock.join("\n"));
6596
- }
6597
- currentBlock = [line];
6598
- } else if (line.trim() !== "" && currentBlock.length > 0) {
6599
- currentBlock.push(line);
6600
- }
6601
- }
6602
- if (currentBlock.length > 0) {
6603
- entries.push(currentBlock.join("\n"));
6604
- }
6605
- learningsCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
6606
- evictIfNeeded(learningsCacheMap);
6607
- }
6608
- if (!skillName) {
6609
- return Ok(entries);
6610
- }
6611
- const filtered = entries.filter((entry) => entry.includes(`[skill:${skillName}]`));
6612
- return Ok(filtered);
6613
- } catch (error) {
6614
- return Err(
6615
- new Error(
6616
- `Failed to load learnings: ${error instanceof Error ? error.message : String(error)}`
6617
- )
6618
- );
6619
- }
6620
- }
6721
+
6722
+ // src/state/learnings-lifecycle.ts
6723
+ import * as fs14 from "fs";
6724
+ import * as path11 from "path";
6621
6725
  async function archiveLearnings(projectPath, entries, stream) {
6622
6726
  try {
6623
6727
  const dirResult = await getStateDir(projectPath, stream);
6624
6728
  if (!dirResult.ok) return dirResult;
6625
6729
  const stateDir = dirResult.value;
6626
- const archiveDir = path8.join(stateDir, "learnings-archive");
6627
- fs11.mkdirSync(archiveDir, { recursive: true });
6730
+ const archiveDir = path11.join(stateDir, "learnings-archive");
6731
+ fs14.mkdirSync(archiveDir, { recursive: true });
6628
6732
  const now = /* @__PURE__ */ new Date();
6629
6733
  const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
6630
- const archivePath = path8.join(archiveDir, `${yearMonth}.md`);
6734
+ const archivePath = path11.join(archiveDir, `${yearMonth}.md`);
6631
6735
  const archiveContent = entries.join("\n\n") + "\n";
6632
- if (fs11.existsSync(archivePath)) {
6633
- fs11.appendFileSync(archivePath, "\n" + archiveContent);
6736
+ if (fs14.existsSync(archivePath)) {
6737
+ fs14.appendFileSync(archivePath, "\n" + archiveContent);
6634
6738
  } else {
6635
- fs11.writeFileSync(archivePath, `# Learnings Archive
6739
+ fs14.writeFileSync(archivePath, `# Learnings Archive
6636
6740
 
6637
6741
  ${archiveContent}`);
6638
6742
  }
@@ -6650,8 +6754,8 @@ async function pruneLearnings(projectPath, stream) {
6650
6754
  const dirResult = await getStateDir(projectPath, stream);
6651
6755
  if (!dirResult.ok) return dirResult;
6652
6756
  const stateDir = dirResult.value;
6653
- const learningsPath = path8.join(stateDir, LEARNINGS_FILE);
6654
- if (!fs11.existsSync(learningsPath)) {
6757
+ const learningsPath = path11.join(stateDir, LEARNINGS_FILE);
6758
+ if (!fs14.existsSync(learningsPath)) {
6655
6759
  return Ok({ kept: 0, archived: 0, patterns: [] });
6656
6760
  }
6657
6761
  const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
@@ -6682,8 +6786,8 @@ async function pruneLearnings(projectPath, stream) {
6682
6786
  if (!archiveResult.ok) return archiveResult;
6683
6787
  }
6684
6788
  const newContent = "# Learnings\n\n" + toKeep.join("\n\n") + "\n";
6685
- fs11.writeFileSync(learningsPath, newContent);
6686
- learningsCacheMap.delete(learningsPath);
6789
+ fs14.writeFileSync(learningsPath, newContent);
6790
+ invalidateLearningsCacheEntry(learningsPath);
6687
6791
  return Ok({
6688
6792
  kept: toKeep.length,
6689
6793
  archived: toArchive.length,
@@ -6727,21 +6831,21 @@ async function promoteSessionLearnings(projectPath, sessionSlug, stream) {
6727
6831
  const dirResult = await getStateDir(projectPath, stream);
6728
6832
  if (!dirResult.ok) return dirResult;
6729
6833
  const stateDir = dirResult.value;
6730
- const globalPath = path8.join(stateDir, LEARNINGS_FILE);
6731
- const existingGlobal = fs11.existsSync(globalPath) ? fs11.readFileSync(globalPath, "utf-8") : "";
6834
+ const globalPath = path11.join(stateDir, LEARNINGS_FILE);
6835
+ const existingGlobal = fs14.existsSync(globalPath) ? fs14.readFileSync(globalPath, "utf-8") : "";
6732
6836
  const newEntries = toPromote.filter((entry) => !existingGlobal.includes(entry.trim()));
6733
6837
  if (newEntries.length === 0) {
6734
6838
  return Ok({ promoted: 0, skipped: skipped + toPromote.length });
6735
6839
  }
6736
6840
  const promotedContent = newEntries.join("\n\n") + "\n";
6737
6841
  if (!existingGlobal) {
6738
- fs11.writeFileSync(globalPath, `# Learnings
6842
+ fs14.writeFileSync(globalPath, `# Learnings
6739
6843
 
6740
6844
  ${promotedContent}`);
6741
6845
  } else {
6742
- fs11.appendFileSync(globalPath, "\n\n" + promotedContent);
6846
+ fs14.appendFileSync(globalPath, "\n\n" + promotedContent);
6743
6847
  }
6744
- learningsCacheMap.delete(globalPath);
6848
+ invalidateLearningsCacheEntry(globalPath);
6745
6849
  return Ok({
6746
6850
  promoted: newEntries.length,
6747
6851
  skipped: skipped + (toPromote.length - newEntries.length)
@@ -6761,8 +6865,8 @@ async function countLearningEntries(projectPath, stream) {
6761
6865
  }
6762
6866
 
6763
6867
  // src/state/failures.ts
6764
- import * as fs12 from "fs";
6765
- import * as path9 from "path";
6868
+ import * as fs15 from "fs";
6869
+ import * as path12 from "path";
6766
6870
  var failuresCacheMap = /* @__PURE__ */ new Map();
6767
6871
  function clearFailuresCache() {
6768
6872
  failuresCacheMap.clear();
@@ -6773,17 +6877,17 @@ async function appendFailure(projectPath, description, skillName, type, stream,
6773
6877
  const dirResult = await getStateDir(projectPath, stream, session);
6774
6878
  if (!dirResult.ok) return dirResult;
6775
6879
  const stateDir = dirResult.value;
6776
- const failuresPath = path9.join(stateDir, FAILURES_FILE);
6777
- fs12.mkdirSync(stateDir, { recursive: true });
6880
+ const failuresPath = path12.join(stateDir, FAILURES_FILE);
6881
+ fs15.mkdirSync(stateDir, { recursive: true });
6778
6882
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6779
6883
  const entry = `
6780
6884
  - **${timestamp} [skill:${skillName}] [type:${type}]:** ${description}
6781
6885
  `;
6782
- if (!fs12.existsSync(failuresPath)) {
6783
- fs12.writeFileSync(failuresPath, `# Failures
6886
+ if (!fs15.existsSync(failuresPath)) {
6887
+ fs15.writeFileSync(failuresPath, `# Failures
6784
6888
  ${entry}`);
6785
6889
  } else {
6786
- fs12.appendFileSync(failuresPath, entry);
6890
+ fs15.appendFileSync(failuresPath, entry);
6787
6891
  }
6788
6892
  failuresCacheMap.delete(failuresPath);
6789
6893
  return Ok(void 0);
@@ -6800,17 +6904,17 @@ async function loadFailures(projectPath, stream, session) {
6800
6904
  const dirResult = await getStateDir(projectPath, stream, session);
6801
6905
  if (!dirResult.ok) return dirResult;
6802
6906
  const stateDir = dirResult.value;
6803
- const failuresPath = path9.join(stateDir, FAILURES_FILE);
6804
- if (!fs12.existsSync(failuresPath)) {
6907
+ const failuresPath = path12.join(stateDir, FAILURES_FILE);
6908
+ if (!fs15.existsSync(failuresPath)) {
6805
6909
  return Ok([]);
6806
6910
  }
6807
- const stats = fs12.statSync(failuresPath);
6911
+ const stats = fs15.statSync(failuresPath);
6808
6912
  const cacheKey = failuresPath;
6809
6913
  const cached = failuresCacheMap.get(cacheKey);
6810
6914
  if (cached && cached.mtimeMs === stats.mtimeMs) {
6811
6915
  return Ok(cached.entries);
6812
6916
  }
6813
- const content = fs12.readFileSync(failuresPath, "utf-8");
6917
+ const content = fs15.readFileSync(failuresPath, "utf-8");
6814
6918
  const entries = [];
6815
6919
  for (const line of content.split("\n")) {
6816
6920
  const match = line.match(FAILURE_LINE_REGEX);
@@ -6839,20 +6943,20 @@ async function archiveFailures(projectPath, stream, session) {
6839
6943
  const dirResult = await getStateDir(projectPath, stream, session);
6840
6944
  if (!dirResult.ok) return dirResult;
6841
6945
  const stateDir = dirResult.value;
6842
- const failuresPath = path9.join(stateDir, FAILURES_FILE);
6843
- if (!fs12.existsSync(failuresPath)) {
6946
+ const failuresPath = path12.join(stateDir, FAILURES_FILE);
6947
+ if (!fs15.existsSync(failuresPath)) {
6844
6948
  return Ok(void 0);
6845
6949
  }
6846
- const archiveDir = path9.join(stateDir, "archive");
6847
- fs12.mkdirSync(archiveDir, { recursive: true });
6950
+ const archiveDir = path12.join(stateDir, "archive");
6951
+ fs15.mkdirSync(archiveDir, { recursive: true });
6848
6952
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6849
6953
  let archiveName = `failures-${date}.md`;
6850
6954
  let counter = 2;
6851
- while (fs12.existsSync(path9.join(archiveDir, archiveName))) {
6955
+ while (fs15.existsSync(path12.join(archiveDir, archiveName))) {
6852
6956
  archiveName = `failures-${date}-${counter}.md`;
6853
6957
  counter++;
6854
6958
  }
6855
- fs12.renameSync(failuresPath, path9.join(archiveDir, archiveName));
6959
+ fs15.renameSync(failuresPath, path12.join(archiveDir, archiveName));
6856
6960
  failuresCacheMap.delete(failuresPath);
6857
6961
  return Ok(void 0);
6858
6962
  } catch (error) {
@@ -6865,16 +6969,16 @@ async function archiveFailures(projectPath, stream, session) {
6865
6969
  }
6866
6970
 
6867
6971
  // src/state/handoff.ts
6868
- import * as fs13 from "fs";
6869
- import * as path10 from "path";
6972
+ import * as fs16 from "fs";
6973
+ import * as path13 from "path";
6870
6974
  async function saveHandoff(projectPath, handoff, stream, session) {
6871
6975
  try {
6872
6976
  const dirResult = await getStateDir(projectPath, stream, session);
6873
6977
  if (!dirResult.ok) return dirResult;
6874
6978
  const stateDir = dirResult.value;
6875
- const handoffPath = path10.join(stateDir, HANDOFF_FILE);
6876
- fs13.mkdirSync(stateDir, { recursive: true });
6877
- fs13.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
6979
+ const handoffPath = path13.join(stateDir, HANDOFF_FILE);
6980
+ fs16.mkdirSync(stateDir, { recursive: true });
6981
+ fs16.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
6878
6982
  return Ok(void 0);
6879
6983
  } catch (error) {
6880
6984
  return Err(
@@ -6887,11 +6991,11 @@ async function loadHandoff(projectPath, stream, session) {
6887
6991
  const dirResult = await getStateDir(projectPath, stream, session);
6888
6992
  if (!dirResult.ok) return dirResult;
6889
6993
  const stateDir = dirResult.value;
6890
- const handoffPath = path10.join(stateDir, HANDOFF_FILE);
6891
- if (!fs13.existsSync(handoffPath)) {
6994
+ const handoffPath = path13.join(stateDir, HANDOFF_FILE);
6995
+ if (!fs16.existsSync(handoffPath)) {
6892
6996
  return Ok(null);
6893
6997
  }
6894
- const raw = fs13.readFileSync(handoffPath, "utf-8");
6998
+ const raw = fs16.readFileSync(handoffPath, "utf-8");
6895
6999
  const parsed = JSON.parse(raw);
6896
7000
  const result = HandoffSchema.safeParse(parsed);
6897
7001
  if (!result.success) {
@@ -6906,33 +7010,33 @@ async function loadHandoff(projectPath, stream, session) {
6906
7010
  }
6907
7011
 
6908
7012
  // src/state/mechanical-gate.ts
6909
- import * as fs14 from "fs";
6910
- import * as path11 from "path";
7013
+ import * as fs17 from "fs";
7014
+ import * as path14 from "path";
6911
7015
  import { execSync as execSync2 } from "child_process";
6912
7016
  var SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
6913
7017
  function loadChecksFromConfig(gateConfigPath) {
6914
- if (!fs14.existsSync(gateConfigPath)) return [];
6915
- const raw = JSON.parse(fs14.readFileSync(gateConfigPath, "utf-8"));
7018
+ if (!fs17.existsSync(gateConfigPath)) return [];
7019
+ const raw = JSON.parse(fs17.readFileSync(gateConfigPath, "utf-8"));
6916
7020
  const config = GateConfigSchema.safeParse(raw);
6917
7021
  if (config.success && config.data.checks) return config.data.checks;
6918
7022
  return [];
6919
7023
  }
6920
7024
  function discoverChecksFromProject(projectPath) {
6921
7025
  const checks = [];
6922
- const packageJsonPath = path11.join(projectPath, "package.json");
6923
- if (fs14.existsSync(packageJsonPath)) {
6924
- const pkg = JSON.parse(fs14.readFileSync(packageJsonPath, "utf-8"));
7026
+ const packageJsonPath = path14.join(projectPath, "package.json");
7027
+ if (fs17.existsSync(packageJsonPath)) {
7028
+ const pkg = JSON.parse(fs17.readFileSync(packageJsonPath, "utf-8"));
6925
7029
  const scripts = pkg.scripts || {};
6926
7030
  if (scripts.test) checks.push({ name: "test", command: "npm test" });
6927
7031
  if (scripts.lint) checks.push({ name: "lint", command: "npm run lint" });
6928
7032
  if (scripts.typecheck) checks.push({ name: "typecheck", command: "npm run typecheck" });
6929
7033
  if (scripts.build) checks.push({ name: "build", command: "npm run build" });
6930
7034
  }
6931
- if (fs14.existsSync(path11.join(projectPath, "go.mod"))) {
7035
+ if (fs17.existsSync(path14.join(projectPath, "go.mod"))) {
6932
7036
  checks.push({ name: "test", command: "go test ./..." });
6933
7037
  checks.push({ name: "build", command: "go build ./..." });
6934
7038
  }
6935
- if (fs14.existsSync(path11.join(projectPath, "pyproject.toml")) || fs14.existsSync(path11.join(projectPath, "setup.py"))) {
7039
+ if (fs17.existsSync(path14.join(projectPath, "pyproject.toml")) || fs17.existsSync(path14.join(projectPath, "setup.py"))) {
6936
7040
  checks.push({ name: "test", command: "python -m pytest" });
6937
7041
  }
6938
7042
  return checks;
@@ -6972,8 +7076,8 @@ function executeCheck(check, projectPath) {
6972
7076
  }
6973
7077
  }
6974
7078
  async function runMechanicalGate(projectPath) {
6975
- const harnessDir = path11.join(projectPath, HARNESS_DIR);
6976
- const gateConfigPath = path11.join(harnessDir, GATE_CONFIG_FILE);
7079
+ const harnessDir = path14.join(projectPath, HARNESS_DIR);
7080
+ const gateConfigPath = path14.join(harnessDir, GATE_CONFIG_FILE);
6977
7081
  try {
6978
7082
  let checks = loadChecksFromConfig(gateConfigPath);
6979
7083
  if (checks.length === 0) {
@@ -6994,8 +7098,8 @@ async function runMechanicalGate(projectPath) {
6994
7098
  }
6995
7099
 
6996
7100
  // src/state/session-summary.ts
6997
- import * as fs15 from "fs";
6998
- import * as path12 from "path";
7101
+ import * as fs18 from "fs";
7102
+ import * as path15 from "path";
6999
7103
  function formatSummary(data) {
7000
7104
  const lines = [
7001
7105
  "## Session Summary",
@@ -7033,9 +7137,9 @@ function writeSessionSummary(projectPath, sessionSlug, data) {
7033
7137
  const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7034
7138
  if (!dirResult.ok) return dirResult;
7035
7139
  const sessionDir = dirResult.value;
7036
- const summaryPath = path12.join(sessionDir, SUMMARY_FILE);
7140
+ const summaryPath = path15.join(sessionDir, SUMMARY_FILE);
7037
7141
  const content = formatSummary(data);
7038
- fs15.writeFileSync(summaryPath, content);
7142
+ fs18.writeFileSync(summaryPath, content);
7039
7143
  const description = deriveIndexDescription(data);
7040
7144
  updateSessionIndex(projectPath, sessionSlug, description);
7041
7145
  return Ok(void 0);
@@ -7052,11 +7156,11 @@ function loadSessionSummary(projectPath, sessionSlug) {
7052
7156
  const dirResult = resolveSessionDir(projectPath, sessionSlug);
7053
7157
  if (!dirResult.ok) return dirResult;
7054
7158
  const sessionDir = dirResult.value;
7055
- const summaryPath = path12.join(sessionDir, SUMMARY_FILE);
7056
- if (!fs15.existsSync(summaryPath)) {
7159
+ const summaryPath = path15.join(sessionDir, SUMMARY_FILE);
7160
+ if (!fs18.existsSync(summaryPath)) {
7057
7161
  return Ok(null);
7058
7162
  }
7059
- const content = fs15.readFileSync(summaryPath, "utf-8");
7163
+ const content = fs18.readFileSync(summaryPath, "utf-8");
7060
7164
  return Ok(content);
7061
7165
  } catch (error) {
7062
7166
  return Err(
@@ -7068,11 +7172,11 @@ function loadSessionSummary(projectPath, sessionSlug) {
7068
7172
  }
7069
7173
  function listActiveSessions(projectPath) {
7070
7174
  try {
7071
- const indexPath2 = path12.join(projectPath, HARNESS_DIR, SESSIONS_DIR, SESSION_INDEX_FILE);
7072
- if (!fs15.existsSync(indexPath2)) {
7175
+ const indexPath2 = path15.join(projectPath, HARNESS_DIR, SESSIONS_DIR, SESSION_INDEX_FILE);
7176
+ if (!fs18.existsSync(indexPath2)) {
7073
7177
  return Ok(null);
7074
7178
  }
7075
- const content = fs15.readFileSync(indexPath2, "utf-8");
7179
+ const content = fs18.readFileSync(indexPath2, "utf-8");
7076
7180
  return Ok(content);
7077
7181
  } catch (error) {
7078
7182
  return Err(
@@ -7084,8 +7188,8 @@ function listActiveSessions(projectPath) {
7084
7188
  }
7085
7189
 
7086
7190
  // src/state/session-sections.ts
7087
- import * as fs16 from "fs";
7088
- import * as path13 from "path";
7191
+ import * as fs19 from "fs";
7192
+ import * as path16 from "path";
7089
7193
  import { SESSION_SECTION_NAMES } from "@harness-engineering/types";
7090
7194
  function emptySections() {
7091
7195
  const sections = {};
@@ -7098,12 +7202,12 @@ async function loadSessionState(projectPath, sessionSlug) {
7098
7202
  const dirResult = resolveSessionDir(projectPath, sessionSlug);
7099
7203
  if (!dirResult.ok) return dirResult;
7100
7204
  const sessionDir = dirResult.value;
7101
- const filePath = path13.join(sessionDir, SESSION_STATE_FILE);
7102
- if (!fs16.existsSync(filePath)) {
7205
+ const filePath = path16.join(sessionDir, SESSION_STATE_FILE);
7206
+ if (!fs19.existsSync(filePath)) {
7103
7207
  return Ok(emptySections());
7104
7208
  }
7105
7209
  try {
7106
- const raw = fs16.readFileSync(filePath, "utf-8");
7210
+ const raw = fs19.readFileSync(filePath, "utf-8");
7107
7211
  const parsed = JSON.parse(raw);
7108
7212
  const sections = emptySections();
7109
7213
  for (const name of SESSION_SECTION_NAMES) {
@@ -7124,9 +7228,9 @@ async function saveSessionState(projectPath, sessionSlug, sections) {
7124
7228
  const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
7125
7229
  if (!dirResult.ok) return dirResult;
7126
7230
  const sessionDir = dirResult.value;
7127
- const filePath = path13.join(sessionDir, SESSION_STATE_FILE);
7231
+ const filePath = path16.join(sessionDir, SESSION_STATE_FILE);
7128
7232
  try {
7129
- fs16.writeFileSync(filePath, JSON.stringify(sections, null, 2));
7233
+ fs19.writeFileSync(filePath, JSON.stringify(sections, null, 2));
7130
7234
  return Ok(void 0);
7131
7235
  } catch (error) {
7132
7236
  return Err(
@@ -7175,37 +7279,37 @@ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entry
7175
7279
  }
7176
7280
  function generateEntryId() {
7177
7281
  const timestamp = Date.now().toString(36);
7178
- const random = Math.random().toString(36).substring(2, 8);
7282
+ const random = Buffer.from(crypto.getRandomValues(new Uint8Array(4))).toString("hex");
7179
7283
  return `${timestamp}-${random}`;
7180
7284
  }
7181
7285
 
7182
7286
  // src/state/session-archive.ts
7183
- import * as fs17 from "fs";
7184
- import * as path14 from "path";
7287
+ import * as fs20 from "fs";
7288
+ import * as path17 from "path";
7185
7289
  async function archiveSession(projectPath, sessionSlug) {
7186
7290
  const dirResult = resolveSessionDir(projectPath, sessionSlug);
7187
7291
  if (!dirResult.ok) return dirResult;
7188
7292
  const sessionDir = dirResult.value;
7189
- if (!fs17.existsSync(sessionDir)) {
7293
+ if (!fs20.existsSync(sessionDir)) {
7190
7294
  return Err(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
7191
7295
  }
7192
- const archiveBase = path14.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
7296
+ const archiveBase = path17.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
7193
7297
  try {
7194
- fs17.mkdirSync(archiveBase, { recursive: true });
7298
+ fs20.mkdirSync(archiveBase, { recursive: true });
7195
7299
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
7196
7300
  let archiveName = `${sessionSlug}-${date}`;
7197
7301
  let counter = 1;
7198
- while (fs17.existsSync(path14.join(archiveBase, archiveName))) {
7302
+ while (fs20.existsSync(path17.join(archiveBase, archiveName))) {
7199
7303
  archiveName = `${sessionSlug}-${date}-${counter}`;
7200
7304
  counter++;
7201
7305
  }
7202
- const dest = path14.join(archiveBase, archiveName);
7306
+ const dest = path17.join(archiveBase, archiveName);
7203
7307
  try {
7204
- fs17.renameSync(sessionDir, dest);
7308
+ fs20.renameSync(sessionDir, dest);
7205
7309
  } catch (renameErr) {
7206
7310
  if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
7207
- fs17.cpSync(sessionDir, dest, { recursive: true });
7208
- fs17.rmSync(sessionDir, { recursive: true });
7311
+ fs20.cpSync(sessionDir, dest, { recursive: true });
7312
+ fs20.rmSync(sessionDir, { recursive: true });
7209
7313
  } else {
7210
7314
  throw renameErr;
7211
7315
  }
@@ -7221,8 +7325,8 @@ async function archiveSession(projectPath, sessionSlug) {
7221
7325
  }
7222
7326
 
7223
7327
  // src/state/events.ts
7224
- import * as fs18 from "fs";
7225
- import * as path15 from "path";
7328
+ import * as fs21 from "fs";
7329
+ import * as path18 from "path";
7226
7330
  import { z as z7 } from "zod";
7227
7331
  var SkillEventSchema = z7.object({
7228
7332
  timestamp: z7.string(),
@@ -7243,8 +7347,8 @@ function loadKnownHashes(eventsPath) {
7243
7347
  const cached = knownHashesCache.get(eventsPath);
7244
7348
  if (cached) return cached;
7245
7349
  const hashes = /* @__PURE__ */ new Set();
7246
- if (fs18.existsSync(eventsPath)) {
7247
- const content = fs18.readFileSync(eventsPath, "utf-8");
7350
+ if (fs21.existsSync(eventsPath)) {
7351
+ const content = fs21.readFileSync(eventsPath, "utf-8");
7248
7352
  const lines = content.split("\n").filter((line) => line.trim() !== "");
7249
7353
  for (const line of lines) {
7250
7354
  try {
@@ -7267,8 +7371,8 @@ async function emitEvent(projectPath, event, options) {
7267
7371
  const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7268
7372
  if (!dirResult.ok) return dirResult;
7269
7373
  const stateDir = dirResult.value;
7270
- const eventsPath = path15.join(stateDir, EVENTS_FILE);
7271
- fs18.mkdirSync(stateDir, { recursive: true });
7374
+ const eventsPath = path18.join(stateDir, EVENTS_FILE);
7375
+ fs21.mkdirSync(stateDir, { recursive: true });
7272
7376
  const contentHash = computeEventHash(event, options?.session);
7273
7377
  const knownHashes = loadKnownHashes(eventsPath);
7274
7378
  if (knownHashes.has(contentHash)) {
@@ -7282,7 +7386,7 @@ async function emitEvent(projectPath, event, options) {
7282
7386
  if (options?.session) {
7283
7387
  fullEvent.session = options.session;
7284
7388
  }
7285
- fs18.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
7389
+ fs21.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
7286
7390
  knownHashes.add(contentHash);
7287
7391
  return Ok({ written: true });
7288
7392
  } catch (error) {
@@ -7296,11 +7400,11 @@ async function loadEvents(projectPath, options) {
7296
7400
  const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
7297
7401
  if (!dirResult.ok) return dirResult;
7298
7402
  const stateDir = dirResult.value;
7299
- const eventsPath = path15.join(stateDir, EVENTS_FILE);
7300
- if (!fs18.existsSync(eventsPath)) {
7403
+ const eventsPath = path18.join(stateDir, EVENTS_FILE);
7404
+ if (!fs21.existsSync(eventsPath)) {
7301
7405
  return Ok([]);
7302
7406
  }
7303
- const content = fs18.readFileSync(eventsPath, "utf-8");
7407
+ const content = fs21.readFileSync(eventsPath, "utf-8");
7304
7408
  const lines = content.split("\n").filter((line) => line.trim() !== "");
7305
7409
  const events = [];
7306
7410
  for (const line of lines) {
@@ -7320,15 +7424,25 @@ async function loadEvents(projectPath, options) {
7320
7424
  );
7321
7425
  }
7322
7426
  }
7427
+ function phaseTransitionFields(data) {
7428
+ return {
7429
+ from: data?.from ?? "?",
7430
+ to: data?.to ?? "?",
7431
+ suffix: data?.taskCount ? ` (${data.taskCount} tasks)` : ""
7432
+ };
7433
+ }
7323
7434
  function formatPhaseTransition(event) {
7324
7435
  const data = event.data;
7325
- const suffix = data?.taskCount ? ` (${data.taskCount} tasks)` : "";
7326
- return `phase: ${data?.from ?? "?"} -> ${data?.to ?? "?"}${suffix}`;
7436
+ const { from, to, suffix } = phaseTransitionFields(data);
7437
+ return `phase: ${from} -> ${to}${suffix}`;
7438
+ }
7439
+ function formatGateChecks(checks) {
7440
+ return checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
7327
7441
  }
7328
7442
  function formatGateResult(event) {
7329
7443
  const data = event.data;
7330
7444
  const status = data?.passed ? "passed" : "failed";
7331
- const checks = data?.checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
7445
+ const checks = formatGateChecks(data?.checks);
7332
7446
  return checks ? `gate: ${status} (${checks})` : `gate: ${status}`;
7333
7447
  }
7334
7448
  function formatHandoffDetail(event) {
@@ -7514,7 +7628,7 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
7514
7628
  }
7515
7629
 
7516
7630
  // src/security/scanner.ts
7517
- import * as fs20 from "fs/promises";
7631
+ import * as fs23 from "fs/promises";
7518
7632
  import { minimatch as minimatch4 } from "minimatch";
7519
7633
 
7520
7634
  // src/security/rules/registry.ts
@@ -7602,36 +7716,38 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
7602
7716
  }
7603
7717
 
7604
7718
  // src/security/stack-detector.ts
7605
- import * as fs19 from "fs";
7606
- import * as path16 from "path";
7607
- function detectStack(projectRoot) {
7608
- const stacks = [];
7609
- const pkgJsonPath = path16.join(projectRoot, "package.json");
7610
- if (fs19.existsSync(pkgJsonPath)) {
7611
- stacks.push("node");
7612
- try {
7613
- const pkgJson = JSON.parse(fs19.readFileSync(pkgJsonPath, "utf-8"));
7614
- const allDeps = {
7615
- ...pkgJson.dependencies,
7616
- ...pkgJson.devDependencies
7617
- };
7618
- if (allDeps.react || allDeps["react-dom"]) stacks.push("react");
7619
- if (allDeps.express) stacks.push("express");
7620
- if (allDeps.koa) stacks.push("koa");
7621
- if (allDeps.fastify) stacks.push("fastify");
7622
- if (allDeps.next) stacks.push("next");
7623
- if (allDeps.vue) stacks.push("vue");
7624
- if (allDeps.angular || allDeps["@angular/core"]) stacks.push("angular");
7625
- } catch {
7626
- }
7719
+ import * as fs22 from "fs";
7720
+ import * as path19 from "path";
7721
+ function nodeSubStacks(allDeps) {
7722
+ const found = [];
7723
+ if (allDeps.react || allDeps["react-dom"]) found.push("react");
7724
+ if (allDeps.express) found.push("express");
7725
+ if (allDeps.koa) found.push("koa");
7726
+ if (allDeps.fastify) found.push("fastify");
7727
+ if (allDeps.next) found.push("next");
7728
+ if (allDeps.vue) found.push("vue");
7729
+ if (allDeps.angular || allDeps["@angular/core"]) found.push("angular");
7730
+ return found;
7731
+ }
7732
+ function detectNodeStacks(projectRoot) {
7733
+ const pkgJsonPath = path19.join(projectRoot, "package.json");
7734
+ if (!fs22.existsSync(pkgJsonPath)) return [];
7735
+ const stacks = ["node"];
7736
+ try {
7737
+ const pkgJson = JSON.parse(fs22.readFileSync(pkgJsonPath, "utf-8"));
7738
+ const allDeps = { ...pkgJson.dependencies, ...pkgJson.devDependencies };
7739
+ stacks.push(...nodeSubStacks(allDeps));
7740
+ } catch {
7627
7741
  }
7628
- const goModPath = path16.join(projectRoot, "go.mod");
7629
- if (fs19.existsSync(goModPath)) {
7742
+ return stacks;
7743
+ }
7744
+ function detectStack(projectRoot) {
7745
+ const stacks = [...detectNodeStacks(projectRoot)];
7746
+ if (fs22.existsSync(path19.join(projectRoot, "go.mod"))) {
7630
7747
  stacks.push("go");
7631
7748
  }
7632
- const requirementsPath = path16.join(projectRoot, "requirements.txt");
7633
- const pyprojectPath = path16.join(projectRoot, "pyproject.toml");
7634
- if (fs19.existsSync(requirementsPath) || fs19.existsSync(pyprojectPath)) {
7749
+ const hasPython = fs22.existsSync(path19.join(projectRoot, "requirements.txt")) || fs22.existsSync(path19.join(projectRoot, "pyproject.toml"));
7750
+ if (hasPython) {
7635
7751
  stacks.push("python");
7636
7752
  }
7637
7753
  return stacks;
@@ -8462,7 +8578,7 @@ var SecurityScanner = class {
8462
8578
  }
8463
8579
  async scanFile(filePath) {
8464
8580
  if (!this.config.enabled) return [];
8465
- const content = await fs20.readFile(filePath, "utf-8");
8581
+ const content = await fs23.readFile(filePath, "utf-8");
8466
8582
  return this.scanContentForFile(content, filePath, 1);
8467
8583
  }
8468
8584
  scanContentForFile(content, filePath, startLine = 1) {
@@ -8475,6 +8591,56 @@ var SecurityScanner = class {
8475
8591
  });
8476
8592
  return this.scanLinesWithRules(lines, applicableRules, filePath, startLine);
8477
8593
  }
8594
+ /** Build a finding for a suppression comment that is missing its justification. */
8595
+ buildSuppressionFinding(rule, filePath, lineNumber, line) {
8596
+ return {
8597
+ ruleId: rule.id,
8598
+ ruleName: rule.name,
8599
+ category: rule.category,
8600
+ severity: this.config.strict ? "error" : "warning",
8601
+ confidence: "high",
8602
+ file: filePath,
8603
+ line: lineNumber,
8604
+ match: line.trim(),
8605
+ context: line,
8606
+ message: `Suppression of ${rule.id} requires justification: // harness-ignore ${rule.id}: <reason>`,
8607
+ remediation: `Add justification after colon: // harness-ignore ${rule.id}: false positive because ...`
8608
+ };
8609
+ }
8610
+ /** Check one line against a rule's patterns; return a finding or null. */
8611
+ matchRuleLine(rule, resolved, filePath, lineNumber, line) {
8612
+ for (const pattern of rule.patterns) {
8613
+ pattern.lastIndex = 0;
8614
+ if (!pattern.test(line)) continue;
8615
+ return {
8616
+ ruleId: rule.id,
8617
+ ruleName: rule.name,
8618
+ category: rule.category,
8619
+ severity: resolved,
8620
+ confidence: rule.confidence,
8621
+ file: filePath,
8622
+ line: lineNumber,
8623
+ match: line.trim(),
8624
+ context: line,
8625
+ message: rule.message,
8626
+ remediation: rule.remediation,
8627
+ ...rule.references ? { references: rule.references } : {}
8628
+ };
8629
+ }
8630
+ return null;
8631
+ }
8632
+ /** Scan a single line against a resolved rule; push any findings into the array. */
8633
+ scanLineForRule(rule, resolved, line, lineNumber, filePath, findings) {
8634
+ const suppressionMatch = parseHarnessIgnore(line, rule.id);
8635
+ if (suppressionMatch) {
8636
+ if (!suppressionMatch.justification) {
8637
+ findings.push(this.buildSuppressionFinding(rule, filePath, lineNumber, line));
8638
+ }
8639
+ return;
8640
+ }
8641
+ const finding = this.matchRuleLine(rule, resolved, filePath, lineNumber, line);
8642
+ if (finding) findings.push(finding);
8643
+ }
8478
8644
  /**
8479
8645
  * Core scanning loop shared by scanContent and scanContentForFile.
8480
8646
  * Evaluates each rule against each line, handling suppression (FP gate)
@@ -8491,46 +8657,7 @@ var SecurityScanner = class {
8491
8657
  );
8492
8658
  if (resolved === "off") continue;
8493
8659
  for (let i = 0; i < lines.length; i++) {
8494
- const line = lines[i] ?? "";
8495
- const suppressionMatch = parseHarnessIgnore(line, rule.id);
8496
- if (suppressionMatch) {
8497
- if (!suppressionMatch.justification) {
8498
- findings.push({
8499
- ruleId: rule.id,
8500
- ruleName: rule.name,
8501
- category: rule.category,
8502
- severity: this.config.strict ? "error" : "warning",
8503
- confidence: "high",
8504
- file: filePath,
8505
- line: startLine + i,
8506
- match: line.trim(),
8507
- context: line,
8508
- message: `Suppression of ${rule.id} requires justification: // harness-ignore ${rule.id}: <reason>`,
8509
- remediation: `Add justification after colon: // harness-ignore ${rule.id}: false positive because ...`
8510
- });
8511
- }
8512
- continue;
8513
- }
8514
- for (const pattern of rule.patterns) {
8515
- pattern.lastIndex = 0;
8516
- if (pattern.test(line)) {
8517
- findings.push({
8518
- ruleId: rule.id,
8519
- ruleName: rule.name,
8520
- category: rule.category,
8521
- severity: resolved,
8522
- confidence: rule.confidence,
8523
- file: filePath,
8524
- line: startLine + i,
8525
- match: line.trim(),
8526
- context: line,
8527
- message: rule.message,
8528
- remediation: rule.remediation,
8529
- ...rule.references ? { references: rule.references } : {}
8530
- });
8531
- break;
8532
- }
8533
- }
8660
+ this.scanLineForRule(rule, resolved, lines[i] ?? "", startLine + i, filePath, findings);
8534
8661
  }
8535
8662
  }
8536
8663
  return findings;
@@ -8789,19 +8916,19 @@ var DESTRUCTIVE_BASH = [
8789
8916
  ];
8790
8917
 
8791
8918
  // src/security/taint.ts
8792
- import { readFileSync as readFileSync17, writeFileSync as writeFileSync12, unlinkSync, mkdirSync as mkdirSync12, readdirSync as readdirSync3 } from "fs";
8793
- import { join as join24, dirname as dirname9 } from "path";
8919
+ import { readFileSync as readFileSync20, writeFileSync as writeFileSync14, unlinkSync, mkdirSync as mkdirSync13, readdirSync as readdirSync3 } from "fs";
8920
+ import { join as join27, dirname as dirname9 } from "path";
8794
8921
  var TAINT_DURATION_MS = 30 * 60 * 1e3;
8795
8922
  var DEFAULT_SESSION_ID = "default";
8796
8923
  function getTaintFilePath(projectRoot, sessionId) {
8797
8924
  const id = sessionId || DEFAULT_SESSION_ID;
8798
- return join24(projectRoot, ".harness", `session-taint-${id}.json`);
8925
+ return join27(projectRoot, ".harness", `session-taint-${id}.json`);
8799
8926
  }
8800
8927
  function readTaint(projectRoot, sessionId) {
8801
8928
  const filePath = getTaintFilePath(projectRoot, sessionId);
8802
8929
  let content;
8803
8930
  try {
8804
- content = readFileSync17(filePath, "utf8");
8931
+ content = readFileSync20(filePath, "utf8");
8805
8932
  } catch {
8806
8933
  return null;
8807
8934
  }
@@ -8846,7 +8973,7 @@ function writeTaint(projectRoot, sessionId, reason, findings, source) {
8846
8973
  const filePath = getTaintFilePath(projectRoot, id);
8847
8974
  const now = (/* @__PURE__ */ new Date()).toISOString();
8848
8975
  const dir = dirname9(filePath);
8849
- mkdirSync12(dir, { recursive: true });
8976
+ mkdirSync13(dir, { recursive: true });
8850
8977
  const existing = readTaint(projectRoot, id);
8851
8978
  const maxSeverity = findings.some((f) => f.severity === "high") ? "high" : "medium";
8852
8979
  const taintFindings = findings.map((f) => ({
@@ -8864,7 +8991,7 @@ function writeTaint(projectRoot, sessionId, reason, findings, source) {
8864
8991
  severity: existing?.severity === "high" || maxSeverity === "high" ? "high" : "medium",
8865
8992
  findings: [...existing?.findings || [], ...taintFindings]
8866
8993
  };
8867
- writeFileSync12(filePath, JSON.stringify(state, null, 2) + "\n");
8994
+ writeFileSync14(filePath, JSON.stringify(state, null, 2) + "\n");
8868
8995
  return state;
8869
8996
  }
8870
8997
  function clearTaint(projectRoot, sessionId) {
@@ -8877,14 +9004,14 @@ function clearTaint(projectRoot, sessionId) {
8877
9004
  return 0;
8878
9005
  }
8879
9006
  }
8880
- const harnessDir = join24(projectRoot, ".harness");
9007
+ const harnessDir = join27(projectRoot, ".harness");
8881
9008
  let count = 0;
8882
9009
  try {
8883
9010
  const files = readdirSync3(harnessDir);
8884
9011
  for (const file of files) {
8885
9012
  if (file.startsWith("session-taint-") && file.endsWith(".json")) {
8886
9013
  try {
8887
- unlinkSync(join24(harnessDir, file));
9014
+ unlinkSync(join27(harnessDir, file));
8888
9015
  count++;
8889
9016
  } catch {
8890
9017
  }
@@ -8895,7 +9022,7 @@ function clearTaint(projectRoot, sessionId) {
8895
9022
  return count;
8896
9023
  }
8897
9024
  function listTaintedSessions(projectRoot) {
8898
- const harnessDir = join24(projectRoot, ".harness");
9025
+ const harnessDir = join27(projectRoot, ".harness");
8899
9026
  const sessions = [];
8900
9027
  try {
8901
9028
  const files = readdirSync3(harnessDir);
@@ -8965,7 +9092,7 @@ function mapSecurityFindings(secFindings, existing) {
8965
9092
  }
8966
9093
 
8967
9094
  // src/ci/check-orchestrator.ts
8968
- import * as path17 from "path";
9095
+ import * as path20 from "path";
8969
9096
  import { GraphStore, queryTraceability } from "@harness-engineering/graph";
8970
9097
  var ALL_CHECKS = [
8971
9098
  "validate",
@@ -8980,7 +9107,7 @@ var ALL_CHECKS = [
8980
9107
  ];
8981
9108
  async function runValidateCheck(projectRoot, config) {
8982
9109
  const issues = [];
8983
- const agentsPath = path17.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
9110
+ const agentsPath = path20.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
8984
9111
  const result = await validateAgentsMap(agentsPath);
8985
9112
  if (!result.ok) {
8986
9113
  issues.push({ severity: "error", message: result.error.message });
@@ -9037,7 +9164,7 @@ async function runDepsCheck(projectRoot, config) {
9037
9164
  }
9038
9165
  async function runDocsCheck(projectRoot, config) {
9039
9166
  const issues = [];
9040
- const docsDir = path17.join(projectRoot, config.docsDir ?? "docs");
9167
+ const docsDir = path20.join(projectRoot, config.docsDir ?? "docs");
9041
9168
  const entropyConfig = config.entropy || {};
9042
9169
  const result = await checkDocCoverage("project", {
9043
9170
  docsDir,
@@ -9222,7 +9349,7 @@ async function runTraceabilityCheck(projectRoot, config) {
9222
9349
  const issues = [];
9223
9350
  const traceConfig = config.traceability || {};
9224
9351
  if (traceConfig.enabled === false) return issues;
9225
- const graphDir = path17.join(projectRoot, ".harness", "graph");
9352
+ const graphDir = path20.join(projectRoot, ".harness", "graph");
9226
9353
  const store = new GraphStore();
9227
9354
  const loaded = await store.load(graphDir);
9228
9355
  if (!loaded) {
@@ -9351,7 +9478,7 @@ async function runCIChecks(input) {
9351
9478
  }
9352
9479
 
9353
9480
  // src/review/mechanical-checks.ts
9354
- import * as path18 from "path";
9481
+ import * as path21 from "path";
9355
9482
  async function runMechanicalChecks(options) {
9356
9483
  const { projectRoot, config, skip = [], changedFiles } = options;
9357
9484
  const findings = [];
@@ -9363,7 +9490,7 @@ async function runMechanicalChecks(options) {
9363
9490
  };
9364
9491
  if (!skip.includes("validate")) {
9365
9492
  try {
9366
- const agentsPath = path18.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
9493
+ const agentsPath = path21.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
9367
9494
  const result = await validateAgentsMap(agentsPath);
9368
9495
  if (!result.ok) {
9369
9496
  statuses.validate = "fail";
@@ -9400,7 +9527,7 @@ async function runMechanicalChecks(options) {
9400
9527
  statuses.validate = "fail";
9401
9528
  findings.push({
9402
9529
  tool: "validate",
9403
- file: path18.join(projectRoot, "AGENTS.md"),
9530
+ file: path21.join(projectRoot, "AGENTS.md"),
9404
9531
  message: err instanceof Error ? err.message : String(err),
9405
9532
  severity: "error"
9406
9533
  });
@@ -9464,7 +9591,7 @@ async function runMechanicalChecks(options) {
9464
9591
  (async () => {
9465
9592
  const localFindings = [];
9466
9593
  try {
9467
- const docsDir = path18.join(projectRoot, config.docsDir ?? "docs");
9594
+ const docsDir = path21.join(projectRoot, config.docsDir ?? "docs");
9468
9595
  const result = await checkDocCoverage("project", { docsDir });
9469
9596
  if (!result.ok) {
9470
9597
  statuses["check-docs"] = "warn";
@@ -9491,7 +9618,7 @@ async function runMechanicalChecks(options) {
9491
9618
  statuses["check-docs"] = "warn";
9492
9619
  localFindings.push({
9493
9620
  tool: "check-docs",
9494
- file: path18.join(projectRoot, "docs"),
9621
+ file: path21.join(projectRoot, "docs"),
9495
9622
  message: err instanceof Error ? err.message : String(err),
9496
9623
  severity: "warning"
9497
9624
  });
@@ -9639,7 +9766,7 @@ function detectChangeType(commitMessage, diff2) {
9639
9766
  }
9640
9767
 
9641
9768
  // src/review/context-scoper.ts
9642
- import * as path19 from "path";
9769
+ import * as path22 from "path";
9643
9770
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
9644
9771
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
9645
9772
  function computeContextBudget(diffLines) {
@@ -9647,18 +9774,18 @@ function computeContextBudget(diffLines) {
9647
9774
  return diffLines;
9648
9775
  }
9649
9776
  function isWithinProject(absPath, projectRoot) {
9650
- const resolvedRoot = path19.resolve(projectRoot) + path19.sep;
9651
- const resolvedPath = path19.resolve(absPath);
9652
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path19.resolve(projectRoot);
9777
+ const resolvedRoot = path22.resolve(projectRoot) + path22.sep;
9778
+ const resolvedPath = path22.resolve(absPath);
9779
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path22.resolve(projectRoot);
9653
9780
  }
9654
9781
  async function readContextFile(projectRoot, filePath, reason) {
9655
- const absPath = path19.isAbsolute(filePath) ? filePath : path19.join(projectRoot, filePath);
9782
+ const absPath = path22.isAbsolute(filePath) ? filePath : path22.join(projectRoot, filePath);
9656
9783
  if (!isWithinProject(absPath, projectRoot)) return null;
9657
9784
  const result = await readFileContent(absPath);
9658
9785
  if (!result.ok) return null;
9659
9786
  const content = result.value;
9660
9787
  const lines = content.split("\n").length;
9661
- const relPath = path19.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9788
+ const relPath = path22.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
9662
9789
  return { path: relPath, content, reason, lines };
9663
9790
  }
9664
9791
  function extractImportSources(content) {
@@ -9673,18 +9800,18 @@ function extractImportSources(content) {
9673
9800
  }
9674
9801
  async function resolveImportPath(projectRoot, fromFile, importSource) {
9675
9802
  if (!importSource.startsWith(".")) return null;
9676
- const fromDir = path19.dirname(path19.join(projectRoot, fromFile));
9677
- const basePath = path19.resolve(fromDir, importSource);
9803
+ const fromDir = path22.dirname(path22.join(projectRoot, fromFile));
9804
+ const basePath = path22.resolve(fromDir, importSource);
9678
9805
  if (!isWithinProject(basePath, projectRoot)) return null;
9679
9806
  const relBase = relativePosix(projectRoot, basePath);
9680
9807
  const candidates = [
9681
9808
  relBase + ".ts",
9682
9809
  relBase + ".tsx",
9683
9810
  relBase + ".mts",
9684
- path19.join(relBase, "index.ts")
9811
+ path22.join(relBase, "index.ts")
9685
9812
  ];
9686
9813
  for (const candidate of candidates) {
9687
- const absCandidate = path19.join(projectRoot, candidate);
9814
+ const absCandidate = path22.join(projectRoot, candidate);
9688
9815
  if (await fileExists(absCandidate)) {
9689
9816
  return candidate;
9690
9817
  }
@@ -9692,7 +9819,7 @@ async function resolveImportPath(projectRoot, fromFile, importSource) {
9692
9819
  return null;
9693
9820
  }
9694
9821
  async function findTestFiles(projectRoot, sourceFile) {
9695
- const baseName = path19.basename(sourceFile, path19.extname(sourceFile));
9822
+ const baseName = path22.basename(sourceFile, path22.extname(sourceFile));
9696
9823
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
9697
9824
  const results = await findFiles(pattern, projectRoot);
9698
9825
  return results.map((f) => relativePosix(projectRoot, f));
@@ -9950,37 +10077,30 @@ function extractConventionRules(bundle) {
9950
10077
  }
9951
10078
  return rules;
9952
10079
  }
9953
- function findMissingJsDoc(bundle) {
10080
+ var EXPORT_RE = /export\s+(?:async\s+)?(?:function|const|class|interface|type)\s+(\w+)/;
10081
+ function hasPrecedingJsDoc(lines, i) {
10082
+ for (let j = i - 1; j >= 0; j--) {
10083
+ const prev = lines[j].trim();
10084
+ if (prev === "") continue;
10085
+ return prev.endsWith("*/");
10086
+ }
10087
+ return false;
10088
+ }
10089
+ function scanFileForMissingJsDoc(filePath, lines) {
9954
10090
  const missing = [];
9955
- for (const cf of bundle.changedFiles) {
9956
- const lines = cf.content.split("\n");
9957
- for (let i = 0; i < lines.length; i++) {
9958
- const line = lines[i];
9959
- const exportMatch = line.match(
9960
- /export\s+(?:async\s+)?(?:function|const|class|interface|type)\s+(\w+)/
9961
- );
9962
- if (exportMatch) {
9963
- let hasJsDoc = false;
9964
- for (let j = i - 1; j >= 0; j--) {
9965
- const prev = lines[j].trim();
9966
- if (prev === "") continue;
9967
- if (prev.endsWith("*/")) {
9968
- hasJsDoc = true;
9969
- }
9970
- break;
9971
- }
9972
- if (!hasJsDoc) {
9973
- missing.push({
9974
- file: cf.path,
9975
- line: i + 1,
9976
- exportName: exportMatch[1]
9977
- });
9978
- }
9979
- }
10091
+ for (let i = 0; i < lines.length; i++) {
10092
+ const exportMatch = lines[i].match(EXPORT_RE);
10093
+ if (exportMatch && !hasPrecedingJsDoc(lines, i)) {
10094
+ missing.push({ file: filePath, line: i + 1, exportName: exportMatch[1] });
9980
10095
  }
9981
10096
  }
9982
10097
  return missing;
9983
10098
  }
10099
+ function findMissingJsDoc(bundle) {
10100
+ return bundle.changedFiles.flatMap(
10101
+ (cf) => scanFileForMissingJsDoc(cf.path, cf.content.split("\n"))
10102
+ );
10103
+ }
9984
10104
  function checkMissingJsDoc(bundle, rules) {
9985
10105
  const jsDocRule = rules.find((r) => r.text.toLowerCase().includes("jsdoc"));
9986
10106
  if (!jsDocRule) return [];
@@ -10045,29 +10165,27 @@ function checkChangeTypeSpecific(bundle) {
10045
10165
  return [];
10046
10166
  }
10047
10167
  }
10168
+ function checkFileResultTypeConvention(cf, bundle, rule) {
10169
+ const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
10170
+ const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
10171
+ if (!hasTryCatch || usesResult) return null;
10172
+ return {
10173
+ id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
10174
+ file: cf.path,
10175
+ lineRange: [1, cf.lines],
10176
+ domain: "compliance",
10177
+ severity: "suggestion",
10178
+ title: "Fallible operation uses try/catch instead of Result type",
10179
+ rationale: `Convention requires using Result type for fallible operations (from ${rule.source}).`,
10180
+ suggestion: "Refactor error handling to use the Result type pattern.",
10181
+ evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${rule.text}"`],
10182
+ validatedBy: "heuristic"
10183
+ };
10184
+ }
10048
10185
  function checkResultTypeConvention(bundle, rules) {
10049
10186
  const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
10050
10187
  if (!resultTypeRule) return [];
10051
- const findings = [];
10052
- for (const cf of bundle.changedFiles) {
10053
- const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
10054
- const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
10055
- if (hasTryCatch && !usesResult) {
10056
- findings.push({
10057
- id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
10058
- file: cf.path,
10059
- lineRange: [1, cf.lines],
10060
- domain: "compliance",
10061
- severity: "suggestion",
10062
- title: "Fallible operation uses try/catch instead of Result type",
10063
- rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
10064
- suggestion: "Refactor error handling to use the Result type pattern.",
10065
- evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
10066
- validatedBy: "heuristic"
10067
- });
10068
- }
10069
- }
10070
- return findings;
10188
+ return bundle.changedFiles.map((cf) => checkFileResultTypeConvention(cf, bundle, resultTypeRule)).filter((f) => f !== null);
10071
10189
  }
10072
10190
  function runComplianceAgent(bundle) {
10073
10191
  const rules = extractConventionRules(bundle);
@@ -10093,53 +10211,58 @@ var BUG_DETECTION_DESCRIPTOR = {
10093
10211
  "Test coverage \u2014 tests for happy path, error paths, and edge cases"
10094
10212
  ]
10095
10213
  };
10214
+ function hasPrecedingZeroCheck(lines, i) {
10215
+ const preceding = lines.slice(Math.max(0, i - 3), i).join("\n");
10216
+ return preceding.includes("=== 0") || preceding.includes("!== 0") || preceding.includes("== 0") || preceding.includes("!= 0");
10217
+ }
10096
10218
  function detectDivisionByZero(bundle) {
10097
10219
  const findings = [];
10098
10220
  for (const cf of bundle.changedFiles) {
10099
10221
  const lines = cf.content.split("\n");
10100
10222
  for (let i = 0; i < lines.length; i++) {
10101
10223
  const line = lines[i];
10102
- if (line.match(/[^=!<>]\s*\/\s*[a-zA-Z_]\w*/) && !line.includes("//")) {
10103
- const preceding = lines.slice(Math.max(0, i - 3), i).join("\n");
10104
- if (!preceding.includes("=== 0") && !preceding.includes("!== 0") && !preceding.includes("== 0") && !preceding.includes("!= 0")) {
10105
- findings.push({
10106
- id: makeFindingId("bug", cf.path, i + 1, "division by zero"),
10107
- file: cf.path,
10108
- lineRange: [i + 1, i + 1],
10109
- domain: "bug",
10110
- severity: "important",
10111
- title: "Potential division by zero without guard",
10112
- rationale: "Division operation found without a preceding zero check on the divisor. This can cause Infinity or NaN at runtime.",
10113
- suggestion: "Add a check for zero before dividing, or use a safe division utility.",
10114
- evidence: [`Line ${i + 1}: ${line.trim()}`],
10115
- validatedBy: "heuristic"
10116
- });
10117
- }
10118
- }
10224
+ if (!line.match(/[^=!<>]\s*\/\s*[a-zA-Z_]\w*/) || line.includes("//")) continue;
10225
+ if (hasPrecedingZeroCheck(lines, i)) continue;
10226
+ findings.push({
10227
+ id: makeFindingId("bug", cf.path, i + 1, "division by zero"),
10228
+ file: cf.path,
10229
+ lineRange: [i + 1, i + 1],
10230
+ domain: "bug",
10231
+ severity: "important",
10232
+ title: "Potential division by zero without guard",
10233
+ rationale: "Division operation found without a preceding zero check on the divisor. This can cause Infinity or NaN at runtime.",
10234
+ suggestion: "Add a check for zero before dividing, or use a safe division utility.",
10235
+ evidence: [`Line ${i + 1}: ${line.trim()}`],
10236
+ validatedBy: "heuristic"
10237
+ });
10119
10238
  }
10120
10239
  }
10121
10240
  return findings;
10122
10241
  }
10242
+ function isEmptyCatch(lines, i) {
10243
+ const line = lines[i];
10244
+ if (line.match(/catch\s*\([^)]*\)\s*\{\s*\}/)) return true;
10245
+ return line.match(/catch\s*\([^)]*\)\s*\{/) !== null && i + 1 < lines.length && lines[i + 1].trim() === "}";
10246
+ }
10123
10247
  function detectEmptyCatch(bundle) {
10124
10248
  const findings = [];
10125
10249
  for (const cf of bundle.changedFiles) {
10126
10250
  const lines = cf.content.split("\n");
10127
10251
  for (let i = 0; i < lines.length; i++) {
10252
+ if (!isEmptyCatch(lines, i)) continue;
10128
10253
  const line = lines[i];
10129
- if (line.match(/catch\s*\([^)]*\)\s*\{\s*\}/) || line.match(/catch\s*\([^)]*\)\s*\{/) && i + 1 < lines.length && lines[i + 1].trim() === "}") {
10130
- findings.push({
10131
- id: makeFindingId("bug", cf.path, i + 1, "empty catch block"),
10132
- file: cf.path,
10133
- lineRange: [i + 1, i + 2],
10134
- domain: "bug",
10135
- severity: "important",
10136
- title: "Empty catch block silently swallows error",
10137
- rationale: "Catching an error without handling, logging, or re-throwing it hides failures and makes debugging difficult.",
10138
- suggestion: "Log the error, re-throw it, or handle it explicitly. If intentionally ignoring, add a comment explaining why.",
10139
- evidence: [`Line ${i + 1}: ${line.trim()}`],
10140
- validatedBy: "heuristic"
10141
- });
10142
- }
10254
+ findings.push({
10255
+ id: makeFindingId("bug", cf.path, i + 1, "empty catch block"),
10256
+ file: cf.path,
10257
+ lineRange: [i + 1, i + 2],
10258
+ domain: "bug",
10259
+ severity: "important",
10260
+ title: "Empty catch block silently swallows error",
10261
+ rationale: "Catching an error without handling, logging, or re-throwing it hides failures and makes debugging difficult.",
10262
+ suggestion: "Log the error, re-throw it, or handle it explicitly. If intentionally ignoring, add a comment explaining why.",
10263
+ evidence: [`Line ${i + 1}: ${line.trim()}`],
10264
+ validatedBy: "heuristic"
10265
+ });
10143
10266
  }
10144
10267
  }
10145
10268
  return findings;
@@ -10197,34 +10320,102 @@ var SECRET_PATTERNS = [
10197
10320
  ];
10198
10321
  var SQL_CONCAT_PATTERN = /(?:SELECT|INSERT|UPDATE|DELETE|DROP|CREATE|ALTER)\s+.*?\+\s*\w+|`[^`]*\$\{[^}]*\}[^`]*(?:SELECT|INSERT|UPDATE|DELETE|WHERE)/i;
10199
10322
  var SHELL_EXEC_PATTERN = /(?:exec|execSync|spawn|spawnSync)\s*\(\s*`[^`]*\$\{/;
10323
+ function makeEvalFinding(file, lineNum, line) {
10324
+ return {
10325
+ id: makeFindingId("security", file, lineNum, "eval usage CWE-94"),
10326
+ file,
10327
+ lineRange: [lineNum, lineNum],
10328
+ domain: "security",
10329
+ severity: "critical",
10330
+ title: `Dangerous ${"eval"}() or new ${"Function"}() usage`,
10331
+ rationale: `${"eval"}() and new ${"Function"}() execute arbitrary code. If user input reaches these calls, it enables Remote Code Execution (CWE-94).`,
10332
+ suggestion: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
10333
+ evidence: [`Line ${lineNum}: ${line.trim()}`],
10334
+ validatedBy: "heuristic",
10335
+ cweId: "CWE-94",
10336
+ owaspCategory: "A03:2021 Injection",
10337
+ confidence: "high",
10338
+ remediation: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
10339
+ references: [
10340
+ "https://cwe.mitre.org/data/definitions/94.html",
10341
+ "https://owasp.org/Top10/A03_2021-Injection/"
10342
+ ]
10343
+ };
10344
+ }
10345
+ function makeSecretFinding(file, lineNum) {
10346
+ return {
10347
+ id: makeFindingId("security", file, lineNum, "hardcoded secret CWE-798"),
10348
+ file,
10349
+ lineRange: [lineNum, lineNum],
10350
+ domain: "security",
10351
+ severity: "critical",
10352
+ title: "Hardcoded secret or API key detected",
10353
+ rationale: "Hardcoded secrets in source code can be extracted from version history even after removal. Use environment variables or a secrets manager (CWE-798).",
10354
+ suggestion: "Move the secret to an environment variable and access it via process.env.",
10355
+ evidence: [`Line ${lineNum}: [secret detected \u2014 value redacted]`],
10356
+ validatedBy: "heuristic",
10357
+ cweId: "CWE-798",
10358
+ owaspCategory: "A07:2021 Identification and Authentication Failures",
10359
+ confidence: "high",
10360
+ remediation: "Move the secret to an environment variable and access it via process.env.",
10361
+ references: [
10362
+ "https://cwe.mitre.org/data/definitions/798.html",
10363
+ "https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/"
10364
+ ]
10365
+ };
10366
+ }
10367
+ function makeSqlFinding(file, lineNum, line) {
10368
+ return {
10369
+ id: makeFindingId("security", file, lineNum, "SQL injection CWE-89"),
10370
+ file,
10371
+ lineRange: [lineNum, lineNum],
10372
+ domain: "security",
10373
+ severity: "critical",
10374
+ title: "Potential SQL injection via string concatenation",
10375
+ rationale: "Building SQL queries with string concatenation or template literals allows attackers to inject malicious SQL (CWE-89).",
10376
+ suggestion: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
10377
+ evidence: [`Line ${lineNum}: ${line.trim()}`],
10378
+ validatedBy: "heuristic",
10379
+ cweId: "CWE-89",
10380
+ owaspCategory: "A03:2021 Injection",
10381
+ confidence: "high",
10382
+ remediation: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
10383
+ references: [
10384
+ "https://cwe.mitre.org/data/definitions/89.html",
10385
+ "https://owasp.org/Top10/A03_2021-Injection/"
10386
+ ]
10387
+ };
10388
+ }
10389
+ function makeCommandFinding(file, lineNum, line) {
10390
+ return {
10391
+ id: makeFindingId("security", file, lineNum, "command injection CWE-78"),
10392
+ file,
10393
+ lineRange: [lineNum, lineNum],
10394
+ domain: "security",
10395
+ severity: "critical",
10396
+ title: "Potential command injection via shell exec with interpolation",
10397
+ rationale: "Using exec/spawn with template literal interpolation allows attackers to inject shell commands (CWE-78).",
10398
+ suggestion: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
10399
+ evidence: [`Line ${lineNum}: ${line.trim()}`],
10400
+ validatedBy: "heuristic",
10401
+ cweId: "CWE-78",
10402
+ owaspCategory: "A03:2021 Injection",
10403
+ confidence: "high",
10404
+ remediation: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
10405
+ references: [
10406
+ "https://cwe.mitre.org/data/definitions/78.html",
10407
+ "https://owasp.org/Top10/A03_2021-Injection/"
10408
+ ]
10409
+ };
10410
+ }
10200
10411
  function detectEvalUsage(bundle) {
10201
10412
  const findings = [];
10202
10413
  for (const cf of bundle.changedFiles) {
10203
10414
  const lines = cf.content.split("\n");
10204
10415
  for (let i = 0; i < lines.length; i++) {
10205
10416
  const line = lines[i];
10206
- if (EVAL_PATTERN.test(line)) {
10207
- findings.push({
10208
- id: makeFindingId("security", cf.path, i + 1, "eval usage CWE-94"),
10209
- file: cf.path,
10210
- lineRange: [i + 1, i + 1],
10211
- domain: "security",
10212
- severity: "critical",
10213
- title: `Dangerous ${"eval"}() or new ${"Function"}() usage`,
10214
- rationale: `${"eval"}() and new ${"Function"}() execute arbitrary code. If user input reaches these calls, it enables Remote Code Execution (CWE-94).`,
10215
- suggestion: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
10216
- evidence: [`Line ${i + 1}: ${line.trim()}`],
10217
- validatedBy: "heuristic",
10218
- cweId: "CWE-94",
10219
- owaspCategory: "A03:2021 Injection",
10220
- confidence: "high",
10221
- remediation: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
10222
- references: [
10223
- "https://cwe.mitre.org/data/definitions/94.html",
10224
- "https://owasp.org/Top10/A03_2021-Injection/"
10225
- ]
10226
- });
10227
- }
10417
+ if (!EVAL_PATTERN.test(line)) continue;
10418
+ findings.push(makeEvalFinding(cf.path, i + 1, line));
10228
10419
  }
10229
10420
  }
10230
10421
  return findings;
@@ -10236,31 +10427,9 @@ function detectHardcodedSecrets(bundle) {
10236
10427
  for (let i = 0; i < lines.length; i++) {
10237
10428
  const line = lines[i];
10238
10429
  const codePart = line.includes("//") ? line.slice(0, line.indexOf("//")) : line;
10239
- for (const pattern of SECRET_PATTERNS) {
10240
- if (pattern.test(codePart)) {
10241
- findings.push({
10242
- id: makeFindingId("security", cf.path, i + 1, "hardcoded secret CWE-798"),
10243
- file: cf.path,
10244
- lineRange: [i + 1, i + 1],
10245
- domain: "security",
10246
- severity: "critical",
10247
- title: "Hardcoded secret or API key detected",
10248
- rationale: "Hardcoded secrets in source code can be extracted from version history even after removal. Use environment variables or a secrets manager (CWE-798).",
10249
- suggestion: "Move the secret to an environment variable and access it via process.env.",
10250
- evidence: [`Line ${i + 1}: [secret detected \u2014 value redacted]`],
10251
- validatedBy: "heuristic",
10252
- cweId: "CWE-798",
10253
- owaspCategory: "A07:2021 Identification and Authentication Failures",
10254
- confidence: "high",
10255
- remediation: "Move the secret to an environment variable and access it via process.env.",
10256
- references: [
10257
- "https://cwe.mitre.org/data/definitions/798.html",
10258
- "https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/"
10259
- ]
10260
- });
10261
- break;
10262
- }
10263
- }
10430
+ const matched = SECRET_PATTERNS.some((p) => p.test(codePart));
10431
+ if (!matched) continue;
10432
+ findings.push(makeSecretFinding(cf.path, i + 1));
10264
10433
  }
10265
10434
  }
10266
10435
  return findings;
@@ -10271,28 +10440,8 @@ function detectSqlInjection(bundle) {
10271
10440
  const lines = cf.content.split("\n");
10272
10441
  for (let i = 0; i < lines.length; i++) {
10273
10442
  const line = lines[i];
10274
- if (SQL_CONCAT_PATTERN.test(line)) {
10275
- findings.push({
10276
- id: makeFindingId("security", cf.path, i + 1, "SQL injection CWE-89"),
10277
- file: cf.path,
10278
- lineRange: [i + 1, i + 1],
10279
- domain: "security",
10280
- severity: "critical",
10281
- title: "Potential SQL injection via string concatenation",
10282
- rationale: "Building SQL queries with string concatenation or template literals allows attackers to inject malicious SQL (CWE-89).",
10283
- suggestion: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
10284
- evidence: [`Line ${i + 1}: ${line.trim()}`],
10285
- validatedBy: "heuristic",
10286
- cweId: "CWE-89",
10287
- owaspCategory: "A03:2021 Injection",
10288
- confidence: "high",
10289
- remediation: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
10290
- references: [
10291
- "https://cwe.mitre.org/data/definitions/89.html",
10292
- "https://owasp.org/Top10/A03_2021-Injection/"
10293
- ]
10294
- });
10295
- }
10443
+ if (!SQL_CONCAT_PATTERN.test(line)) continue;
10444
+ findings.push(makeSqlFinding(cf.path, i + 1, line));
10296
10445
  }
10297
10446
  }
10298
10447
  return findings;
@@ -10303,28 +10452,8 @@ function detectCommandInjection(bundle) {
10303
10452
  const lines = cf.content.split("\n");
10304
10453
  for (let i = 0; i < lines.length; i++) {
10305
10454
  const line = lines[i];
10306
- if (SHELL_EXEC_PATTERN.test(line)) {
10307
- findings.push({
10308
- id: makeFindingId("security", cf.path, i + 1, "command injection CWE-78"),
10309
- file: cf.path,
10310
- lineRange: [i + 1, i + 1],
10311
- domain: "security",
10312
- severity: "critical",
10313
- title: "Potential command injection via shell exec with interpolation",
10314
- rationale: "Using exec/spawn with template literal interpolation allows attackers to inject shell commands (CWE-78).",
10315
- suggestion: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
10316
- evidence: [`Line ${i + 1}: ${line.trim()}`],
10317
- validatedBy: "heuristic",
10318
- cweId: "CWE-78",
10319
- owaspCategory: "A03:2021 Injection",
10320
- confidence: "high",
10321
- remediation: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
10322
- references: [
10323
- "https://cwe.mitre.org/data/definitions/78.html",
10324
- "https://owasp.org/Top10/A03_2021-Injection/"
10325
- ]
10326
- });
10327
- }
10455
+ if (!SHELL_EXEC_PATTERN.test(line)) continue;
10456
+ findings.push(makeCommandFinding(cf.path, i + 1, line));
10328
10457
  }
10329
10458
  }
10330
10459
  return findings;
@@ -10357,10 +10486,15 @@ function isViolationLine(line) {
10357
10486
  const lower = line.toLowerCase();
10358
10487
  return lower.includes("violation") || lower.includes("layer");
10359
10488
  }
10360
- function createLayerViolationFinding(line, fallbackPath) {
10361
- const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
10489
+ var VIOLATION_FILE_RE = /(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/;
10490
+ function extractViolationLocation(line, fallbackPath) {
10491
+ const fileMatch = line.match(VIOLATION_FILE_RE);
10362
10492
  const file = fileMatch?.[1] ?? fallbackPath;
10363
10493
  const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
10494
+ return { file, lineNum };
10495
+ }
10496
+ function createLayerViolationFinding(line, fallbackPath) {
10497
+ const { file, lineNum } = extractViolationLocation(line, fallbackPath);
10364
10498
  return {
10365
10499
  id: makeFindingId("arch", file, lineNum, "layer violation"),
10366
10500
  file,
@@ -10501,7 +10635,7 @@ async function fanOutReview(options) {
10501
10635
  }
10502
10636
 
10503
10637
  // src/review/validate-findings.ts
10504
- import * as path20 from "path";
10638
+ import * as path23 from "path";
10505
10639
  var DOWNGRADE_MAP = {
10506
10640
  critical: "important",
10507
10641
  important: "suggestion",
@@ -10522,7 +10656,7 @@ function normalizePath(filePath, projectRoot) {
10522
10656
  let normalized = filePath;
10523
10657
  normalized = normalized.replace(/\\/g, "/");
10524
10658
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
10525
- if (path20.isAbsolute(normalized)) {
10659
+ if (path23.isAbsolute(normalized)) {
10526
10660
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
10527
10661
  if (normalized.startsWith(root)) {
10528
10662
  normalized = normalized.slice(root.length);
@@ -10533,6 +10667,26 @@ function normalizePath(filePath, projectRoot) {
10533
10667
  }
10534
10668
  return normalized;
10535
10669
  }
10670
+ function resolveImportPath2(currentFile, importPath) {
10671
+ const dir = path23.dirname(currentFile);
10672
+ let resolved = path23.join(dir, importPath).replace(/\\/g, "/");
10673
+ if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
10674
+ resolved += ".ts";
10675
+ }
10676
+ return path23.normalize(resolved).replace(/\\/g, "/");
10677
+ }
10678
+ function enqueueImports(content, current, visited, queue, maxDepth) {
10679
+ const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
10680
+ let match;
10681
+ while ((match = importRegex.exec(content)) !== null) {
10682
+ const importPath = match[1];
10683
+ if (!importPath.startsWith(".")) continue;
10684
+ const resolved = resolveImportPath2(current.file, importPath);
10685
+ if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
10686
+ queue.push({ file: resolved, depth: current.depth + 1 });
10687
+ }
10688
+ }
10689
+ }
10536
10690
  function followImportChain(fromFile, fileContents, maxDepth = 2) {
10537
10691
  const visited = /* @__PURE__ */ new Set();
10538
10692
  const queue = [{ file: fromFile, depth: 0 }];
@@ -10542,82 +10696,63 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
10542
10696
  visited.add(current.file);
10543
10697
  const content = fileContents.get(current.file);
10544
10698
  if (!content) continue;
10545
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
10546
- let match;
10547
- while ((match = importRegex.exec(content)) !== null) {
10548
- const importPath = match[1];
10549
- if (!importPath.startsWith(".")) continue;
10550
- const dir = path20.dirname(current.file);
10551
- let resolved = path20.join(dir, importPath).replace(/\\/g, "/");
10552
- if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
10553
- resolved += ".ts";
10554
- }
10555
- resolved = path20.normalize(resolved).replace(/\\/g, "/");
10556
- if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
10557
- queue.push({ file: resolved, depth: current.depth + 1 });
10558
- }
10559
- }
10699
+ enqueueImports(content, current, visited, queue, maxDepth);
10560
10700
  }
10561
10701
  visited.delete(fromFile);
10562
10702
  return visited;
10563
10703
  }
10704
+ function isMechanicallyExcluded(finding, exclusionSet, projectRoot) {
10705
+ const normalizedFile = normalizePath(finding.file, projectRoot);
10706
+ if (exclusionSet.isExcluded(normalizedFile, finding.lineRange)) return true;
10707
+ if (exclusionSet.isExcluded(finding.file, finding.lineRange)) return true;
10708
+ const absoluteFile = path23.isAbsolute(finding.file) ? finding.file : path23.join(projectRoot, finding.file).replace(/\\/g, "/");
10709
+ return exclusionSet.isExcluded(absoluteFile, finding.lineRange);
10710
+ }
10711
+ async function validateWithGraph(crossFileRefs, graph) {
10712
+ try {
10713
+ for (const ref of crossFileRefs) {
10714
+ const reachable = await graph.isReachable(ref.from, ref.to);
10715
+ if (!reachable) return { result: "discard" };
10716
+ }
10717
+ return { result: "keep" };
10718
+ } catch {
10719
+ return { result: "fallback" };
10720
+ }
10721
+ }
10722
+ function validateWithHeuristic(finding, crossFileRefs, fileContents, projectRoot) {
10723
+ if (fileContents) {
10724
+ for (const ref of crossFileRefs) {
10725
+ const normalizedFrom = normalizePath(ref.from, projectRoot);
10726
+ const reachable = followImportChain(normalizedFrom, fileContents, 2);
10727
+ const normalizedTo = normalizePath(ref.to, projectRoot);
10728
+ if (reachable.has(normalizedTo)) {
10729
+ return { ...finding, validatedBy: "heuristic" };
10730
+ }
10731
+ }
10732
+ }
10733
+ return {
10734
+ ...finding,
10735
+ severity: DOWNGRADE_MAP[finding.severity],
10736
+ validatedBy: "heuristic"
10737
+ };
10738
+ }
10739
+ async function processFinding(finding, exclusionSet, graph, projectRoot, fileContents) {
10740
+ if (isMechanicallyExcluded(finding, exclusionSet, projectRoot)) return null;
10741
+ const crossFileRefs = extractCrossFileRefs(finding);
10742
+ if (crossFileRefs.length === 0) return { ...finding };
10743
+ if (graph) {
10744
+ const { result } = await validateWithGraph(crossFileRefs, graph);
10745
+ if (result === "keep") return { ...finding, validatedBy: "graph" };
10746
+ if (result === "discard") return null;
10747
+ }
10748
+ return validateWithHeuristic(finding, crossFileRefs, fileContents, projectRoot);
10749
+ }
10564
10750
  async function validateFindings(options) {
10565
10751
  const { findings, exclusionSet, graph, projectRoot, fileContents } = options;
10566
10752
  const validated = [];
10567
10753
  for (const finding of findings) {
10568
- const normalizedFile = normalizePath(finding.file, projectRoot);
10569
- if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
10570
- continue;
10571
- }
10572
- const absoluteFile = path20.isAbsolute(finding.file) ? finding.file : path20.join(projectRoot, finding.file).replace(/\\/g, "/");
10573
- if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
10574
- continue;
10575
- }
10576
- const crossFileRefs = extractCrossFileRefs(finding);
10577
- if (crossFileRefs.length === 0) {
10578
- validated.push({ ...finding });
10579
- continue;
10580
- }
10581
- if (graph) {
10582
- try {
10583
- let allReachable = true;
10584
- for (const ref of crossFileRefs) {
10585
- const reachable = await graph.isReachable(ref.from, ref.to);
10586
- if (!reachable) {
10587
- allReachable = false;
10588
- break;
10589
- }
10590
- }
10591
- if (allReachable) {
10592
- validated.push({ ...finding, validatedBy: "graph" });
10593
- }
10594
- continue;
10595
- } catch {
10596
- }
10597
- }
10598
- {
10599
- let chainValidated = false;
10600
- if (fileContents) {
10601
- for (const ref of crossFileRefs) {
10602
- const normalizedFrom = normalizePath(ref.from, projectRoot);
10603
- const reachable = followImportChain(normalizedFrom, fileContents, 2);
10604
- const normalizedTo = normalizePath(ref.to, projectRoot);
10605
- if (reachable.has(normalizedTo)) {
10606
- chainValidated = true;
10607
- break;
10608
- }
10609
- }
10610
- }
10611
- if (chainValidated) {
10612
- validated.push({ ...finding, validatedBy: "heuristic" });
10613
- } else {
10614
- validated.push({
10615
- ...finding,
10616
- severity: DOWNGRADE_MAP[finding.severity],
10617
- validatedBy: "heuristic"
10618
- });
10619
- }
10620
- }
10754
+ const result = await processFinding(finding, exclusionSet, graph, projectRoot, fileContents);
10755
+ if (result !== null) validated.push(result);
10621
10756
  }
10622
10757
  return validated;
10623
10758
  }
@@ -11218,25 +11353,32 @@ function serializeRoadmap(roadmap) {
11218
11353
  function serializeMilestoneHeading(milestone) {
11219
11354
  return milestone.isBacklog ? "## Backlog" : `## ${milestone.name}`;
11220
11355
  }
11356
+ function orDash(value) {
11357
+ return value ?? EM_DASH2;
11358
+ }
11359
+ function listOrDash(items) {
11360
+ return items.length > 0 ? items.join(", ") : EM_DASH2;
11361
+ }
11362
+ function serializeExtendedLines(feature) {
11363
+ const hasExtended = feature.assignee !== null || feature.priority !== null || feature.externalId !== null;
11364
+ if (!hasExtended) return [];
11365
+ return [
11366
+ `- **Assignee:** ${orDash(feature.assignee)}`,
11367
+ `- **Priority:** ${orDash(feature.priority)}`,
11368
+ `- **External-ID:** ${orDash(feature.externalId)}`
11369
+ ];
11370
+ }
11221
11371
  function serializeFeature(feature) {
11222
- const spec = feature.spec ?? EM_DASH2;
11223
- const plans = feature.plans.length > 0 ? feature.plans.join(", ") : EM_DASH2;
11224
- const blockedBy = feature.blockedBy.length > 0 ? feature.blockedBy.join(", ") : EM_DASH2;
11225
11372
  const lines = [
11226
11373
  `### ${feature.name}`,
11227
11374
  "",
11228
11375
  `- **Status:** ${feature.status}`,
11229
- `- **Spec:** ${spec}`,
11376
+ `- **Spec:** ${orDash(feature.spec)}`,
11230
11377
  `- **Summary:** ${feature.summary}`,
11231
- `- **Blockers:** ${blockedBy}`,
11232
- `- **Plan:** ${plans}`
11378
+ `- **Blockers:** ${listOrDash(feature.blockedBy)}`,
11379
+ `- **Plan:** ${listOrDash(feature.plans)}`,
11380
+ ...serializeExtendedLines(feature)
11233
11381
  ];
11234
- const hasExtended = feature.assignee !== null || feature.priority !== null || feature.externalId !== null;
11235
- if (hasExtended) {
11236
- lines.push(`- **Assignee:** ${feature.assignee ?? EM_DASH2}`);
11237
- lines.push(`- **Priority:** ${feature.priority ?? EM_DASH2}`);
11238
- lines.push(`- **External-ID:** ${feature.externalId ?? EM_DASH2}`);
11239
- }
11240
11382
  return lines;
11241
11383
  }
11242
11384
  function serializeAssignmentHistory(records) {
@@ -11252,8 +11394,8 @@ function serializeAssignmentHistory(records) {
11252
11394
  }
11253
11395
 
11254
11396
  // src/roadmap/sync.ts
11255
- import * as fs21 from "fs";
11256
- import * as path21 from "path";
11397
+ import * as fs24 from "fs";
11398
+ import * as path24 from "path";
11257
11399
  import { Ok as Ok3 } from "@harness-engineering/types";
11258
11400
 
11259
11401
  // src/roadmap/status-rank.ts
@@ -11270,6 +11412,26 @@ function isRegression(from, to) {
11270
11412
  }
11271
11413
 
11272
11414
  // src/roadmap/sync.ts
11415
+ function collectAutopilotStatuses(autopilotPath, featurePlans, allTaskStatuses) {
11416
+ try {
11417
+ const raw = fs24.readFileSync(autopilotPath, "utf-8");
11418
+ const autopilot = JSON.parse(raw);
11419
+ if (!autopilot.phases) return;
11420
+ const linkedPhases = autopilot.phases.filter(
11421
+ (phase) => phase.planPath ? featurePlans.some((p) => p === phase.planPath || phase.planPath.endsWith(p)) : false
11422
+ );
11423
+ for (const phase of linkedPhases) {
11424
+ if (phase.status === "complete") {
11425
+ allTaskStatuses.push("complete");
11426
+ } else if (phase.status === "pending") {
11427
+ allTaskStatuses.push("pending");
11428
+ } else {
11429
+ allTaskStatuses.push("in_progress");
11430
+ }
11431
+ }
11432
+ } catch {
11433
+ }
11434
+ }
11273
11435
  function inferStatus(feature, projectPath, allFeatures) {
11274
11436
  if (feature.blockedBy.length > 0) {
11275
11437
  const blockerNotDone = feature.blockedBy.some((blockerName) => {
@@ -11283,10 +11445,10 @@ function inferStatus(feature, projectPath, allFeatures) {
11283
11445
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
11284
11446
  const useRootState = featuresWithPlans.length <= 1;
11285
11447
  if (useRootState) {
11286
- const rootStatePath = path21.join(projectPath, ".harness", "state.json");
11287
- if (fs21.existsSync(rootStatePath)) {
11448
+ const rootStatePath = path24.join(projectPath, ".harness", "state.json");
11449
+ if (fs24.existsSync(rootStatePath)) {
11288
11450
  try {
11289
- const raw = fs21.readFileSync(rootStatePath, "utf-8");
11451
+ const raw = fs24.readFileSync(rootStatePath, "utf-8");
11290
11452
  const state = JSON.parse(raw);
11291
11453
  if (state.progress) {
11292
11454
  for (const status of Object.values(state.progress)) {
@@ -11297,34 +11459,15 @@ function inferStatus(feature, projectPath, allFeatures) {
11297
11459
  }
11298
11460
  }
11299
11461
  }
11300
- const sessionsDir = path21.join(projectPath, ".harness", "sessions");
11301
- if (fs21.existsSync(sessionsDir)) {
11462
+ const sessionsDir = path24.join(projectPath, ".harness", "sessions");
11463
+ if (fs24.existsSync(sessionsDir)) {
11302
11464
  try {
11303
- const sessionDirs = fs21.readdirSync(sessionsDir, { withFileTypes: true });
11465
+ const sessionDirs = fs24.readdirSync(sessionsDir, { withFileTypes: true });
11304
11466
  for (const entry of sessionDirs) {
11305
11467
  if (!entry.isDirectory()) continue;
11306
- const autopilotPath = path21.join(sessionsDir, entry.name, "autopilot-state.json");
11307
- if (!fs21.existsSync(autopilotPath)) continue;
11308
- try {
11309
- const raw = fs21.readFileSync(autopilotPath, "utf-8");
11310
- const autopilot = JSON.parse(raw);
11311
- if (!autopilot.phases) continue;
11312
- const linkedPhases = autopilot.phases.filter(
11313
- (phase) => phase.planPath ? feature.plans.some((p) => p === phase.planPath || phase.planPath.endsWith(p)) : false
11314
- );
11315
- if (linkedPhases.length > 0) {
11316
- for (const phase of linkedPhases) {
11317
- if (phase.status === "complete") {
11318
- allTaskStatuses.push("complete");
11319
- } else if (phase.status === "pending") {
11320
- allTaskStatuses.push("pending");
11321
- } else {
11322
- allTaskStatuses.push("in_progress");
11323
- }
11324
- }
11325
- }
11326
- } catch {
11327
- }
11468
+ const autopilotPath = path24.join(sessionsDir, entry.name, "autopilot-state.json");
11469
+ if (!fs24.existsSync(autopilotPath)) continue;
11470
+ collectAutopilotStatuses(autopilotPath, feature.plans, allTaskStatuses);
11328
11471
  }
11329
11472
  } catch {
11330
11473
  }
@@ -11641,23 +11784,36 @@ var GitHubIssuesSyncAdapter = class {
11641
11784
  return Err3(error instanceof Error ? error : new Error(String(error)));
11642
11785
  }
11643
11786
  }
11787
+ buildLabelsParam() {
11788
+ const filterLabels = this.config.labels ?? [];
11789
+ return filterLabels.length > 0 ? `&labels=${filterLabels.join(",")}` : "";
11790
+ }
11791
+ issueToTicketState(issue) {
11792
+ return {
11793
+ externalId: buildExternalId(this.owner, this.repo, issue.number),
11794
+ title: issue.title,
11795
+ status: issue.state,
11796
+ labels: issue.labels.map((l) => l.name),
11797
+ assignee: issue.assignee ? `@${issue.assignee.login}` : null
11798
+ };
11799
+ }
11800
+ async fetchIssuePage(page, labelsParam) {
11801
+ const perPage = 100;
11802
+ return fetchWithRetry(
11803
+ this.fetchFn,
11804
+ `${this.apiBase}/repos/${this.owner}/${this.repo}/issues?state=all&per_page=${perPage}&page=${page}${labelsParam}`,
11805
+ { method: "GET", headers: this.headers() },
11806
+ this.retryOpts
11807
+ );
11808
+ }
11644
11809
  async fetchAllTickets() {
11645
11810
  try {
11646
- const filterLabels = this.config.labels ?? [];
11647
- const labelsParam = filterLabels.length > 0 ? `&labels=${filterLabels.join(",")}` : "";
11811
+ const labelsParam = this.buildLabelsParam();
11648
11812
  const tickets = [];
11649
11813
  let page = 1;
11650
11814
  const perPage = 100;
11651
11815
  while (true) {
11652
- const response = await fetchWithRetry(
11653
- this.fetchFn,
11654
- `${this.apiBase}/repos/${this.owner}/${this.repo}/issues?state=all&per_page=${perPage}&page=${page}${labelsParam}`,
11655
- {
11656
- method: "GET",
11657
- headers: this.headers()
11658
- },
11659
- this.retryOpts
11660
- );
11816
+ const response = await this.fetchIssuePage(page, labelsParam);
11661
11817
  if (!response.ok) {
11662
11818
  const text = await response.text();
11663
11819
  return Err3(new Error(`GitHub API error ${response.status}: ${text}`));
@@ -11665,13 +11821,7 @@ var GitHubIssuesSyncAdapter = class {
11665
11821
  const data = await response.json();
11666
11822
  const issues = data.filter((d) => !d.pull_request);
11667
11823
  for (const issue of issues) {
11668
- tickets.push({
11669
- externalId: buildExternalId(this.owner, this.repo, issue.number),
11670
- title: issue.title,
11671
- status: issue.state,
11672
- labels: issue.labels.map((l) => l.name),
11673
- assignee: issue.assignee ? `@${issue.assignee.login}` : null
11674
- });
11824
+ tickets.push(this.issueToTicketState(issue));
11675
11825
  }
11676
11826
  if (data.length < perPage) break;
11677
11827
  page++;
@@ -11708,42 +11858,54 @@ var GitHubIssuesSyncAdapter = class {
11708
11858
  };
11709
11859
 
11710
11860
  // src/roadmap/sync-engine.ts
11711
- import * as fs22 from "fs";
11861
+ import * as fs25 from "fs";
11712
11862
  function emptySyncResult() {
11713
11863
  return { created: [], updated: [], assignmentChanges: [], errors: [] };
11714
11864
  }
11715
- async function syncToExternal(roadmap, adapter, config, prefetchedTickets) {
11716
- const result = emptySyncResult();
11717
- const existingByTitle = /* @__PURE__ */ new Map();
11865
+ function buildDedupIndex(tickets, config) {
11866
+ const index = /* @__PURE__ */ new Map();
11867
+ if (!tickets) return index;
11718
11868
  const configLabels = new Set((config.labels ?? []).map((l) => l.toLowerCase()));
11719
- if (prefetchedTickets) {
11720
- for (const ticket of prefetchedTickets) {
11721
- const hasConfigLabels = configLabels.size === 0 || ticket.labels.some((l) => configLabels.has(l.toLowerCase()));
11722
- if (!hasConfigLabels) continue;
11723
- const key = ticket.title.toLowerCase();
11724
- const prev = existingByTitle.get(key);
11725
- if (!prev || prev.status === "closed" && ticket.status === "open") {
11726
- existingByTitle.set(key, ticket);
11727
- }
11869
+ for (const ticket of tickets) {
11870
+ const hasConfigLabels = configLabels.size === 0 || ticket.labels.some((l) => configLabels.has(l.toLowerCase()));
11871
+ if (!hasConfigLabels) continue;
11872
+ const key = ticket.title.toLowerCase();
11873
+ const prev = index.get(key);
11874
+ if (!prev || prev.status === "closed" && ticket.status === "open") {
11875
+ index.set(key, ticket);
11728
11876
  }
11729
11877
  }
11878
+ return index;
11879
+ }
11880
+ async function resolveExternalId(feature, milestone, adapter, dedupIndex, result) {
11881
+ if (feature.externalId) return true;
11882
+ const existing = dedupIndex.get(feature.name.toLowerCase());
11883
+ if (existing) {
11884
+ feature.externalId = existing.externalId;
11885
+ return true;
11886
+ }
11887
+ const createResult = await adapter.createTicket(feature, milestone);
11888
+ if (createResult.ok) {
11889
+ feature.externalId = createResult.value.externalId;
11890
+ result.created.push(createResult.value);
11891
+ } else {
11892
+ result.errors.push({ featureOrId: feature.name, error: createResult.error });
11893
+ }
11894
+ return false;
11895
+ }
11896
+ async function syncToExternal(roadmap, adapter, config, prefetchedTickets) {
11897
+ const result = emptySyncResult();
11898
+ const dedupIndex = buildDedupIndex(prefetchedTickets, config);
11730
11899
  for (const milestone of roadmap.milestones) {
11731
11900
  for (const feature of milestone.features) {
11732
- if (!feature.externalId) {
11733
- const existing = existingByTitle.get(feature.name.toLowerCase());
11734
- if (existing) {
11735
- feature.externalId = existing.externalId;
11736
- } else {
11737
- const createResult = await adapter.createTicket(feature, milestone.name);
11738
- if (createResult.ok) {
11739
- feature.externalId = createResult.value.externalId;
11740
- result.created.push(createResult.value);
11741
- } else {
11742
- result.errors.push({ featureOrId: feature.name, error: createResult.error });
11743
- }
11744
- continue;
11745
- }
11746
- }
11901
+ const shouldUpdate = await resolveExternalId(
11902
+ feature,
11903
+ milestone.name,
11904
+ adapter,
11905
+ dedupIndex,
11906
+ result
11907
+ );
11908
+ if (!shouldUpdate) continue;
11747
11909
  const updateResult = await adapter.updateTicket(feature.externalId, feature, milestone.name);
11748
11910
  if (updateResult.ok) {
11749
11911
  result.updated.push(feature.externalId);
@@ -11754,6 +11916,22 @@ async function syncToExternal(roadmap, adapter, config, prefetchedTickets) {
11754
11916
  }
11755
11917
  return result;
11756
11918
  }
11919
+ function applyTicketToFeature(ticketState, feature, config, forceSync, result) {
11920
+ if (ticketState.assignee !== feature.assignee) {
11921
+ result.assignmentChanges.push({
11922
+ feature: feature.name,
11923
+ from: feature.assignee,
11924
+ to: ticketState.assignee
11925
+ });
11926
+ feature.assignee = ticketState.assignee;
11927
+ }
11928
+ const resolvedStatus = resolveReverseStatus(ticketState.status, ticketState.labels, config);
11929
+ if (!resolvedStatus || resolvedStatus === feature.status) return;
11930
+ const newStatus = resolvedStatus;
11931
+ if (!forceSync && isRegression(feature.status, newStatus)) return;
11932
+ if (!forceSync && feature.status === "blocked" && newStatus === "planned") return;
11933
+ feature.status = newStatus;
11934
+ }
11757
11935
  async function syncFromExternal(roadmap, adapter, config, options, prefetchedTickets) {
11758
11936
  const result = emptySyncResult();
11759
11937
  const forceSync = options?.forceSync ?? false;
@@ -11780,22 +11958,7 @@ async function syncFromExternal(roadmap, adapter, config, options, prefetchedTic
11780
11958
  for (const ticketState of tickets) {
11781
11959
  const feature = featureByExternalId.get(ticketState.externalId);
11782
11960
  if (!feature) continue;
11783
- if (ticketState.assignee !== feature.assignee) {
11784
- result.assignmentChanges.push({
11785
- feature: feature.name,
11786
- from: feature.assignee,
11787
- to: ticketState.assignee
11788
- });
11789
- feature.assignee = ticketState.assignee;
11790
- }
11791
- const resolvedStatus = resolveReverseStatus(ticketState.status, ticketState.labels, config);
11792
- if (resolvedStatus && resolvedStatus !== feature.status) {
11793
- const newStatus = resolvedStatus;
11794
- if (!forceSync && isRegression(feature.status, newStatus)) {
11795
- continue;
11796
- }
11797
- feature.status = newStatus;
11798
- }
11961
+ applyTicketToFeature(ticketState, feature, config, forceSync, result);
11799
11962
  }
11800
11963
  return result;
11801
11964
  }
@@ -11808,7 +11971,7 @@ async function fullSync(roadmapPath, adapter, config, options) {
11808
11971
  });
11809
11972
  await previousSync;
11810
11973
  try {
11811
- const raw = fs22.readFileSync(roadmapPath, "utf-8");
11974
+ const raw = fs25.readFileSync(roadmapPath, "utf-8");
11812
11975
  const parseResult = parseRoadmap(raw);
11813
11976
  if (!parseResult.ok) {
11814
11977
  return {
@@ -11821,7 +11984,7 @@ async function fullSync(roadmapPath, adapter, config, options) {
11821
11984
  const tickets = fetchResult.ok ? fetchResult.value : void 0;
11822
11985
  const pushResult = await syncToExternal(roadmap, adapter, config, tickets);
11823
11986
  const pullResult = await syncFromExternal(roadmap, adapter, config, options, tickets);
11824
- fs22.writeFileSync(roadmapPath, serializeRoadmap(roadmap), "utf-8");
11987
+ fs25.writeFileSync(roadmapPath, serializeRoadmap(roadmap), "utf-8");
11825
11988
  return {
11826
11989
  created: pushResult.created,
11827
11990
  updated: pushResult.updated,
@@ -11843,6 +12006,24 @@ var PRIORITY_RANK = {
11843
12006
  var POSITION_WEIGHT = 0.5;
11844
12007
  var DEPENDENTS_WEIGHT = 0.3;
11845
12008
  var AFFINITY_WEIGHT = 0.2;
12009
+ function isEligibleCandidate(feature, allFeatureNames, doneFeatures) {
12010
+ if (feature.status !== "planned" && feature.status !== "backlog") return false;
12011
+ const isBlocked = feature.blockedBy.some((blocker) => {
12012
+ const key = blocker.toLowerCase();
12013
+ return allFeatureNames.has(key) && !doneFeatures.has(key);
12014
+ });
12015
+ return !isBlocked;
12016
+ }
12017
+ function computeAffinityScore(feature, milestoneName, milestoneMap, userCompletedFeatures) {
12018
+ if (userCompletedFeatures.size === 0) return 0;
12019
+ const completedBlocker = feature.blockedBy.some(
12020
+ (b) => userCompletedFeatures.has(b.toLowerCase())
12021
+ );
12022
+ if (completedBlocker) return 1;
12023
+ const siblings = milestoneMap.get(milestoneName) ?? [];
12024
+ const completedSibling = siblings.some((s) => userCompletedFeatures.has(s));
12025
+ return completedSibling ? 0.5 : 0;
12026
+ }
11846
12027
  function scoreRoadmapCandidates(roadmap, options) {
11847
12028
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
11848
12029
  const allFeatureNames = new Set(allFeatures.map((f) => f.name.toLowerCase()));
@@ -11881,33 +12062,18 @@ function scoreRoadmapCandidates(roadmap, options) {
11881
12062
  const candidates = [];
11882
12063
  let globalPosition = 0;
11883
12064
  for (const ms of roadmap.milestones) {
11884
- for (let featureIdx = 0; featureIdx < ms.features.length; featureIdx++) {
11885
- const feature = ms.features[featureIdx];
12065
+ for (const feature of ms.features) {
11886
12066
  globalPosition++;
11887
- if (feature.status !== "planned" && feature.status !== "backlog") continue;
11888
- const isBlocked = feature.blockedBy.some((blocker) => {
11889
- const key = blocker.toLowerCase();
11890
- return allFeatureNames.has(key) && !doneFeatures.has(key);
11891
- });
11892
- if (isBlocked) continue;
12067
+ if (!isEligibleCandidate(feature, allFeatureNames, doneFeatures)) continue;
11893
12068
  const positionScore = 1 - (globalPosition - 1) / totalPositions;
11894
12069
  const deps = dependentsCount.get(feature.name.toLowerCase()) ?? 0;
11895
12070
  const dependentsScore = deps / maxDependents;
11896
- let affinityScore = 0;
11897
- if (userCompletedFeatures.size > 0) {
11898
- const completedBlockers = feature.blockedBy.filter(
11899
- (b) => userCompletedFeatures.has(b.toLowerCase())
11900
- );
11901
- if (completedBlockers.length > 0) {
11902
- affinityScore = 1;
11903
- } else {
11904
- const siblings = milestoneMap.get(ms.name) ?? [];
11905
- const completedSiblings = siblings.filter((s) => userCompletedFeatures.has(s));
11906
- if (completedSiblings.length > 0) {
11907
- affinityScore = 0.5;
11908
- }
11909
- }
11910
- }
12071
+ const affinityScore = computeAffinityScore(
12072
+ feature,
12073
+ ms.name,
12074
+ milestoneMap,
12075
+ userCompletedFeatures
12076
+ );
11911
12077
  const weightedScore = POSITION_WEIGHT * positionScore + DEPENDENTS_WEIGHT * dependentsScore + AFFINITY_WEIGHT * affinityScore;
11912
12078
  const priorityTier = feature.priority ? PRIORITY_RANK[feature.priority] : null;
11913
12079
  candidates.push({
@@ -11980,18 +12146,18 @@ var EmitInteractionInputSchema = z9.object({
11980
12146
  });
11981
12147
 
11982
12148
  // src/blueprint/scanner.ts
11983
- import * as fs23 from "fs/promises";
11984
- import * as path22 from "path";
12149
+ import * as fs26 from "fs/promises";
12150
+ import * as path25 from "path";
11985
12151
  var ProjectScanner = class {
11986
12152
  constructor(rootDir) {
11987
12153
  this.rootDir = rootDir;
11988
12154
  }
11989
12155
  rootDir;
11990
12156
  async scan() {
11991
- let projectName = path22.basename(this.rootDir);
12157
+ let projectName = path25.basename(this.rootDir);
11992
12158
  try {
11993
- const pkgPath = path22.join(this.rootDir, "package.json");
11994
- const pkgRaw = await fs23.readFile(pkgPath, "utf-8");
12159
+ const pkgPath = path25.join(this.rootDir, "package.json");
12160
+ const pkgRaw = await fs26.readFile(pkgPath, "utf-8");
11995
12161
  const pkg = JSON.parse(pkgRaw);
11996
12162
  if (pkg.name) projectName = pkg.name;
11997
12163
  } catch {
@@ -12032,8 +12198,8 @@ var ProjectScanner = class {
12032
12198
  };
12033
12199
 
12034
12200
  // src/blueprint/generator.ts
12035
- import * as fs24 from "fs/promises";
12036
- import * as path23 from "path";
12201
+ import * as fs27 from "fs/promises";
12202
+ import * as path26 from "path";
12037
12203
  import * as ejs from "ejs";
12038
12204
 
12039
12205
  // src/blueprint/templates.ts
@@ -12117,19 +12283,19 @@ var BlueprintGenerator = class {
12117
12283
  styles: STYLES,
12118
12284
  scripts: SCRIPTS
12119
12285
  });
12120
- await fs24.mkdir(options.outputDir, { recursive: true });
12121
- await fs24.writeFile(path23.join(options.outputDir, "index.html"), html);
12286
+ await fs27.mkdir(options.outputDir, { recursive: true });
12287
+ await fs27.writeFile(path26.join(options.outputDir, "index.html"), html);
12122
12288
  }
12123
12289
  };
12124
12290
 
12125
12291
  // src/update-checker.ts
12126
- import * as fs25 from "fs";
12127
- import * as path24 from "path";
12292
+ import * as fs28 from "fs";
12293
+ import * as path27 from "path";
12128
12294
  import * as os from "os";
12129
12295
  import { spawn } from "child_process";
12130
12296
  function getStatePath() {
12131
12297
  const home = process.env["HOME"] || os.homedir();
12132
- return path24.join(home, ".harness", "update-check.json");
12298
+ return path27.join(home, ".harness", "update-check.json");
12133
12299
  }
12134
12300
  function isUpdateCheckEnabled(configInterval) {
12135
12301
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -12142,7 +12308,7 @@ function shouldRunCheck(state, intervalMs) {
12142
12308
  }
12143
12309
  function readCheckState() {
12144
12310
  try {
12145
- const raw = fs25.readFileSync(getStatePath(), "utf-8");
12311
+ const raw = fs28.readFileSync(getStatePath(), "utf-8");
12146
12312
  const parsed = JSON.parse(raw);
12147
12313
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
12148
12314
  const state = parsed;
@@ -12159,7 +12325,7 @@ function readCheckState() {
12159
12325
  }
12160
12326
  function spawnBackgroundCheck(currentVersion) {
12161
12327
  const statePath = getStatePath();
12162
- const stateDir = path24.dirname(statePath);
12328
+ const stateDir = path27.dirname(statePath);
12163
12329
  const script = `
12164
12330
  const { execSync } = require('child_process');
12165
12331
  const fs = require('fs');
@@ -12248,9 +12414,9 @@ async function resolveWasmPath(grammarName) {
12248
12414
  const { createRequire } = await import("module");
12249
12415
  const require2 = createRequire(import.meta.url ?? __filename);
12250
12416
  const pkgPath = require2.resolve("tree-sitter-wasms/package.json");
12251
- const path28 = await import("path");
12252
- const pkgDir = path28.dirname(pkgPath);
12253
- return path28.join(pkgDir, "out", `${grammarName}.wasm`);
12417
+ const path31 = await import("path");
12418
+ const pkgDir = path31.dirname(pkgPath);
12419
+ return path31.join(pkgDir, "out", `${grammarName}.wasm`);
12254
12420
  }
12255
12421
  async function loadLanguage(lang) {
12256
12422
  const grammarName = GRAMMAR_MAP[lang];
@@ -12654,8 +12820,8 @@ function getModelPrice(model, dataset) {
12654
12820
  }
12655
12821
 
12656
12822
  // src/pricing/cache.ts
12657
- import * as fs26 from "fs/promises";
12658
- import * as path25 from "path";
12823
+ import * as fs29 from "fs/promises";
12824
+ import * as path28 from "path";
12659
12825
 
12660
12826
  // src/pricing/fallback.json
12661
12827
  var fallback_default = {
@@ -12708,14 +12874,14 @@ var LITELLM_PRICING_URL = "https://raw.githubusercontent.com/BerriAI/litellm/mai
12708
12874
  var CACHE_TTL_MS = 24 * 60 * 60 * 1e3;
12709
12875
  var STALENESS_WARNING_DAYS = 7;
12710
12876
  function getCachePath(projectRoot) {
12711
- return path25.join(projectRoot, ".harness", "cache", "pricing.json");
12877
+ return path28.join(projectRoot, ".harness", "cache", "pricing.json");
12712
12878
  }
12713
12879
  function getStalenessMarkerPath(projectRoot) {
12714
- return path25.join(projectRoot, ".harness", "cache", "staleness-marker.json");
12880
+ return path28.join(projectRoot, ".harness", "cache", "staleness-marker.json");
12715
12881
  }
12716
12882
  async function readDiskCache(projectRoot) {
12717
12883
  try {
12718
- const raw = await fs26.readFile(getCachePath(projectRoot), "utf-8");
12884
+ const raw = await fs29.readFile(getCachePath(projectRoot), "utf-8");
12719
12885
  return JSON.parse(raw);
12720
12886
  } catch {
12721
12887
  return null;
@@ -12723,8 +12889,8 @@ async function readDiskCache(projectRoot) {
12723
12889
  }
12724
12890
  async function writeDiskCache(projectRoot, data) {
12725
12891
  const cachePath = getCachePath(projectRoot);
12726
- await fs26.mkdir(path25.dirname(cachePath), { recursive: true });
12727
- await fs26.writeFile(cachePath, JSON.stringify(data, null, 2));
12892
+ await fs29.mkdir(path28.dirname(cachePath), { recursive: true });
12893
+ await fs29.writeFile(cachePath, JSON.stringify(data, null, 2));
12728
12894
  }
12729
12895
  async function fetchFromNetwork() {
12730
12896
  try {
@@ -12751,7 +12917,7 @@ function loadFallbackDataset() {
12751
12917
  async function checkAndWarnStaleness(projectRoot) {
12752
12918
  const markerPath = getStalenessMarkerPath(projectRoot);
12753
12919
  try {
12754
- const raw = await fs26.readFile(markerPath, "utf-8");
12920
+ const raw = await fs29.readFile(markerPath, "utf-8");
12755
12921
  const marker = JSON.parse(raw);
12756
12922
  const firstUse = new Date(marker.firstFallbackUse).getTime();
12757
12923
  const now = Date.now();
@@ -12763,8 +12929,8 @@ async function checkAndWarnStaleness(projectRoot) {
12763
12929
  }
12764
12930
  } catch {
12765
12931
  try {
12766
- await fs26.mkdir(path25.dirname(markerPath), { recursive: true });
12767
- await fs26.writeFile(
12932
+ await fs29.mkdir(path28.dirname(markerPath), { recursive: true });
12933
+ await fs29.writeFile(
12768
12934
  markerPath,
12769
12935
  JSON.stringify({ firstFallbackUse: (/* @__PURE__ */ new Date()).toISOString() })
12770
12936
  );
@@ -12774,7 +12940,7 @@ async function checkAndWarnStaleness(projectRoot) {
12774
12940
  }
12775
12941
  async function clearStalenessMarker(projectRoot) {
12776
12942
  try {
12777
- await fs26.unlink(getStalenessMarkerPath(projectRoot));
12943
+ await fs29.unlink(getStalenessMarkerPath(projectRoot));
12778
12944
  } catch {
12779
12945
  }
12780
12946
  }
@@ -12820,8 +12986,7 @@ function calculateCost(record, dataset) {
12820
12986
  }
12821
12987
 
12822
12988
  // src/usage/aggregator.ts
12823
- function aggregateBySession(records) {
12824
- if (records.length === 0) return [];
12989
+ function bucketRecordsBySession(records) {
12825
12990
  const sessionMap = /* @__PURE__ */ new Map();
12826
12991
  for (const record of records) {
12827
12992
  const tagged = record;
@@ -12837,58 +13002,104 @@ function aggregateBySession(records) {
12837
13002
  }
12838
13003
  bucket.allRecords.push(tagged);
12839
13004
  }
13005
+ return sessionMap;
13006
+ }
13007
+ function accumulateCost(running, recordCost) {
13008
+ if (recordCost != null && running != null) {
13009
+ return running + recordCost;
13010
+ }
13011
+ if (recordCost == null) {
13012
+ return null;
13013
+ }
13014
+ return running;
13015
+ }
13016
+ function sumRecordTokens(tokenSource) {
13017
+ const tokens = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
13018
+ let cacheCreation;
13019
+ let cacheRead;
13020
+ let costMicroUSD = 0;
13021
+ let model;
13022
+ for (const r of tokenSource) {
13023
+ tokens.inputTokens += r.tokens.inputTokens;
13024
+ tokens.outputTokens += r.tokens.outputTokens;
13025
+ tokens.totalTokens += r.tokens.totalTokens;
13026
+ if (r.cacheCreationTokens != null) {
13027
+ cacheCreation = (cacheCreation ?? 0) + r.cacheCreationTokens;
13028
+ }
13029
+ if (r.cacheReadTokens != null) {
13030
+ cacheRead = (cacheRead ?? 0) + r.cacheReadTokens;
13031
+ }
13032
+ costMicroUSD = accumulateCost(costMicroUSD, r.costMicroUSD);
13033
+ if (!model && r.model) {
13034
+ model = r.model;
13035
+ }
13036
+ }
13037
+ return { tokens, cacheCreation, cacheRead, costMicroUSD, model };
13038
+ }
13039
+ function findModel(records) {
13040
+ for (const r of records) {
13041
+ if (r.model) return r.model;
13042
+ }
13043
+ return void 0;
13044
+ }
13045
+ function determineSource(hasHarness, hasCC) {
13046
+ if (hasHarness && hasCC) return "merged";
13047
+ if (hasCC) return "claude-code";
13048
+ return "harness";
13049
+ }
13050
+ function applyOptionalFields(session, totals, model) {
13051
+ if (model) session.model = model;
13052
+ if (totals.cacheCreation != null) session.cacheCreationTokens = totals.cacheCreation;
13053
+ if (totals.cacheRead != null) session.cacheReadTokens = totals.cacheRead;
13054
+ }
13055
+ function buildSessionUsage(sessionId, bucket) {
13056
+ const hasHarness = bucket.harnessRecords.length > 0;
13057
+ const hasCC = bucket.ccRecords.length > 0;
13058
+ const tokenSource = hasHarness ? bucket.harnessRecords : bucket.ccRecords;
13059
+ const totals = sumRecordTokens(tokenSource);
13060
+ const model = totals.model ?? (hasCC ? findModel(bucket.ccRecords) : void 0);
13061
+ const timestamps = bucket.allRecords.map((r) => r.timestamp).sort();
13062
+ const session = {
13063
+ sessionId,
13064
+ firstTimestamp: timestamps[0] ?? "",
13065
+ lastTimestamp: timestamps[timestamps.length - 1] ?? "",
13066
+ tokens: totals.tokens,
13067
+ costMicroUSD: totals.costMicroUSD,
13068
+ source: determineSource(hasHarness, hasCC)
13069
+ };
13070
+ applyOptionalFields(session, totals, model);
13071
+ return session;
13072
+ }
13073
+ function accumulateIntoDayBucket(day, record) {
13074
+ day.sessions.add(record.sessionId);
13075
+ day.tokens.inputTokens += record.tokens.inputTokens;
13076
+ day.tokens.outputTokens += record.tokens.outputTokens;
13077
+ day.tokens.totalTokens += record.tokens.totalTokens;
13078
+ if (record.cacheCreationTokens != null) {
13079
+ day.cacheCreation = (day.cacheCreation ?? 0) + record.cacheCreationTokens;
13080
+ }
13081
+ if (record.cacheReadTokens != null) {
13082
+ day.cacheRead = (day.cacheRead ?? 0) + record.cacheReadTokens;
13083
+ }
13084
+ day.costMicroUSD = accumulateCost(day.costMicroUSD, record.costMicroUSD);
13085
+ if (record.model) {
13086
+ day.models.add(record.model);
13087
+ }
13088
+ }
13089
+ function createDayBucket() {
13090
+ return {
13091
+ sessions: /* @__PURE__ */ new Set(),
13092
+ tokens: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
13093
+ costMicroUSD: 0,
13094
+ models: /* @__PURE__ */ new Set()
13095
+ };
13096
+ }
13097
+ function aggregateBySession(records) {
13098
+ if (records.length === 0) return [];
13099
+ const sessionMap = bucketRecordsBySession(records);
12840
13100
  const results = [];
12841
13101
  for (const [sessionId, bucket] of sessionMap) {
12842
- const hasHarness = bucket.harnessRecords.length > 0;
12843
- const hasCC = bucket.ccRecords.length > 0;
12844
- const isMerged = hasHarness && hasCC;
12845
- const tokenSource = hasHarness ? bucket.harnessRecords : bucket.ccRecords;
12846
- const tokens = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
12847
- let cacheCreation;
12848
- let cacheRead;
12849
- let costMicroUSD = 0;
12850
- let model;
12851
- for (const r of tokenSource) {
12852
- tokens.inputTokens += r.tokens.inputTokens;
12853
- tokens.outputTokens += r.tokens.outputTokens;
12854
- tokens.totalTokens += r.tokens.totalTokens;
12855
- if (r.cacheCreationTokens != null) {
12856
- cacheCreation = (cacheCreation ?? 0) + r.cacheCreationTokens;
12857
- }
12858
- if (r.cacheReadTokens != null) {
12859
- cacheRead = (cacheRead ?? 0) + r.cacheReadTokens;
12860
- }
12861
- if (r.costMicroUSD != null && costMicroUSD != null) {
12862
- costMicroUSD += r.costMicroUSD;
12863
- } else if (r.costMicroUSD == null) {
12864
- costMicroUSD = null;
12865
- }
12866
- if (!model && r.model) {
12867
- model = r.model;
12868
- }
12869
- }
12870
- if (!model && hasCC) {
12871
- for (const r of bucket.ccRecords) {
12872
- if (r.model) {
12873
- model = r.model;
12874
- break;
12875
- }
12876
- }
12877
- }
12878
- const timestamps = bucket.allRecords.map((r) => r.timestamp).sort();
12879
- const source = isMerged ? "merged" : hasCC ? "claude-code" : "harness";
12880
- const session = {
12881
- sessionId,
12882
- firstTimestamp: timestamps[0] ?? "",
12883
- lastTimestamp: timestamps[timestamps.length - 1] ?? "",
12884
- tokens,
12885
- costMicroUSD,
12886
- source
12887
- };
12888
- if (model) session.model = model;
12889
- if (cacheCreation != null) session.cacheCreationTokens = cacheCreation;
12890
- if (cacheRead != null) session.cacheReadTokens = cacheRead;
12891
- results.push(session);
13102
+ results.push(buildSessionUsage(sessionId, bucket));
12892
13103
  }
12893
13104
  results.sort((a, b) => b.firstTimestamp.localeCompare(a.firstTimestamp));
12894
13105
  return results;
@@ -12899,32 +13110,9 @@ function aggregateByDay(records) {
12899
13110
  for (const record of records) {
12900
13111
  const date = record.timestamp.slice(0, 10);
12901
13112
  if (!dayMap.has(date)) {
12902
- dayMap.set(date, {
12903
- sessions: /* @__PURE__ */ new Set(),
12904
- tokens: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
12905
- costMicroUSD: 0,
12906
- models: /* @__PURE__ */ new Set()
12907
- });
12908
- }
12909
- const day = dayMap.get(date);
12910
- day.sessions.add(record.sessionId);
12911
- day.tokens.inputTokens += record.tokens.inputTokens;
12912
- day.tokens.outputTokens += record.tokens.outputTokens;
12913
- day.tokens.totalTokens += record.tokens.totalTokens;
12914
- if (record.cacheCreationTokens != null) {
12915
- day.cacheCreation = (day.cacheCreation ?? 0) + record.cacheCreationTokens;
12916
- }
12917
- if (record.cacheReadTokens != null) {
12918
- day.cacheRead = (day.cacheRead ?? 0) + record.cacheReadTokens;
12919
- }
12920
- if (record.costMicroUSD != null && day.costMicroUSD != null) {
12921
- day.costMicroUSD += record.costMicroUSD;
12922
- } else if (record.costMicroUSD == null) {
12923
- day.costMicroUSD = null;
12924
- }
12925
- if (record.model) {
12926
- day.models.add(record.model);
13113
+ dayMap.set(date, createDayBucket());
12927
13114
  }
13115
+ accumulateIntoDayBucket(dayMap.get(date), record);
12928
13116
  }
12929
13117
  const results = [];
12930
13118
  for (const [date, day] of dayMap) {
@@ -12944,8 +13132,29 @@ function aggregateByDay(records) {
12944
13132
  }
12945
13133
 
12946
13134
  // src/usage/jsonl-reader.ts
12947
- import * as fs27 from "fs";
12948
- import * as path26 from "path";
13135
+ import * as fs30 from "fs";
13136
+ import * as path29 from "path";
13137
+ function extractTokenUsage(entry, lineNumber) {
13138
+ const tokenUsage = entry.token_usage;
13139
+ if (!tokenUsage || typeof tokenUsage !== "object") {
13140
+ console.warn(
13141
+ `[harness usage] Skipping malformed JSONL line ${lineNumber}: missing token_usage`
13142
+ );
13143
+ return null;
13144
+ }
13145
+ return tokenUsage;
13146
+ }
13147
+ function applyOptionalFields2(record, entry) {
13148
+ if (entry.cache_creation_tokens != null) {
13149
+ record.cacheCreationTokens = entry.cache_creation_tokens;
13150
+ }
13151
+ if (entry.cache_read_tokens != null) {
13152
+ record.cacheReadTokens = entry.cache_read_tokens;
13153
+ }
13154
+ if (entry.model != null) {
13155
+ record.model = entry.model;
13156
+ }
13157
+ }
12949
13158
  function parseLine(line, lineNumber) {
12950
13159
  let entry;
12951
13160
  try {
@@ -12954,13 +13163,8 @@ function parseLine(line, lineNumber) {
12954
13163
  console.warn(`[harness usage] Skipping malformed JSONL line ${lineNumber}`);
12955
13164
  return null;
12956
13165
  }
12957
- const tokenUsage = entry.token_usage;
12958
- if (!tokenUsage || typeof tokenUsage !== "object") {
12959
- console.warn(
12960
- `[harness usage] Skipping malformed JSONL line ${lineNumber}: missing token_usage`
12961
- );
12962
- return null;
12963
- }
13166
+ const tokenUsage = extractTokenUsage(entry, lineNumber);
13167
+ if (!tokenUsage) return null;
12964
13168
  const inputTokens = tokenUsage.input_tokens ?? 0;
12965
13169
  const outputTokens = tokenUsage.output_tokens ?? 0;
12966
13170
  const record = {
@@ -12972,22 +13176,14 @@ function parseLine(line, lineNumber) {
12972
13176
  totalTokens: inputTokens + outputTokens
12973
13177
  }
12974
13178
  };
12975
- if (entry.cache_creation_tokens != null) {
12976
- record.cacheCreationTokens = entry.cache_creation_tokens;
12977
- }
12978
- if (entry.cache_read_tokens != null) {
12979
- record.cacheReadTokens = entry.cache_read_tokens;
12980
- }
12981
- if (entry.model != null) {
12982
- record.model = entry.model;
12983
- }
13179
+ applyOptionalFields2(record, entry);
12984
13180
  return record;
12985
13181
  }
12986
13182
  function readCostRecords(projectRoot) {
12987
- const costsFile = path26.join(projectRoot, ".harness", "metrics", "costs.jsonl");
13183
+ const costsFile = path29.join(projectRoot, ".harness", "metrics", "costs.jsonl");
12988
13184
  let raw;
12989
13185
  try {
12990
- raw = fs27.readFileSync(costsFile, "utf-8");
13186
+ raw = fs30.readFileSync(costsFile, "utf-8");
12991
13187
  } catch {
12992
13188
  return [];
12993
13189
  }
@@ -13005,8 +13201,8 @@ function readCostRecords(projectRoot) {
13005
13201
  }
13006
13202
 
13007
13203
  // src/usage/cc-parser.ts
13008
- import * as fs28 from "fs";
13009
- import * as path27 from "path";
13204
+ import * as fs31 from "fs";
13205
+ import * as path30 from "path";
13010
13206
  import * as os2 from "os";
13011
13207
  function extractUsage(entry) {
13012
13208
  if (entry.type !== "assistant") return null;
@@ -13015,6 +13211,14 @@ function extractUsage(entry) {
13015
13211
  const usage = message.usage;
13016
13212
  return usage && typeof usage === "object" && !Array.isArray(usage) ? usage : null;
13017
13213
  }
13214
+ function applyOptionalCCFields(record, message, usage) {
13215
+ const model = message.model;
13216
+ if (model) record.model = model;
13217
+ const cacheCreate = usage.cache_creation_input_tokens;
13218
+ const cacheRead = usage.cache_read_input_tokens;
13219
+ if (typeof cacheCreate === "number" && cacheCreate > 0) record.cacheCreationTokens = cacheCreate;
13220
+ if (typeof cacheRead === "number" && cacheRead > 0) record.cacheReadTokens = cacheRead;
13221
+ }
13018
13222
  function buildRecord(entry, usage) {
13019
13223
  const inputTokens = Number(usage.input_tokens) || 0;
13020
13224
  const outputTokens = Number(usage.output_tokens) || 0;
@@ -13025,12 +13229,7 @@ function buildRecord(entry, usage) {
13025
13229
  tokens: { inputTokens, outputTokens, totalTokens: inputTokens + outputTokens },
13026
13230
  _source: "claude-code"
13027
13231
  };
13028
- const model = message.model;
13029
- if (model) record.model = model;
13030
- const cacheCreate = usage.cache_creation_input_tokens;
13031
- const cacheRead = usage.cache_read_input_tokens;
13032
- if (typeof cacheCreate === "number" && cacheCreate > 0) record.cacheCreationTokens = cacheCreate;
13033
- if (typeof cacheRead === "number" && cacheRead > 0) record.cacheReadTokens = cacheRead;
13232
+ applyOptionalCCFields(record, message, usage);
13034
13233
  return record;
13035
13234
  }
13036
13235
  function parseCCLine(line, filePath, lineNumber) {
@@ -13039,7 +13238,7 @@ function parseCCLine(line, filePath, lineNumber) {
13039
13238
  entry = JSON.parse(line);
13040
13239
  } catch {
13041
13240
  console.warn(
13042
- `[harness usage] Skipping malformed CC JSONL line ${lineNumber} in ${path27.basename(filePath)}`
13241
+ `[harness usage] Skipping malformed CC JSONL line ${lineNumber} in ${path30.basename(filePath)}`
13043
13242
  );
13044
13243
  return null;
13045
13244
  }
@@ -13053,7 +13252,7 @@ function parseCCLine(line, filePath, lineNumber) {
13053
13252
  function readCCFile(filePath) {
13054
13253
  let raw;
13055
13254
  try {
13056
- raw = fs28.readFileSync(filePath, "utf-8");
13255
+ raw = fs31.readFileSync(filePath, "utf-8");
13057
13256
  } catch {
13058
13257
  return [];
13059
13258
  }
@@ -13075,10 +13274,10 @@ function readCCFile(filePath) {
13075
13274
  }
13076
13275
  function parseCCRecords() {
13077
13276
  const homeDir = process.env.HOME ?? os2.homedir();
13078
- const projectsDir = path27.join(homeDir, ".claude", "projects");
13277
+ const projectsDir = path30.join(homeDir, ".claude", "projects");
13079
13278
  let projectDirs;
13080
13279
  try {
13081
- projectDirs = fs28.readdirSync(projectsDir, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => path27.join(projectsDir, d.name));
13280
+ projectDirs = fs31.readdirSync(projectsDir, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => path30.join(projectsDir, d.name));
13082
13281
  } catch {
13083
13282
  return [];
13084
13283
  }
@@ -13086,7 +13285,7 @@ function parseCCRecords() {
13086
13285
  for (const dir of projectDirs) {
13087
13286
  let files;
13088
13287
  try {
13089
- files = fs28.readdirSync(dir).filter((f) => f.endsWith(".jsonl")).map((f) => path27.join(dir, f));
13288
+ files = fs31.readdirSync(dir).filter((f) => f.endsWith(".jsonl")).map((f) => path30.join(dir, f));
13090
13289
  } catch {
13091
13290
  continue;
13092
13291
  }
@@ -13098,7 +13297,7 @@ function parseCCRecords() {
13098
13297
  }
13099
13298
 
13100
13299
  // src/index.ts
13101
- var VERSION = "0.15.0";
13300
+ var VERSION = "0.21.1";
13102
13301
  export {
13103
13302
  AGENT_DESCRIPTORS,
13104
13303
  ARCHITECTURE_DESCRIPTOR,
@@ -13128,7 +13327,6 @@ export {
13128
13327
  ConfirmationSchema,
13129
13328
  ConsoleSink,
13130
13329
  ConstraintRuleSchema,
13131
- ContentPipeline,
13132
13330
  ContributingFeatureSchema,
13133
13331
  ContributionsSchema,
13134
13332
  CouplingCollector,
@@ -13237,6 +13435,7 @@ export {
13237
13435
  clearFailuresCache,
13238
13436
  clearLearningsCache,
13239
13437
  clearTaint,
13438
+ computeContentHash,
13240
13439
  computeOverallSeverity,
13241
13440
  computeScanExitCode,
13242
13441
  configureFeedback,
@@ -13330,6 +13529,7 @@ export {
13330
13529
  migrateToStreams,
13331
13530
  networkRules,
13332
13531
  nodeRules,
13532
+ normalizeLearningContent,
13333
13533
  parseCCRecords,
13334
13534
  parseDateFromEntry,
13335
13535
  parseDiff,